Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "internal/buildcfg"
     6  import "math"
     7  import "cmd/compile/internal/types"
     8  
     9  func rewriteValueRISCV64(v *Value) bool {
    10  	switch v.Op {
    11  	case OpAbs:
    12  		v.Op = OpRISCV64FABSD
    13  		return true
    14  	case OpAdd16:
    15  		v.Op = OpRISCV64ADD
    16  		return true
    17  	case OpAdd32:
    18  		v.Op = OpRISCV64ADD
    19  		return true
    20  	case OpAdd32F:
    21  		v.Op = OpRISCV64FADDS
    22  		return true
    23  	case OpAdd64:
    24  		v.Op = OpRISCV64ADD
    25  		return true
    26  	case OpAdd64F:
    27  		v.Op = OpRISCV64FADDD
    28  		return true
    29  	case OpAdd8:
    30  		v.Op = OpRISCV64ADD
    31  		return true
    32  	case OpAddPtr:
    33  		v.Op = OpRISCV64ADD
    34  		return true
    35  	case OpAddr:
    36  		return rewriteValueRISCV64_OpAddr(v)
    37  	case OpAnd16:
    38  		v.Op = OpRISCV64AND
    39  		return true
    40  	case OpAnd32:
    41  		v.Op = OpRISCV64AND
    42  		return true
    43  	case OpAnd64:
    44  		v.Op = OpRISCV64AND
    45  		return true
    46  	case OpAnd8:
    47  		v.Op = OpRISCV64AND
    48  		return true
    49  	case OpAndB:
    50  		v.Op = OpRISCV64AND
    51  		return true
    52  	case OpAtomicAdd32:
    53  		v.Op = OpRISCV64LoweredAtomicAdd32
    54  		return true
    55  	case OpAtomicAdd64:
    56  		v.Op = OpRISCV64LoweredAtomicAdd64
    57  		return true
    58  	case OpAtomicAnd32:
    59  		v.Op = OpRISCV64LoweredAtomicAnd32
    60  		return true
    61  	case OpAtomicAnd8:
    62  		return rewriteValueRISCV64_OpAtomicAnd8(v)
    63  	case OpAtomicCompareAndSwap32:
    64  		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
    65  	case OpAtomicCompareAndSwap64:
    66  		v.Op = OpRISCV64LoweredAtomicCas64
    67  		return true
    68  	case OpAtomicExchange32:
    69  		v.Op = OpRISCV64LoweredAtomicExchange32
    70  		return true
    71  	case OpAtomicExchange64:
    72  		v.Op = OpRISCV64LoweredAtomicExchange64
    73  		return true
    74  	case OpAtomicLoad32:
    75  		v.Op = OpRISCV64LoweredAtomicLoad32
    76  		return true
    77  	case OpAtomicLoad64:
    78  		v.Op = OpRISCV64LoweredAtomicLoad64
    79  		return true
    80  	case OpAtomicLoad8:
    81  		v.Op = OpRISCV64LoweredAtomicLoad8
    82  		return true
    83  	case OpAtomicLoadPtr:
    84  		v.Op = OpRISCV64LoweredAtomicLoad64
    85  		return true
    86  	case OpAtomicOr32:
    87  		v.Op = OpRISCV64LoweredAtomicOr32
    88  		return true
    89  	case OpAtomicOr8:
    90  		return rewriteValueRISCV64_OpAtomicOr8(v)
    91  	case OpAtomicStore32:
    92  		v.Op = OpRISCV64LoweredAtomicStore32
    93  		return true
    94  	case OpAtomicStore64:
    95  		v.Op = OpRISCV64LoweredAtomicStore64
    96  		return true
    97  	case OpAtomicStore8:
    98  		v.Op = OpRISCV64LoweredAtomicStore8
    99  		return true
   100  	case OpAtomicStorePtrNoWB:
   101  		v.Op = OpRISCV64LoweredAtomicStore64
   102  		return true
   103  	case OpAvg64u:
   104  		return rewriteValueRISCV64_OpAvg64u(v)
   105  	case OpBitLen16:
   106  		return rewriteValueRISCV64_OpBitLen16(v)
   107  	case OpBitLen32:
   108  		return rewriteValueRISCV64_OpBitLen32(v)
   109  	case OpBitLen64:
   110  		return rewriteValueRISCV64_OpBitLen64(v)
   111  	case OpBitLen8:
   112  		return rewriteValueRISCV64_OpBitLen8(v)
   113  	case OpBswap16:
   114  		return rewriteValueRISCV64_OpBswap16(v)
   115  	case OpBswap32:
   116  		return rewriteValueRISCV64_OpBswap32(v)
   117  	case OpBswap64:
   118  		v.Op = OpRISCV64REV8
   119  		return true
   120  	case OpClosureCall:
   121  		v.Op = OpRISCV64CALLclosure
   122  		return true
   123  	case OpCom16:
   124  		v.Op = OpRISCV64NOT
   125  		return true
   126  	case OpCom32:
   127  		v.Op = OpRISCV64NOT
   128  		return true
   129  	case OpCom64:
   130  		v.Op = OpRISCV64NOT
   131  		return true
   132  	case OpCom8:
   133  		v.Op = OpRISCV64NOT
   134  		return true
   135  	case OpConst16:
   136  		return rewriteValueRISCV64_OpConst16(v)
   137  	case OpConst32:
   138  		return rewriteValueRISCV64_OpConst32(v)
   139  	case OpConst32F:
   140  		return rewriteValueRISCV64_OpConst32F(v)
   141  	case OpConst64:
   142  		return rewriteValueRISCV64_OpConst64(v)
   143  	case OpConst64F:
   144  		return rewriteValueRISCV64_OpConst64F(v)
   145  	case OpConst8:
   146  		return rewriteValueRISCV64_OpConst8(v)
   147  	case OpConstBool:
   148  		return rewriteValueRISCV64_OpConstBool(v)
   149  	case OpConstNil:
   150  		return rewriteValueRISCV64_OpConstNil(v)
   151  	case OpCopysign:
   152  		v.Op = OpRISCV64FSGNJD
   153  		return true
   154  	case OpCtz16:
   155  		return rewriteValueRISCV64_OpCtz16(v)
   156  	case OpCtz16NonZero:
   157  		v.Op = OpCtz64
   158  		return true
   159  	case OpCtz32:
   160  		v.Op = OpRISCV64CTZW
   161  		return true
   162  	case OpCtz32NonZero:
   163  		v.Op = OpCtz64
   164  		return true
   165  	case OpCtz64:
   166  		v.Op = OpRISCV64CTZ
   167  		return true
   168  	case OpCtz64NonZero:
   169  		v.Op = OpCtz64
   170  		return true
   171  	case OpCtz8:
   172  		return rewriteValueRISCV64_OpCtz8(v)
   173  	case OpCtz8NonZero:
   174  		v.Op = OpCtz64
   175  		return true
   176  	case OpCvt32Fto32:
   177  		v.Op = OpRISCV64FCVTWS
   178  		return true
   179  	case OpCvt32Fto64:
   180  		v.Op = OpRISCV64FCVTLS
   181  		return true
   182  	case OpCvt32Fto64F:
   183  		v.Op = OpRISCV64FCVTDS
   184  		return true
   185  	case OpCvt32to32F:
   186  		v.Op = OpRISCV64FCVTSW
   187  		return true
   188  	case OpCvt32to64F:
   189  		v.Op = OpRISCV64FCVTDW
   190  		return true
   191  	case OpCvt64Fto32:
   192  		v.Op = OpRISCV64FCVTWD
   193  		return true
   194  	case OpCvt64Fto32F:
   195  		v.Op = OpRISCV64FCVTSD
   196  		return true
   197  	case OpCvt64Fto64:
   198  		v.Op = OpRISCV64FCVTLD
   199  		return true
   200  	case OpCvt64to32F:
   201  		v.Op = OpRISCV64FCVTSL
   202  		return true
   203  	case OpCvt64to64F:
   204  		v.Op = OpRISCV64FCVTDL
   205  		return true
   206  	case OpCvtBoolToUint8:
   207  		v.Op = OpCopy
   208  		return true
   209  	case OpDiv16:
   210  		return rewriteValueRISCV64_OpDiv16(v)
   211  	case OpDiv16u:
   212  		return rewriteValueRISCV64_OpDiv16u(v)
   213  	case OpDiv32:
   214  		return rewriteValueRISCV64_OpDiv32(v)
   215  	case OpDiv32F:
   216  		v.Op = OpRISCV64FDIVS
   217  		return true
   218  	case OpDiv32u:
   219  		v.Op = OpRISCV64DIVUW
   220  		return true
   221  	case OpDiv64:
   222  		return rewriteValueRISCV64_OpDiv64(v)
   223  	case OpDiv64F:
   224  		v.Op = OpRISCV64FDIVD
   225  		return true
   226  	case OpDiv64u:
   227  		v.Op = OpRISCV64DIVU
   228  		return true
   229  	case OpDiv8:
   230  		return rewriteValueRISCV64_OpDiv8(v)
   231  	case OpDiv8u:
   232  		return rewriteValueRISCV64_OpDiv8u(v)
   233  	case OpEq16:
   234  		return rewriteValueRISCV64_OpEq16(v)
   235  	case OpEq32:
   236  		return rewriteValueRISCV64_OpEq32(v)
   237  	case OpEq32F:
   238  		v.Op = OpRISCV64FEQS
   239  		return true
   240  	case OpEq64:
   241  		return rewriteValueRISCV64_OpEq64(v)
   242  	case OpEq64F:
   243  		v.Op = OpRISCV64FEQD
   244  		return true
   245  	case OpEq8:
   246  		return rewriteValueRISCV64_OpEq8(v)
   247  	case OpEqB:
   248  		return rewriteValueRISCV64_OpEqB(v)
   249  	case OpEqPtr:
   250  		return rewriteValueRISCV64_OpEqPtr(v)
   251  	case OpFMA:
   252  		v.Op = OpRISCV64FMADDD
   253  		return true
   254  	case OpGetCallerPC:
   255  		v.Op = OpRISCV64LoweredGetCallerPC
   256  		return true
   257  	case OpGetCallerSP:
   258  		v.Op = OpRISCV64LoweredGetCallerSP
   259  		return true
   260  	case OpGetClosurePtr:
   261  		v.Op = OpRISCV64LoweredGetClosurePtr
   262  		return true
   263  	case OpHmul32:
   264  		return rewriteValueRISCV64_OpHmul32(v)
   265  	case OpHmul32u:
   266  		return rewriteValueRISCV64_OpHmul32u(v)
   267  	case OpHmul64:
   268  		v.Op = OpRISCV64MULH
   269  		return true
   270  	case OpHmul64u:
   271  		v.Op = OpRISCV64MULHU
   272  		return true
   273  	case OpInterCall:
   274  		v.Op = OpRISCV64CALLinter
   275  		return true
   276  	case OpIsInBounds:
   277  		v.Op = OpLess64U
   278  		return true
   279  	case OpIsNonNil:
   280  		v.Op = OpRISCV64SNEZ
   281  		return true
   282  	case OpIsSliceInBounds:
   283  		v.Op = OpLeq64U
   284  		return true
   285  	case OpLeq16:
   286  		return rewriteValueRISCV64_OpLeq16(v)
   287  	case OpLeq16U:
   288  		return rewriteValueRISCV64_OpLeq16U(v)
   289  	case OpLeq32:
   290  		return rewriteValueRISCV64_OpLeq32(v)
   291  	case OpLeq32F:
   292  		v.Op = OpRISCV64FLES
   293  		return true
   294  	case OpLeq32U:
   295  		return rewriteValueRISCV64_OpLeq32U(v)
   296  	case OpLeq64:
   297  		return rewriteValueRISCV64_OpLeq64(v)
   298  	case OpLeq64F:
   299  		v.Op = OpRISCV64FLED
   300  		return true
   301  	case OpLeq64U:
   302  		return rewriteValueRISCV64_OpLeq64U(v)
   303  	case OpLeq8:
   304  		return rewriteValueRISCV64_OpLeq8(v)
   305  	case OpLeq8U:
   306  		return rewriteValueRISCV64_OpLeq8U(v)
   307  	case OpLess16:
   308  		return rewriteValueRISCV64_OpLess16(v)
   309  	case OpLess16U:
   310  		return rewriteValueRISCV64_OpLess16U(v)
   311  	case OpLess32:
   312  		return rewriteValueRISCV64_OpLess32(v)
   313  	case OpLess32F:
   314  		v.Op = OpRISCV64FLTS
   315  		return true
   316  	case OpLess32U:
   317  		return rewriteValueRISCV64_OpLess32U(v)
   318  	case OpLess64:
   319  		v.Op = OpRISCV64SLT
   320  		return true
   321  	case OpLess64F:
   322  		v.Op = OpRISCV64FLTD
   323  		return true
   324  	case OpLess64U:
   325  		v.Op = OpRISCV64SLTU
   326  		return true
   327  	case OpLess8:
   328  		return rewriteValueRISCV64_OpLess8(v)
   329  	case OpLess8U:
   330  		return rewriteValueRISCV64_OpLess8U(v)
   331  	case OpLoad:
   332  		return rewriteValueRISCV64_OpLoad(v)
   333  	case OpLocalAddr:
   334  		return rewriteValueRISCV64_OpLocalAddr(v)
   335  	case OpLsh16x16:
   336  		return rewriteValueRISCV64_OpLsh16x16(v)
   337  	case OpLsh16x32:
   338  		return rewriteValueRISCV64_OpLsh16x32(v)
   339  	case OpLsh16x64:
   340  		return rewriteValueRISCV64_OpLsh16x64(v)
   341  	case OpLsh16x8:
   342  		return rewriteValueRISCV64_OpLsh16x8(v)
   343  	case OpLsh32x16:
   344  		return rewriteValueRISCV64_OpLsh32x16(v)
   345  	case OpLsh32x32:
   346  		return rewriteValueRISCV64_OpLsh32x32(v)
   347  	case OpLsh32x64:
   348  		return rewriteValueRISCV64_OpLsh32x64(v)
   349  	case OpLsh32x8:
   350  		return rewriteValueRISCV64_OpLsh32x8(v)
   351  	case OpLsh64x16:
   352  		return rewriteValueRISCV64_OpLsh64x16(v)
   353  	case OpLsh64x32:
   354  		return rewriteValueRISCV64_OpLsh64x32(v)
   355  	case OpLsh64x64:
   356  		return rewriteValueRISCV64_OpLsh64x64(v)
   357  	case OpLsh64x8:
   358  		return rewriteValueRISCV64_OpLsh64x8(v)
   359  	case OpLsh8x16:
   360  		return rewriteValueRISCV64_OpLsh8x16(v)
   361  	case OpLsh8x32:
   362  		return rewriteValueRISCV64_OpLsh8x32(v)
   363  	case OpLsh8x64:
   364  		return rewriteValueRISCV64_OpLsh8x64(v)
   365  	case OpLsh8x8:
   366  		return rewriteValueRISCV64_OpLsh8x8(v)
   367  	case OpMax32F:
   368  		v.Op = OpRISCV64LoweredFMAXS
   369  		return true
   370  	case OpMax64:
   371  		return rewriteValueRISCV64_OpMax64(v)
   372  	case OpMax64F:
   373  		v.Op = OpRISCV64LoweredFMAXD
   374  		return true
   375  	case OpMax64u:
   376  		return rewriteValueRISCV64_OpMax64u(v)
   377  	case OpMin32F:
   378  		v.Op = OpRISCV64LoweredFMINS
   379  		return true
   380  	case OpMin64:
   381  		return rewriteValueRISCV64_OpMin64(v)
   382  	case OpMin64F:
   383  		v.Op = OpRISCV64LoweredFMIND
   384  		return true
   385  	case OpMin64u:
   386  		return rewriteValueRISCV64_OpMin64u(v)
   387  	case OpMod16:
   388  		return rewriteValueRISCV64_OpMod16(v)
   389  	case OpMod16u:
   390  		return rewriteValueRISCV64_OpMod16u(v)
   391  	case OpMod32:
   392  		return rewriteValueRISCV64_OpMod32(v)
   393  	case OpMod32u:
   394  		v.Op = OpRISCV64REMUW
   395  		return true
   396  	case OpMod64:
   397  		return rewriteValueRISCV64_OpMod64(v)
   398  	case OpMod64u:
   399  		v.Op = OpRISCV64REMU
   400  		return true
   401  	case OpMod8:
   402  		return rewriteValueRISCV64_OpMod8(v)
   403  	case OpMod8u:
   404  		return rewriteValueRISCV64_OpMod8u(v)
   405  	case OpMove:
   406  		return rewriteValueRISCV64_OpMove(v)
   407  	case OpMul16:
   408  		return rewriteValueRISCV64_OpMul16(v)
   409  	case OpMul32:
   410  		v.Op = OpRISCV64MULW
   411  		return true
   412  	case OpMul32F:
   413  		v.Op = OpRISCV64FMULS
   414  		return true
   415  	case OpMul64:
   416  		v.Op = OpRISCV64MUL
   417  		return true
   418  	case OpMul64F:
   419  		v.Op = OpRISCV64FMULD
   420  		return true
   421  	case OpMul64uhilo:
   422  		v.Op = OpRISCV64LoweredMuluhilo
   423  		return true
   424  	case OpMul64uover:
   425  		v.Op = OpRISCV64LoweredMuluover
   426  		return true
   427  	case OpMul8:
   428  		return rewriteValueRISCV64_OpMul8(v)
   429  	case OpNeg16:
   430  		v.Op = OpRISCV64NEG
   431  		return true
   432  	case OpNeg32:
   433  		v.Op = OpRISCV64NEG
   434  		return true
   435  	case OpNeg32F:
   436  		v.Op = OpRISCV64FNEGS
   437  		return true
   438  	case OpNeg64:
   439  		v.Op = OpRISCV64NEG
   440  		return true
   441  	case OpNeg64F:
   442  		v.Op = OpRISCV64FNEGD
   443  		return true
   444  	case OpNeg8:
   445  		v.Op = OpRISCV64NEG
   446  		return true
   447  	case OpNeq16:
   448  		return rewriteValueRISCV64_OpNeq16(v)
   449  	case OpNeq32:
   450  		return rewriteValueRISCV64_OpNeq32(v)
   451  	case OpNeq32F:
   452  		v.Op = OpRISCV64FNES
   453  		return true
   454  	case OpNeq64:
   455  		return rewriteValueRISCV64_OpNeq64(v)
   456  	case OpNeq64F:
   457  		v.Op = OpRISCV64FNED
   458  		return true
   459  	case OpNeq8:
   460  		return rewriteValueRISCV64_OpNeq8(v)
   461  	case OpNeqB:
   462  		return rewriteValueRISCV64_OpNeqB(v)
   463  	case OpNeqPtr:
   464  		return rewriteValueRISCV64_OpNeqPtr(v)
   465  	case OpNilCheck:
   466  		v.Op = OpRISCV64LoweredNilCheck
   467  		return true
   468  	case OpNot:
   469  		v.Op = OpRISCV64SEQZ
   470  		return true
   471  	case OpOffPtr:
   472  		return rewriteValueRISCV64_OpOffPtr(v)
   473  	case OpOr16:
   474  		v.Op = OpRISCV64OR
   475  		return true
   476  	case OpOr32:
   477  		v.Op = OpRISCV64OR
   478  		return true
   479  	case OpOr64:
   480  		v.Op = OpRISCV64OR
   481  		return true
   482  	case OpOr8:
   483  		v.Op = OpRISCV64OR
   484  		return true
   485  	case OpOrB:
   486  		v.Op = OpRISCV64OR
   487  		return true
   488  	case OpPanicBounds:
   489  		v.Op = OpRISCV64LoweredPanicBoundsRR
   490  		return true
   491  	case OpPopCount16:
   492  		return rewriteValueRISCV64_OpPopCount16(v)
   493  	case OpPopCount32:
   494  		v.Op = OpRISCV64CPOPW
   495  		return true
   496  	case OpPopCount64:
   497  		v.Op = OpRISCV64CPOP
   498  		return true
   499  	case OpPopCount8:
   500  		return rewriteValueRISCV64_OpPopCount8(v)
   501  	case OpPubBarrier:
   502  		v.Op = OpRISCV64LoweredPubBarrier
   503  		return true
   504  	case OpRISCV64ADD:
   505  		return rewriteValueRISCV64_OpRISCV64ADD(v)
   506  	case OpRISCV64ADDI:
   507  		return rewriteValueRISCV64_OpRISCV64ADDI(v)
   508  	case OpRISCV64AND:
   509  		return rewriteValueRISCV64_OpRISCV64AND(v)
   510  	case OpRISCV64ANDI:
   511  		return rewriteValueRISCV64_OpRISCV64ANDI(v)
   512  	case OpRISCV64FADDD:
   513  		return rewriteValueRISCV64_OpRISCV64FADDD(v)
   514  	case OpRISCV64FADDS:
   515  		return rewriteValueRISCV64_OpRISCV64FADDS(v)
   516  	case OpRISCV64FMADDD:
   517  		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
   518  	case OpRISCV64FMADDS:
   519  		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
   520  	case OpRISCV64FMOVDload:
   521  		return rewriteValueRISCV64_OpRISCV64FMOVDload(v)
   522  	case OpRISCV64FMOVDstore:
   523  		return rewriteValueRISCV64_OpRISCV64FMOVDstore(v)
   524  	case OpRISCV64FMOVWload:
   525  		return rewriteValueRISCV64_OpRISCV64FMOVWload(v)
   526  	case OpRISCV64FMOVWstore:
   527  		return rewriteValueRISCV64_OpRISCV64FMOVWstore(v)
   528  	case OpRISCV64FMSUBD:
   529  		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
   530  	case OpRISCV64FMSUBS:
   531  		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
   532  	case OpRISCV64FNMADDD:
   533  		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
   534  	case OpRISCV64FNMADDS:
   535  		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
   536  	case OpRISCV64FNMSUBD:
   537  		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
   538  	case OpRISCV64FNMSUBS:
   539  		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
   540  	case OpRISCV64FSUBD:
   541  		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
   542  	case OpRISCV64FSUBS:
   543  		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
   544  	case OpRISCV64LoweredPanicBoundsCR:
   545  		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR(v)
   546  	case OpRISCV64LoweredPanicBoundsRC:
   547  		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC(v)
   548  	case OpRISCV64LoweredPanicBoundsRR:
   549  		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR(v)
   550  	case OpRISCV64MOVBUload:
   551  		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
   552  	case OpRISCV64MOVBUreg:
   553  		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
   554  	case OpRISCV64MOVBload:
   555  		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
   556  	case OpRISCV64MOVBreg:
   557  		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
   558  	case OpRISCV64MOVBstore:
   559  		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
   560  	case OpRISCV64MOVBstorezero:
   561  		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
   562  	case OpRISCV64MOVDload:
   563  		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
   564  	case OpRISCV64MOVDnop:
   565  		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
   566  	case OpRISCV64MOVDreg:
   567  		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
   568  	case OpRISCV64MOVDstore:
   569  		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
   570  	case OpRISCV64MOVDstorezero:
   571  		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
   572  	case OpRISCV64MOVHUload:
   573  		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
   574  	case OpRISCV64MOVHUreg:
   575  		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
   576  	case OpRISCV64MOVHload:
   577  		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
   578  	case OpRISCV64MOVHreg:
   579  		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
   580  	case OpRISCV64MOVHstore:
   581  		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
   582  	case OpRISCV64MOVHstorezero:
   583  		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
   584  	case OpRISCV64MOVWUload:
   585  		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
   586  	case OpRISCV64MOVWUreg:
   587  		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
   588  	case OpRISCV64MOVWload:
   589  		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
   590  	case OpRISCV64MOVWreg:
   591  		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
   592  	case OpRISCV64MOVWstore:
   593  		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
   594  	case OpRISCV64MOVWstorezero:
   595  		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
   596  	case OpRISCV64NEG:
   597  		return rewriteValueRISCV64_OpRISCV64NEG(v)
   598  	case OpRISCV64NEGW:
   599  		return rewriteValueRISCV64_OpRISCV64NEGW(v)
   600  	case OpRISCV64OR:
   601  		return rewriteValueRISCV64_OpRISCV64OR(v)
   602  	case OpRISCV64ORI:
   603  		return rewriteValueRISCV64_OpRISCV64ORI(v)
   604  	case OpRISCV64ORN:
   605  		return rewriteValueRISCV64_OpRISCV64ORN(v)
   606  	case OpRISCV64ROL:
   607  		return rewriteValueRISCV64_OpRISCV64ROL(v)
   608  	case OpRISCV64ROLW:
   609  		return rewriteValueRISCV64_OpRISCV64ROLW(v)
   610  	case OpRISCV64ROR:
   611  		return rewriteValueRISCV64_OpRISCV64ROR(v)
   612  	case OpRISCV64RORW:
   613  		return rewriteValueRISCV64_OpRISCV64RORW(v)
   614  	case OpRISCV64SEQZ:
   615  		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
   616  	case OpRISCV64SLL:
   617  		return rewriteValueRISCV64_OpRISCV64SLL(v)
   618  	case OpRISCV64SLLI:
   619  		return rewriteValueRISCV64_OpRISCV64SLLI(v)
   620  	case OpRISCV64SLLW:
   621  		return rewriteValueRISCV64_OpRISCV64SLLW(v)
   622  	case OpRISCV64SLT:
   623  		return rewriteValueRISCV64_OpRISCV64SLT(v)
   624  	case OpRISCV64SLTI:
   625  		return rewriteValueRISCV64_OpRISCV64SLTI(v)
   626  	case OpRISCV64SLTIU:
   627  		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
   628  	case OpRISCV64SLTU:
   629  		return rewriteValueRISCV64_OpRISCV64SLTU(v)
   630  	case OpRISCV64SNEZ:
   631  		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
   632  	case OpRISCV64SRA:
   633  		return rewriteValueRISCV64_OpRISCV64SRA(v)
   634  	case OpRISCV64SRAI:
   635  		return rewriteValueRISCV64_OpRISCV64SRAI(v)
   636  	case OpRISCV64SRAW:
   637  		return rewriteValueRISCV64_OpRISCV64SRAW(v)
   638  	case OpRISCV64SRL:
   639  		return rewriteValueRISCV64_OpRISCV64SRL(v)
   640  	case OpRISCV64SRLI:
   641  		return rewriteValueRISCV64_OpRISCV64SRLI(v)
   642  	case OpRISCV64SRLW:
   643  		return rewriteValueRISCV64_OpRISCV64SRLW(v)
   644  	case OpRISCV64SUB:
   645  		return rewriteValueRISCV64_OpRISCV64SUB(v)
   646  	case OpRISCV64SUBW:
   647  		return rewriteValueRISCV64_OpRISCV64SUBW(v)
   648  	case OpRISCV64XOR:
   649  		return rewriteValueRISCV64_OpRISCV64XOR(v)
   650  	case OpRotateLeft16:
   651  		return rewriteValueRISCV64_OpRotateLeft16(v)
   652  	case OpRotateLeft32:
   653  		v.Op = OpRISCV64ROLW
   654  		return true
   655  	case OpRotateLeft64:
   656  		v.Op = OpRISCV64ROL
   657  		return true
   658  	case OpRotateLeft8:
   659  		return rewriteValueRISCV64_OpRotateLeft8(v)
   660  	case OpRound32F:
   661  		v.Op = OpRISCV64LoweredRound32F
   662  		return true
   663  	case OpRound64F:
   664  		v.Op = OpRISCV64LoweredRound64F
   665  		return true
   666  	case OpRsh16Ux16:
   667  		return rewriteValueRISCV64_OpRsh16Ux16(v)
   668  	case OpRsh16Ux32:
   669  		return rewriteValueRISCV64_OpRsh16Ux32(v)
   670  	case OpRsh16Ux64:
   671  		return rewriteValueRISCV64_OpRsh16Ux64(v)
   672  	case OpRsh16Ux8:
   673  		return rewriteValueRISCV64_OpRsh16Ux8(v)
   674  	case OpRsh16x16:
   675  		return rewriteValueRISCV64_OpRsh16x16(v)
   676  	case OpRsh16x32:
   677  		return rewriteValueRISCV64_OpRsh16x32(v)
   678  	case OpRsh16x64:
   679  		return rewriteValueRISCV64_OpRsh16x64(v)
   680  	case OpRsh16x8:
   681  		return rewriteValueRISCV64_OpRsh16x8(v)
   682  	case OpRsh32Ux16:
   683  		return rewriteValueRISCV64_OpRsh32Ux16(v)
   684  	case OpRsh32Ux32:
   685  		return rewriteValueRISCV64_OpRsh32Ux32(v)
   686  	case OpRsh32Ux64:
   687  		return rewriteValueRISCV64_OpRsh32Ux64(v)
   688  	case OpRsh32Ux8:
   689  		return rewriteValueRISCV64_OpRsh32Ux8(v)
   690  	case OpRsh32x16:
   691  		return rewriteValueRISCV64_OpRsh32x16(v)
   692  	case OpRsh32x32:
   693  		return rewriteValueRISCV64_OpRsh32x32(v)
   694  	case OpRsh32x64:
   695  		return rewriteValueRISCV64_OpRsh32x64(v)
   696  	case OpRsh32x8:
   697  		return rewriteValueRISCV64_OpRsh32x8(v)
   698  	case OpRsh64Ux16:
   699  		return rewriteValueRISCV64_OpRsh64Ux16(v)
   700  	case OpRsh64Ux32:
   701  		return rewriteValueRISCV64_OpRsh64Ux32(v)
   702  	case OpRsh64Ux64:
   703  		return rewriteValueRISCV64_OpRsh64Ux64(v)
   704  	case OpRsh64Ux8:
   705  		return rewriteValueRISCV64_OpRsh64Ux8(v)
   706  	case OpRsh64x16:
   707  		return rewriteValueRISCV64_OpRsh64x16(v)
   708  	case OpRsh64x32:
   709  		return rewriteValueRISCV64_OpRsh64x32(v)
   710  	case OpRsh64x64:
   711  		return rewriteValueRISCV64_OpRsh64x64(v)
   712  	case OpRsh64x8:
   713  		return rewriteValueRISCV64_OpRsh64x8(v)
   714  	case OpRsh8Ux16:
   715  		return rewriteValueRISCV64_OpRsh8Ux16(v)
   716  	case OpRsh8Ux32:
   717  		return rewriteValueRISCV64_OpRsh8Ux32(v)
   718  	case OpRsh8Ux64:
   719  		return rewriteValueRISCV64_OpRsh8Ux64(v)
   720  	case OpRsh8Ux8:
   721  		return rewriteValueRISCV64_OpRsh8Ux8(v)
   722  	case OpRsh8x16:
   723  		return rewriteValueRISCV64_OpRsh8x16(v)
   724  	case OpRsh8x32:
   725  		return rewriteValueRISCV64_OpRsh8x32(v)
   726  	case OpRsh8x64:
   727  		return rewriteValueRISCV64_OpRsh8x64(v)
   728  	case OpRsh8x8:
   729  		return rewriteValueRISCV64_OpRsh8x8(v)
   730  	case OpSelect0:
   731  		return rewriteValueRISCV64_OpSelect0(v)
   732  	case OpSelect1:
   733  		return rewriteValueRISCV64_OpSelect1(v)
   734  	case OpSignExt16to32:
   735  		v.Op = OpRISCV64MOVHreg
   736  		return true
   737  	case OpSignExt16to64:
   738  		v.Op = OpRISCV64MOVHreg
   739  		return true
   740  	case OpSignExt32to64:
   741  		v.Op = OpRISCV64MOVWreg
   742  		return true
   743  	case OpSignExt8to16:
   744  		v.Op = OpRISCV64MOVBreg
   745  		return true
   746  	case OpSignExt8to32:
   747  		v.Op = OpRISCV64MOVBreg
   748  		return true
   749  	case OpSignExt8to64:
   750  		v.Op = OpRISCV64MOVBreg
   751  		return true
   752  	case OpSlicemask:
   753  		return rewriteValueRISCV64_OpSlicemask(v)
   754  	case OpSqrt:
   755  		v.Op = OpRISCV64FSQRTD
   756  		return true
   757  	case OpSqrt32:
   758  		v.Op = OpRISCV64FSQRTS
   759  		return true
   760  	case OpStaticCall:
   761  		v.Op = OpRISCV64CALLstatic
   762  		return true
   763  	case OpStore:
   764  		return rewriteValueRISCV64_OpStore(v)
   765  	case OpSub16:
   766  		v.Op = OpRISCV64SUB
   767  		return true
   768  	case OpSub32:
   769  		v.Op = OpRISCV64SUB
   770  		return true
   771  	case OpSub32F:
   772  		v.Op = OpRISCV64FSUBS
   773  		return true
   774  	case OpSub64:
   775  		v.Op = OpRISCV64SUB
   776  		return true
   777  	case OpSub64F:
   778  		v.Op = OpRISCV64FSUBD
   779  		return true
   780  	case OpSub8:
   781  		v.Op = OpRISCV64SUB
   782  		return true
   783  	case OpSubPtr:
   784  		v.Op = OpRISCV64SUB
   785  		return true
   786  	case OpTailCall:
   787  		v.Op = OpRISCV64CALLtail
   788  		return true
   789  	case OpTrunc16to8:
   790  		v.Op = OpCopy
   791  		return true
   792  	case OpTrunc32to16:
   793  		v.Op = OpCopy
   794  		return true
   795  	case OpTrunc32to8:
   796  		v.Op = OpCopy
   797  		return true
   798  	case OpTrunc64to16:
   799  		v.Op = OpCopy
   800  		return true
   801  	case OpTrunc64to32:
   802  		v.Op = OpCopy
   803  		return true
   804  	case OpTrunc64to8:
   805  		v.Op = OpCopy
   806  		return true
   807  	case OpWB:
   808  		v.Op = OpRISCV64LoweredWB
   809  		return true
   810  	case OpXor16:
   811  		v.Op = OpRISCV64XOR
   812  		return true
   813  	case OpXor32:
   814  		v.Op = OpRISCV64XOR
   815  		return true
   816  	case OpXor64:
   817  		v.Op = OpRISCV64XOR
   818  		return true
   819  	case OpXor8:
   820  		v.Op = OpRISCV64XOR
   821  		return true
   822  	case OpZero:
   823  		return rewriteValueRISCV64_OpZero(v)
   824  	case OpZeroExt16to32:
   825  		v.Op = OpRISCV64MOVHUreg
   826  		return true
   827  	case OpZeroExt16to64:
   828  		v.Op = OpRISCV64MOVHUreg
   829  		return true
   830  	case OpZeroExt32to64:
   831  		v.Op = OpRISCV64MOVWUreg
   832  		return true
   833  	case OpZeroExt8to16:
   834  		v.Op = OpRISCV64MOVBUreg
   835  		return true
   836  	case OpZeroExt8to32:
   837  		v.Op = OpRISCV64MOVBUreg
   838  		return true
   839  	case OpZeroExt8to64:
   840  		v.Op = OpRISCV64MOVBUreg
   841  		return true
   842  	}
   843  	return false
   844  }
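// Note on the generated dispatcher above: lowerings that are a pure opcode
// substitution flip v.Op in place (keeping the value's existing arguments)
// and return true, while rules that introduce new values, aux data or
// conditions are delegated to the per-op helpers that follow. Each helper
// mirrors one or more rules in _gen/RISCV64.rules; the "match:"/"result:"
// comments inside each helper quote the rule it implements, e.g. a rule of
// (roughly) the form (Addr {sym} base) => (MOVaddr {sym} [0] base) produces
// rewriteValueRISCV64_OpAddr below.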
   845  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   846  	v_0 := v.Args[0]
   847  	// match: (Addr {sym} base)
   848  	// result: (MOVaddr {sym} [0] base)
   849  	for {
   850  		sym := auxToSym(v.Aux)
   851  		base := v_0
   852  		v.reset(OpRISCV64MOVaddr)
   853  		v.AuxInt = int32ToAuxInt(0)
   854  		v.Aux = symToAux(sym)
   855  		v.AddArg(base)
   856  		return true
   857  	}
   858  }
   859  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   860  	v_2 := v.Args[2]
   861  	v_1 := v.Args[1]
   862  	v_0 := v.Args[0]
   863  	b := v.Block
   864  	typ := &b.Func.Config.Types
   865  	// match: (AtomicAnd8 ptr val mem)
   866  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   867  	for {
   868  		ptr := v_0
   869  		val := v_1
   870  		mem := v_2
   871  		v.reset(OpRISCV64LoweredAtomicAnd32)
   872  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   873  		v0.AuxInt = int64ToAuxInt(^3)
   874  		v0.AddArg(ptr)
   875  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   876  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   877  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   878  		v3.AuxInt = int64ToAuxInt(0xff)
   879  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   880  		v4.AddArg(val)
   881  		v3.AddArg(v4)
   882  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   883  		v5.AuxInt = int64ToAuxInt(3)
   884  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   885  		v6.AuxInt = int64ToAuxInt(3)
   886  		v6.AddArg(ptr)
   887  		v5.AddArg(v6)
   888  		v2.AddArg2(v3, v5)
   889  		v1.AddArg(v2)
   890  		v.AddArg3(v0, v1, mem)
   891  		return true
   892  	}
   893  }
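// How the AtomicAnd8 lowering above works: riscv64 has no byte-wide atomic
// AND, so the operation is emulated on the aligned 32-bit word containing the
// byte. ANDI [^3] rounds the pointer down to a 4-byte boundary, and
// (ANDI [3] ptr) << 3 is the bit offset of the byte within that word
// (riscv64 is little-endian, so byte i occupies bits 8*i..8*i+7). The mask
// NOT((val ^ 0xff) << offset) is all ones outside the target byte and equals
// val inside it, so the 32-bit atomic AND clears exactly the requested bits.
// Worked example (an illustration, not from the source): with ptr&3 == 1 and
// val == 0xF0, val ^ 0xff is 0x0F, shifted to 0x0F00, and its NOT is a mask
// that clears only bits 8..11, so byte 1 of the word is ANDed with 0xF0 and
// every other byte is left unchanged.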
   894  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   895  	v_3 := v.Args[3]
   896  	v_2 := v.Args[2]
   897  	v_1 := v.Args[1]
   898  	v_0 := v.Args[0]
   899  	b := v.Block
   900  	typ := &b.Func.Config.Types
   901  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   902  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   903  	for {
   904  		ptr := v_0
   905  		old := v_1
   906  		new := v_2
   907  		mem := v_3
   908  		v.reset(OpRISCV64LoweredAtomicCas32)
   909  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   910  		v0.AddArg(old)
   911  		v.AddArg4(ptr, v0, new, mem)
   912  		return true
   913  	}
   914  }
   915  func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
   916  	v_2 := v.Args[2]
   917  	v_1 := v.Args[1]
   918  	v_0 := v.Args[0]
   919  	b := v.Block
   920  	typ := &b.Func.Config.Types
   921  	// match: (AtomicOr8 ptr val mem)
   922  	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
   923  	for {
   924  		ptr := v_0
   925  		val := v_1
   926  		mem := v_2
   927  		v.reset(OpRISCV64LoweredAtomicOr32)
   928  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   929  		v0.AuxInt = int64ToAuxInt(^3)
   930  		v0.AddArg(ptr)
   931  		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   932  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   933  		v2.AddArg(val)
   934  		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   935  		v3.AuxInt = int64ToAuxInt(3)
   936  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   937  		v4.AuxInt = int64ToAuxInt(3)
   938  		v4.AddArg(ptr)
   939  		v3.AddArg(v4)
   940  		v1.AddArg2(v2, v3)
   941  		v.AddArg3(v0, v1, mem)
   942  		return true
   943  	}
   944  }
   945  func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
   946  	v_1 := v.Args[1]
   947  	v_0 := v.Args[0]
   948  	b := v.Block
   949  	// match: (Avg64u <t> x y)
   950  	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
   951  	for {
   952  		t := v.Type
   953  		x := v_0
   954  		y := v_1
   955  		v.reset(OpRISCV64ADD)
   956  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
   957  		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   958  		v1.AuxInt = int64ToAuxInt(1)
   959  		v1.AddArg(x)
   960  		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   961  		v2.AuxInt = int64ToAuxInt(1)
   962  		v2.AddArg(y)
   963  		v0.AddArg2(v1, v2)
   964  		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
   965  		v3.AuxInt = int64ToAuxInt(1)
   966  		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
   967  		v4.AddArg2(x, y)
   968  		v3.AddArg(v4)
   969  		v.AddArg2(v0, v3)
   970  		return true
   971  	}
   972  }
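// The Avg64u lowering above uses the overflow-free identity
// (x + y) / 2 == (x >> 1) + (y >> 1) + (x & y & 1): halving each operand
// first cannot overflow, and the final ANDI term restores the carry that is
// lost when both low bits are set. For example, x = 3, y = 5 gives
// 1 + 2 + 1 = 4, which matches (3 + 5) / 2.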
   973  func rewriteValueRISCV64_OpBitLen16(v *Value) bool {
   974  	v_0 := v.Args[0]
   975  	b := v.Block
   976  	typ := &b.Func.Config.Types
   977  	// match: (BitLen16 x)
   978  	// result: (BitLen64 (ZeroExt16to64 x))
   979  	for {
   980  		x := v_0
   981  		v.reset(OpBitLen64)
   982  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
   983  		v0.AddArg(x)
   984  		v.AddArg(v0)
   985  		return true
   986  	}
   987  }
   988  func rewriteValueRISCV64_OpBitLen32(v *Value) bool {
   989  	v_0 := v.Args[0]
   990  	b := v.Block
   991  	typ := &b.Func.Config.Types
   992  	// match: (BitLen32 <t> x)
   993  	// result: (SUB (MOVDconst [32]) (CLZW <t> x))
   994  	for {
   995  		t := v.Type
   996  		x := v_0
   997  		v.reset(OpRISCV64SUB)
   998  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
   999  		v0.AuxInt = int64ToAuxInt(32)
  1000  		v1 := b.NewValue0(v.Pos, OpRISCV64CLZW, t)
  1001  		v1.AddArg(x)
  1002  		v.AddArg2(v0, v1)
  1003  		return true
  1004  	}
  1005  }
  1006  func rewriteValueRISCV64_OpBitLen64(v *Value) bool {
  1007  	v_0 := v.Args[0]
  1008  	b := v.Block
  1009  	typ := &b.Func.Config.Types
  1010  	// match: (BitLen64 <t> x)
  1011  	// result: (SUB (MOVDconst [64]) (CLZ <t> x))
  1012  	for {
  1013  		t := v.Type
  1014  		x := v_0
  1015  		v.reset(OpRISCV64SUB)
  1016  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  1017  		v0.AuxInt = int64ToAuxInt(64)
  1018  		v1 := b.NewValue0(v.Pos, OpRISCV64CLZ, t)
  1019  		v1.AddArg(x)
  1020  		v.AddArg2(v0, v1)
  1021  		return true
  1022  	}
  1023  }
  1024  func rewriteValueRISCV64_OpBitLen8(v *Value) bool {
  1025  	v_0 := v.Args[0]
  1026  	b := v.Block
  1027  	typ := &b.Func.Config.Types
  1028  	// match: (BitLen8 x)
  1029  	// result: (BitLen64 (ZeroExt8to64 x))
  1030  	for {
  1031  		x := v_0
  1032  		v.reset(OpBitLen64)
  1033  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1034  		v0.AddArg(x)
  1035  		v.AddArg(v0)
  1036  		return true
  1037  	}
  1038  }
  1039  func rewriteValueRISCV64_OpBswap16(v *Value) bool {
  1040  	v_0 := v.Args[0]
  1041  	b := v.Block
  1042  	// match: (Bswap16 <t> x)
  1043  	// result: (SRLI [48] (REV8 <t> x))
  1044  	for {
  1045  		t := v.Type
  1046  		x := v_0
  1047  		v.reset(OpRISCV64SRLI)
  1048  		v.AuxInt = int64ToAuxInt(48)
  1049  		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
  1050  		v0.AddArg(x)
  1051  		v.AddArg(v0)
  1052  		return true
  1053  	}
  1054  }
  1055  func rewriteValueRISCV64_OpBswap32(v *Value) bool {
  1056  	v_0 := v.Args[0]
  1057  	b := v.Block
  1058  	// match: (Bswap32 <t> x)
  1059  	// result: (SRLI [32] (REV8 <t> x))
  1060  	for {
  1061  		t := v.Type
  1062  		x := v_0
  1063  		v.reset(OpRISCV64SRLI)
  1064  		v.AuxInt = int64ToAuxInt(32)
  1065  		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
  1066  		v0.AddArg(x)
  1067  		v.AddArg(v0)
  1068  		return true
  1069  	}
  1070  }
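// Bswap16 and Bswap32 above reuse the 64-bit byte-reverse instruction: REV8
// reverses all eight bytes of the register, which leaves the two (resp. four)
// interesting bytes, already swapped, in the top 16 (resp. 32) bits, and the
// SRLI by 48 (resp. 32) shifts them back down, zeroing the upper bits in the
// process.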
  1071  func rewriteValueRISCV64_OpConst16(v *Value) bool {
  1072  	// match: (Const16 [val])
  1073  	// result: (MOVDconst [int64(val)])
  1074  	for {
  1075  		val := auxIntToInt16(v.AuxInt)
  1076  		v.reset(OpRISCV64MOVDconst)
  1077  		v.AuxInt = int64ToAuxInt(int64(val))
  1078  		return true
  1079  	}
  1080  }
  1081  func rewriteValueRISCV64_OpConst32(v *Value) bool {
  1082  	// match: (Const32 [val])
  1083  	// result: (MOVDconst [int64(val)])
  1084  	for {
  1085  		val := auxIntToInt32(v.AuxInt)
  1086  		v.reset(OpRISCV64MOVDconst)
  1087  		v.AuxInt = int64ToAuxInt(int64(val))
  1088  		return true
  1089  	}
  1090  }
  1091  func rewriteValueRISCV64_OpConst32F(v *Value) bool {
  1092  	b := v.Block
  1093  	typ := &b.Func.Config.Types
  1094  	// match: (Const32F [val])
  1095  	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
  1096  	for {
  1097  		val := auxIntToFloat32(v.AuxInt)
  1098  		v.reset(OpRISCV64FMVSX)
  1099  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  1100  		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
  1101  		v.AddArg(v0)
  1102  		return true
  1103  	}
  1104  }
  1105  func rewriteValueRISCV64_OpConst64(v *Value) bool {
  1106  	// match: (Const64 [val])
  1107  	// result: (MOVDconst [int64(val)])
  1108  	for {
  1109  		val := auxIntToInt64(v.AuxInt)
  1110  		v.reset(OpRISCV64MOVDconst)
  1111  		v.AuxInt = int64ToAuxInt(int64(val))
  1112  		return true
  1113  	}
  1114  }
  1115  func rewriteValueRISCV64_OpConst64F(v *Value) bool {
  1116  	b := v.Block
  1117  	typ := &b.Func.Config.Types
  1118  	// match: (Const64F [val])
  1119  	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
  1120  	for {
  1121  		val := auxIntToFloat64(v.AuxInt)
  1122  		v.reset(OpRISCV64FMVDX)
  1123  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  1124  		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
  1125  		v.AddArg(v0)
  1126  		return true
  1127  	}
  1128  }
  1129  func rewriteValueRISCV64_OpConst8(v *Value) bool {
  1130  	// match: (Const8 [val])
  1131  	// result: (MOVDconst [int64(val)])
  1132  	for {
  1133  		val := auxIntToInt8(v.AuxInt)
  1134  		v.reset(OpRISCV64MOVDconst)
  1135  		v.AuxInt = int64ToAuxInt(int64(val))
  1136  		return true
  1137  	}
  1138  }
  1139  func rewriteValueRISCV64_OpConstBool(v *Value) bool {
  1140  	// match: (ConstBool [val])
  1141  	// result: (MOVDconst [int64(b2i(val))])
  1142  	for {
  1143  		val := auxIntToBool(v.AuxInt)
  1144  		v.reset(OpRISCV64MOVDconst)
  1145  		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
  1146  		return true
  1147  	}
  1148  }
  1149  func rewriteValueRISCV64_OpConstNil(v *Value) bool {
  1150  	// match: (ConstNil)
  1151  	// result: (MOVDconst [0])
  1152  	for {
  1153  		v.reset(OpRISCV64MOVDconst)
  1154  		v.AuxInt = int64ToAuxInt(0)
  1155  		return true
  1156  	}
  1157  }
  1158  func rewriteValueRISCV64_OpCtz16(v *Value) bool {
  1159  	v_0 := v.Args[0]
  1160  	b := v.Block
  1161  	typ := &b.Func.Config.Types
  1162  	// match: (Ctz16 x)
  1163  	// result: (CTZW (ORI <typ.UInt32> [1<<16] x))
  1164  	for {
  1165  		x := v_0
  1166  		v.reset(OpRISCV64CTZW)
  1167  		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
  1168  		v0.AuxInt = int64ToAuxInt(1 << 16)
  1169  		v0.AddArg(x)
  1170  		v.AddArg(v0)
  1171  		return true
  1172  	}
  1173  }
  1174  func rewriteValueRISCV64_OpCtz8(v *Value) bool {
  1175  	v_0 := v.Args[0]
  1176  	b := v.Block
  1177  	typ := &b.Func.Config.Types
  1178  	// match: (Ctz8 x)
  1179  	// result: (CTZW (ORI <typ.UInt32> [1<<8] x))
  1180  	for {
  1181  		x := v_0
  1182  		v.reset(OpRISCV64CTZW)
  1183  		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
  1184  		v0.AuxInt = int64ToAuxInt(1 << 8)
  1185  		v0.AddArg(x)
  1186  		v.AddArg(v0)
  1187  		return true
  1188  	}
  1189  }
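// Ctz16 and Ctz8 above OR a sentinel bit into position 16 (resp. 8) before
// counting. The low bits are unaffected, so a nonzero input yields the same
// trailing-zero count, while an input whose low 16 (resp. 8) bits are all
// zero counts exactly up to the sentinel, matching Go's Ctz16(0) == 16 and
// Ctz8(0) == 8 without needing to zero-extend the argument first.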
  1190  func rewriteValueRISCV64_OpDiv16(v *Value) bool {
  1191  	v_1 := v.Args[1]
  1192  	v_0 := v.Args[0]
  1193  	b := v.Block
  1194  	typ := &b.Func.Config.Types
  1195  	// match: (Div16 x y [false])
  1196  	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
  1197  	for {
  1198  		if auxIntToBool(v.AuxInt) != false {
  1199  			break
  1200  		}
  1201  		x := v_0
  1202  		y := v_1
  1203  		v.reset(OpRISCV64DIVW)
  1204  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1205  		v0.AddArg(x)
  1206  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1207  		v1.AddArg(y)
  1208  		v.AddArg2(v0, v1)
  1209  		return true
  1210  	}
  1211  	return false
  1212  }
  1213  func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
  1214  	v_1 := v.Args[1]
  1215  	v_0 := v.Args[0]
  1216  	b := v.Block
  1217  	typ := &b.Func.Config.Types
  1218  	// match: (Div16u x y)
  1219  	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
  1220  	for {
  1221  		x := v_0
  1222  		y := v_1
  1223  		v.reset(OpRISCV64DIVUW)
  1224  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1225  		v0.AddArg(x)
  1226  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1227  		v1.AddArg(y)
  1228  		v.AddArg2(v0, v1)
  1229  		return true
  1230  	}
  1231  }
  1232  func rewriteValueRISCV64_OpDiv32(v *Value) bool {
  1233  	v_1 := v.Args[1]
  1234  	v_0 := v.Args[0]
  1235  	// match: (Div32 x y [false])
  1236  	// result: (DIVW x y)
  1237  	for {
  1238  		if auxIntToBool(v.AuxInt) != false {
  1239  			break
  1240  		}
  1241  		x := v_0
  1242  		y := v_1
  1243  		v.reset(OpRISCV64DIVW)
  1244  		v.AddArg2(x, y)
  1245  		return true
  1246  	}
  1247  	return false
  1248  }
  1249  func rewriteValueRISCV64_OpDiv64(v *Value) bool {
  1250  	v_1 := v.Args[1]
  1251  	v_0 := v.Args[0]
  1252  	// match: (Div64 x y [false])
  1253  	// result: (DIV x y)
  1254  	for {
  1255  		if auxIntToBool(v.AuxInt) != false {
  1256  			break
  1257  		}
  1258  		x := v_0
  1259  		y := v_1
  1260  		v.reset(OpRISCV64DIV)
  1261  		v.AddArg2(x, y)
  1262  		return true
  1263  	}
  1264  	return false
  1265  }
  1266  func rewriteValueRISCV64_OpDiv8(v *Value) bool {
  1267  	v_1 := v.Args[1]
  1268  	v_0 := v.Args[0]
  1269  	b := v.Block
  1270  	typ := &b.Func.Config.Types
  1271  	// match: (Div8 x y)
  1272  	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
  1273  	for {
  1274  		x := v_0
  1275  		y := v_1
  1276  		v.reset(OpRISCV64DIVW)
  1277  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1278  		v0.AddArg(x)
  1279  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1280  		v1.AddArg(y)
  1281  		v.AddArg2(v0, v1)
  1282  		return true
  1283  	}
  1284  }
  1285  func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
  1286  	v_1 := v.Args[1]
  1287  	v_0 := v.Args[0]
  1288  	b := v.Block
  1289  	typ := &b.Func.Config.Types
  1290  	// match: (Div8u x y)
  1291  	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
  1292  	for {
  1293  		x := v_0
  1294  		y := v_1
  1295  		v.reset(OpRISCV64DIVUW)
  1296  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1297  		v0.AddArg(x)
  1298  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1299  		v1.AddArg(y)
  1300  		v.AddArg2(v0, v1)
  1301  		return true
  1302  	}
  1303  }
  1304  func rewriteValueRISCV64_OpEq16(v *Value) bool {
  1305  	v_1 := v.Args[1]
  1306  	v_0 := v.Args[0]
  1307  	b := v.Block
  1308  	typ := &b.Func.Config.Types
  1309  	// match: (Eq16 x y)
  1310  	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
  1311  	for {
  1312  		x := v_0
  1313  		y := v_1
  1314  		v.reset(OpRISCV64SEQZ)
  1315  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1316  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1317  		v1.AddArg(x)
  1318  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1319  		v2.AddArg(y)
  1320  		v0.AddArg2(v1, v2)
  1321  		v.AddArg(v0)
  1322  		return true
  1323  	}
  1324  }
  1325  func rewriteValueRISCV64_OpEq32(v *Value) bool {
  1326  	v_1 := v.Args[1]
  1327  	v_0 := v.Args[0]
  1328  	b := v.Block
  1329  	typ := &b.Func.Config.Types
  1330  	// match: (Eq32 x y)
  1331  	// cond: x.Type.IsSigned()
  1332  	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
  1333  	for {
  1334  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1335  			x := v_0
  1336  			y := v_1
  1337  			if !(x.Type.IsSigned()) {
  1338  				continue
  1339  			}
  1340  			v.reset(OpRISCV64SEQZ)
  1341  			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1342  			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1343  			v1.AddArg(x)
  1344  			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1345  			v2.AddArg(y)
  1346  			v0.AddArg2(v1, v2)
  1347  			v.AddArg(v0)
  1348  			return true
  1349  		}
  1350  		break
  1351  	}
  1352  	// match: (Eq32 x y)
  1353  	// cond: !x.Type.IsSigned()
  1354  	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
  1355  	for {
  1356  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1357  			x := v_0
  1358  			y := v_1
  1359  			if !(!x.Type.IsSigned()) {
  1360  				continue
  1361  			}
  1362  			v.reset(OpRISCV64SEQZ)
  1363  			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1364  			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1365  			v1.AddArg(x)
  1366  			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1367  			v2.AddArg(y)
  1368  			v0.AddArg2(v1, v2)
  1369  			v.AddArg(v0)
  1370  			return true
  1371  		}
  1372  		break
  1373  	}
  1374  	return false
  1375  }
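// Eq32 above picks the widening to match the operand type: sign extension for
// signed 32-bit values and zero extension for unsigned ones. Either choice
// preserves equality, since both operands are extended the same way;
// presumably matching the type makes the extension more likely to coincide
// with one that is already present and so be removed by later rewrites.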
  1376  func rewriteValueRISCV64_OpEq64(v *Value) bool {
  1377  	v_1 := v.Args[1]
  1378  	v_0 := v.Args[0]
  1379  	b := v.Block
  1380  	// match: (Eq64 x y)
  1381  	// result: (SEQZ (SUB <x.Type> x y))
  1382  	for {
  1383  		x := v_0
  1384  		y := v_1
  1385  		v.reset(OpRISCV64SEQZ)
  1386  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1387  		v0.AddArg2(x, y)
  1388  		v.AddArg(v0)
  1389  		return true
  1390  	}
  1391  }
  1392  func rewriteValueRISCV64_OpEq8(v *Value) bool {
  1393  	v_1 := v.Args[1]
  1394  	v_0 := v.Args[0]
  1395  	b := v.Block
  1396  	typ := &b.Func.Config.Types
  1397  	// match: (Eq8 x y)
  1398  	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
  1399  	for {
  1400  		x := v_0
  1401  		y := v_1
  1402  		v.reset(OpRISCV64SEQZ)
  1403  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
  1404  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1405  		v1.AddArg(x)
  1406  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1407  		v2.AddArg(y)
  1408  		v0.AddArg2(v1, v2)
  1409  		v.AddArg(v0)
  1410  		return true
  1411  	}
  1412  }
  1413  func rewriteValueRISCV64_OpEqB(v *Value) bool {
  1414  	v_1 := v.Args[1]
  1415  	v_0 := v.Args[0]
  1416  	b := v.Block
  1417  	typ := &b.Func.Config.Types
  1418  	// match: (EqB x y)
  1419  	// result: (SEQZ (SUB <typ.Bool> x y))
  1420  	for {
  1421  		x := v_0
  1422  		y := v_1
  1423  		v.reset(OpRISCV64SEQZ)
  1424  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
  1425  		v0.AddArg2(x, y)
  1426  		v.AddArg(v0)
  1427  		return true
  1428  	}
  1429  }
  1430  func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
  1431  	v_1 := v.Args[1]
  1432  	v_0 := v.Args[0]
  1433  	b := v.Block
  1434  	typ := &b.Func.Config.Types
  1435  	// match: (EqPtr x y)
  1436  	// result: (SEQZ (SUB <typ.Uintptr> x y))
  1437  	for {
  1438  		x := v_0
  1439  		y := v_1
  1440  		v.reset(OpRISCV64SEQZ)
  1441  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
  1442  		v0.AddArg2(x, y)
  1443  		v.AddArg(v0)
  1444  		return true
  1445  	}
  1446  }
  1447  func rewriteValueRISCV64_OpHmul32(v *Value) bool {
  1448  	v_1 := v.Args[1]
  1449  	v_0 := v.Args[0]
  1450  	b := v.Block
  1451  	typ := &b.Func.Config.Types
  1452  	// match: (Hmul32 x y)
  1453  	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
  1454  	for {
  1455  		x := v_0
  1456  		y := v_1
  1457  		v.reset(OpRISCV64SRAI)
  1458  		v.AuxInt = int64ToAuxInt(32)
  1459  		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
  1460  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1461  		v1.AddArg(x)
  1462  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1463  		v2.AddArg(y)
  1464  		v0.AddArg2(v1, v2)
  1465  		v.AddArg(v0)
  1466  		return true
  1467  	}
  1468  }
  1469  func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
  1470  	v_1 := v.Args[1]
  1471  	v_0 := v.Args[0]
  1472  	b := v.Block
  1473  	typ := &b.Func.Config.Types
  1474  	// match: (Hmul32u x y)
  1475  	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
  1476  	for {
  1477  		x := v_0
  1478  		y := v_1
  1479  		v.reset(OpRISCV64SRLI)
  1480  		v.AuxInt = int64ToAuxInt(32)
  1481  		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
  1482  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1483  		v1.AddArg(x)
  1484  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1485  		v2.AddArg(y)
  1486  		v0.AddArg2(v1, v2)
  1487  		v.AddArg(v0)
  1488  		return true
  1489  	}
  1490  }
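// Hmul32 and Hmul32u above use no dedicated 32-bit high-multiply: both
// operands are extended to 64 bits, a full 64-bit MUL is performed, and the
// upper half of the product is extracted with an arithmetic (signed) or
// logical (unsigned) right shift by 32.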
  1491  func rewriteValueRISCV64_OpLeq16(v *Value) bool {
  1492  	v_1 := v.Args[1]
  1493  	v_0 := v.Args[0]
  1494  	b := v.Block
  1495  	typ := &b.Func.Config.Types
  1496  	// match: (Leq16 x y)
  1497  	// result: (Not (Less16 y x))
  1498  	for {
  1499  		x := v_0
  1500  		y := v_1
  1501  		v.reset(OpNot)
  1502  		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
  1503  		v0.AddArg2(y, x)
  1504  		v.AddArg(v0)
  1505  		return true
  1506  	}
  1507  }
  1508  func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
  1509  	v_1 := v.Args[1]
  1510  	v_0 := v.Args[0]
  1511  	b := v.Block
  1512  	typ := &b.Func.Config.Types
  1513  	// match: (Leq16U x y)
  1514  	// result: (Not (Less16U y x))
  1515  	for {
  1516  		x := v_0
  1517  		y := v_1
  1518  		v.reset(OpNot)
  1519  		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
  1520  		v0.AddArg2(y, x)
  1521  		v.AddArg(v0)
  1522  		return true
  1523  	}
  1524  }
  1525  func rewriteValueRISCV64_OpLeq32(v *Value) bool {
  1526  	v_1 := v.Args[1]
  1527  	v_0 := v.Args[0]
  1528  	b := v.Block
  1529  	typ := &b.Func.Config.Types
  1530  	// match: (Leq32 x y)
  1531  	// result: (Not (Less32 y x))
  1532  	for {
  1533  		x := v_0
  1534  		y := v_1
  1535  		v.reset(OpNot)
  1536  		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
  1537  		v0.AddArg2(y, x)
  1538  		v.AddArg(v0)
  1539  		return true
  1540  	}
  1541  }
  1542  func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
  1543  	v_1 := v.Args[1]
  1544  	v_0 := v.Args[0]
  1545  	b := v.Block
  1546  	typ := &b.Func.Config.Types
  1547  	// match: (Leq32U x y)
  1548  	// result: (Not (Less32U y x))
  1549  	for {
  1550  		x := v_0
  1551  		y := v_1
  1552  		v.reset(OpNot)
  1553  		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
  1554  		v0.AddArg2(y, x)
  1555  		v.AddArg(v0)
  1556  		return true
  1557  	}
  1558  }
  1559  func rewriteValueRISCV64_OpLeq64(v *Value) bool {
  1560  	v_1 := v.Args[1]
  1561  	v_0 := v.Args[0]
  1562  	b := v.Block
  1563  	typ := &b.Func.Config.Types
  1564  	// match: (Leq64 x y)
  1565  	// result: (Not (Less64 y x))
  1566  	for {
  1567  		x := v_0
  1568  		y := v_1
  1569  		v.reset(OpNot)
  1570  		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
  1571  		v0.AddArg2(y, x)
  1572  		v.AddArg(v0)
  1573  		return true
  1574  	}
  1575  }
  1576  func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
  1577  	v_1 := v.Args[1]
  1578  	v_0 := v.Args[0]
  1579  	b := v.Block
  1580  	typ := &b.Func.Config.Types
  1581  	// match: (Leq64U x y)
  1582  	// result: (Not (Less64U y x))
  1583  	for {
  1584  		x := v_0
  1585  		y := v_1
  1586  		v.reset(OpNot)
  1587  		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
  1588  		v0.AddArg2(y, x)
  1589  		v.AddArg(v0)
  1590  		return true
  1591  	}
  1592  }
  1593  func rewriteValueRISCV64_OpLeq8(v *Value) bool {
  1594  	v_1 := v.Args[1]
  1595  	v_0 := v.Args[0]
  1596  	b := v.Block
  1597  	typ := &b.Func.Config.Types
  1598  	// match: (Leq8 x y)
  1599  	// result: (Not (Less8 y x))
  1600  	for {
  1601  		x := v_0
  1602  		y := v_1
  1603  		v.reset(OpNot)
  1604  		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
  1605  		v0.AddArg2(y, x)
  1606  		v.AddArg(v0)
  1607  		return true
  1608  	}
  1609  }
  1610  func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
  1611  	v_1 := v.Args[1]
  1612  	v_0 := v.Args[0]
  1613  	b := v.Block
  1614  	typ := &b.Func.Config.Types
  1615  	// match: (Leq8U x y)
  1616  	// result: (Not (Less8U y x))
  1617  	for {
  1618  		x := v_0
  1619  		y := v_1
  1620  		v.reset(OpNot)
  1621  		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
  1622  		v0.AddArg2(y, x)
  1623  		v.AddArg(v0)
  1624  		return true
  1625  	}
  1626  }
  1627  func rewriteValueRISCV64_OpLess16(v *Value) bool {
  1628  	v_1 := v.Args[1]
  1629  	v_0 := v.Args[0]
  1630  	b := v.Block
  1631  	typ := &b.Func.Config.Types
  1632  	// match: (Less16 x y)
  1633  	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
  1634  	for {
  1635  		x := v_0
  1636  		y := v_1
  1637  		v.reset(OpRISCV64SLT)
  1638  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1639  		v0.AddArg(x)
  1640  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1641  		v1.AddArg(y)
  1642  		v.AddArg2(v0, v1)
  1643  		return true
  1644  	}
  1645  }
  1646  func rewriteValueRISCV64_OpLess16U(v *Value) bool {
  1647  	v_1 := v.Args[1]
  1648  	v_0 := v.Args[0]
  1649  	b := v.Block
  1650  	typ := &b.Func.Config.Types
  1651  	// match: (Less16U x y)
  1652  	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
  1653  	for {
  1654  		x := v_0
  1655  		y := v_1
  1656  		v.reset(OpRISCV64SLTU)
  1657  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1658  		v0.AddArg(x)
  1659  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1660  		v1.AddArg(y)
  1661  		v.AddArg2(v0, v1)
  1662  		return true
  1663  	}
  1664  }
  1665  func rewriteValueRISCV64_OpLess32(v *Value) bool {
  1666  	v_1 := v.Args[1]
  1667  	v_0 := v.Args[0]
  1668  	b := v.Block
  1669  	typ := &b.Func.Config.Types
  1670  	// match: (Less32 x y)
  1671  	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
  1672  	for {
  1673  		x := v_0
  1674  		y := v_1
  1675  		v.reset(OpRISCV64SLT)
  1676  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1677  		v0.AddArg(x)
  1678  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1679  		v1.AddArg(y)
  1680  		v.AddArg2(v0, v1)
  1681  		return true
  1682  	}
  1683  }
  1684  func rewriteValueRISCV64_OpLess32U(v *Value) bool {
  1685  	v_1 := v.Args[1]
  1686  	v_0 := v.Args[0]
  1687  	b := v.Block
  1688  	typ := &b.Func.Config.Types
  1689  	// match: (Less32U x y)
  1690  	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
  1691  	for {
  1692  		x := v_0
  1693  		y := v_1
  1694  		v.reset(OpRISCV64SLTU)
  1695  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1696  		v0.AddArg(x)
  1697  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1698  		v1.AddArg(y)
  1699  		v.AddArg2(v0, v1)
  1700  		return true
  1701  	}
  1702  }
  1703  func rewriteValueRISCV64_OpLess8(v *Value) bool {
  1704  	v_1 := v.Args[1]
  1705  	v_0 := v.Args[0]
  1706  	b := v.Block
  1707  	typ := &b.Func.Config.Types
  1708  	// match: (Less8 x y)
  1709  	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
  1710  	for {
  1711  		x := v_0
  1712  		y := v_1
  1713  		v.reset(OpRISCV64SLT)
  1714  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1715  		v0.AddArg(x)
  1716  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1717  		v1.AddArg(y)
  1718  		v.AddArg2(v0, v1)
  1719  		return true
  1720  	}
  1721  }
  1722  func rewriteValueRISCV64_OpLess8U(v *Value) bool {
  1723  	v_1 := v.Args[1]
  1724  	v_0 := v.Args[0]
  1725  	b := v.Block
  1726  	typ := &b.Func.Config.Types
  1727  	// match: (Less8U x y)
  1728  	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
  1729  	for {
  1730  		x := v_0
  1731  		y := v_1
  1732  		v.reset(OpRISCV64SLTU)
  1733  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1734  		v0.AddArg(x)
  1735  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1736  		v1.AddArg(y)
  1737  		v.AddArg2(v0, v1)
  1738  		return true
  1739  	}
  1740  }
  1741  func rewriteValueRISCV64_OpLoad(v *Value) bool {
  1742  	v_1 := v.Args[1]
  1743  	v_0 := v.Args[0]
  1744  	// match: (Load <t> ptr mem)
  1745  	// cond: t.IsBoolean()
  1746  	// result: (MOVBUload ptr mem)
  1747  	for {
  1748  		t := v.Type
  1749  		ptr := v_0
  1750  		mem := v_1
  1751  		if !(t.IsBoolean()) {
  1752  			break
  1753  		}
  1754  		v.reset(OpRISCV64MOVBUload)
  1755  		v.AddArg2(ptr, mem)
  1756  		return true
  1757  	}
  1758  	// match: (Load <t> ptr mem)
  1759  	// cond: ( is8BitInt(t) && t.IsSigned())
  1760  	// result: (MOVBload ptr mem)
  1761  	for {
  1762  		t := v.Type
  1763  		ptr := v_0
  1764  		mem := v_1
  1765  		if !(is8BitInt(t) && t.IsSigned()) {
  1766  			break
  1767  		}
  1768  		v.reset(OpRISCV64MOVBload)
  1769  		v.AddArg2(ptr, mem)
  1770  		return true
  1771  	}
  1772  	// match: (Load <t> ptr mem)
  1773  	// cond: (is8BitInt(t) && !t.IsSigned())
  1774  	// result: (MOVBUload ptr mem)
  1775  	for {
  1776  		t := v.Type
  1777  		ptr := v_0
  1778  		mem := v_1
  1779  		if !(is8BitInt(t) && !t.IsSigned()) {
  1780  			break
  1781  		}
  1782  		v.reset(OpRISCV64MOVBUload)
  1783  		v.AddArg2(ptr, mem)
  1784  		return true
  1785  	}
  1786  	// match: (Load <t> ptr mem)
  1787  	// cond: (is16BitInt(t) && t.IsSigned())
  1788  	// result: (MOVHload ptr mem)
  1789  	for {
  1790  		t := v.Type
  1791  		ptr := v_0
  1792  		mem := v_1
  1793  		if !(is16BitInt(t) && t.IsSigned()) {
  1794  			break
  1795  		}
  1796  		v.reset(OpRISCV64MOVHload)
  1797  		v.AddArg2(ptr, mem)
  1798  		return true
  1799  	}
  1800  	// match: (Load <t> ptr mem)
  1801  	// cond: (is16BitInt(t) && !t.IsSigned())
  1802  	// result: (MOVHUload ptr mem)
  1803  	for {
  1804  		t := v.Type
  1805  		ptr := v_0
  1806  		mem := v_1
  1807  		if !(is16BitInt(t) && !t.IsSigned()) {
  1808  			break
  1809  		}
  1810  		v.reset(OpRISCV64MOVHUload)
  1811  		v.AddArg2(ptr, mem)
  1812  		return true
  1813  	}
  1814  	// match: (Load <t> ptr mem)
  1815  	// cond: (is32BitInt(t) && t.IsSigned())
  1816  	// result: (MOVWload ptr mem)
  1817  	for {
  1818  		t := v.Type
  1819  		ptr := v_0
  1820  		mem := v_1
  1821  		if !(is32BitInt(t) && t.IsSigned()) {
  1822  			break
  1823  		}
  1824  		v.reset(OpRISCV64MOVWload)
  1825  		v.AddArg2(ptr, mem)
  1826  		return true
  1827  	}
  1828  	// match: (Load <t> ptr mem)
  1829  	// cond: (is32BitInt(t) && !t.IsSigned())
  1830  	// result: (MOVWUload ptr mem)
  1831  	for {
  1832  		t := v.Type
  1833  		ptr := v_0
  1834  		mem := v_1
  1835  		if !(is32BitInt(t) && !t.IsSigned()) {
  1836  			break
  1837  		}
  1838  		v.reset(OpRISCV64MOVWUload)
  1839  		v.AddArg2(ptr, mem)
  1840  		return true
  1841  	}
  1842  	// match: (Load <t> ptr mem)
  1843  	// cond: (is64BitInt(t) || isPtr(t))
  1844  	// result: (MOVDload ptr mem)
  1845  	for {
  1846  		t := v.Type
  1847  		ptr := v_0
  1848  		mem := v_1
  1849  		if !(is64BitInt(t) || isPtr(t)) {
  1850  			break
  1851  		}
  1852  		v.reset(OpRISCV64MOVDload)
  1853  		v.AddArg2(ptr, mem)
  1854  		return true
  1855  	}
  1856  	// match: (Load <t> ptr mem)
  1857  	// cond: is32BitFloat(t)
  1858  	// result: (FMOVWload ptr mem)
  1859  	for {
  1860  		t := v.Type
  1861  		ptr := v_0
  1862  		mem := v_1
  1863  		if !(is32BitFloat(t)) {
  1864  			break
  1865  		}
  1866  		v.reset(OpRISCV64FMOVWload)
  1867  		v.AddArg2(ptr, mem)
  1868  		return true
  1869  	}
  1870  	// match: (Load <t> ptr mem)
  1871  	// cond: is64BitFloat(t)
  1872  	// result: (FMOVDload ptr mem)
  1873  	for {
  1874  		t := v.Type
  1875  		ptr := v_0
  1876  		mem := v_1
  1877  		if !(is64BitFloat(t)) {
  1878  			break
  1879  		}
  1880  		v.reset(OpRISCV64FMOVDload)
  1881  		v.AddArg2(ptr, mem)
  1882  		return true
  1883  	}
  1884  	return false
  1885  }
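// Note: the Load lowering above dispatches purely on the loaded type:
// booleans and unsigned integers use the zero-extending MOV*Uload forms,
// signed integers use the sign-extending MOV*load forms, 64-bit integers and
// pointers use MOVDload, and floats use FMOVWload/FMOVDload. A pointer load
// and an int64 load therefore lower to the same MOVDload instruction.
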
  1886  func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
  1887  	v_1 := v.Args[1]
  1888  	v_0 := v.Args[0]
  1889  	b := v.Block
  1890  	typ := &b.Func.Config.Types
  1891  	// match: (LocalAddr <t> {sym} base mem)
  1892  	// cond: t.Elem().HasPointers()
  1893  	// result: (MOVaddr {sym} (SPanchored base mem))
  1894  	for {
  1895  		t := v.Type
  1896  		sym := auxToSym(v.Aux)
  1897  		base := v_0
  1898  		mem := v_1
  1899  		if !(t.Elem().HasPointers()) {
  1900  			break
  1901  		}
  1902  		v.reset(OpRISCV64MOVaddr)
  1903  		v.Aux = symToAux(sym)
  1904  		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
  1905  		v0.AddArg2(base, mem)
  1906  		v.AddArg(v0)
  1907  		return true
  1908  	}
  1909  	// match: (LocalAddr <t> {sym} base _)
  1910  	// cond: !t.Elem().HasPointers()
  1911  	// result: (MOVaddr {sym} base)
  1912  	for {
  1913  		t := v.Type
  1914  		sym := auxToSym(v.Aux)
  1915  		base := v_0
  1916  		if !(!t.Elem().HasPointers()) {
  1917  			break
  1918  		}
  1919  		v.reset(OpRISCV64MOVaddr)
  1920  		v.Aux = symToAux(sym)
  1921  		v.AddArg(base)
  1922  		return true
  1923  	}
  1924  	return false
  1925  }
  1926  func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
  1927  	v_1 := v.Args[1]
  1928  	v_0 := v.Args[0]
  1929  	b := v.Block
  1930  	typ := &b.Func.Config.Types
  1931  	// match: (Lsh16x16 <t> x y)
  1932  	// cond: !shiftIsBounded(v)
  1933  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  1934  	for {
  1935  		t := v.Type
  1936  		x := v_0
  1937  		y := v_1
  1938  		if !(!shiftIsBounded(v)) {
  1939  			break
  1940  		}
  1941  		v.reset(OpRISCV64AND)
  1942  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  1943  		v0.AddArg2(x, y)
  1944  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  1945  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  1946  		v2.AuxInt = int64ToAuxInt(64)
  1947  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1948  		v3.AddArg(y)
  1949  		v2.AddArg(v3)
  1950  		v1.AddArg(v2)
  1951  		v.AddArg2(v0, v1)
  1952  		return true
  1953  	}
  1954  	// match: (Lsh16x16 x y)
  1955  	// cond: shiftIsBounded(v)
  1956  	// result: (SLL x y)
  1957  	for {
  1958  		x := v_0
  1959  		y := v_1
  1960  		if !(shiftIsBounded(v)) {
  1961  			break
  1962  		}
  1963  		v.reset(OpRISCV64SLL)
  1964  		v.AddArg2(x, y)
  1965  		return true
  1966  	}
  1967  	return false
  1968  }
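// Note: the unbounded-shift case above (and the analogous Lsh lowerings that
// follow) all rely on the same masking trick. Go requires a shift count that
// is >= the operand width to produce 0, while the RV64 SLL instruction uses
// only the low six bits of the count, so counts >= 64 would silently wrap.
// SLTIU [64] applied to the (zero-extended) count yields 1 when the count is
// < 64 and 0 otherwise; Neg* turns that into an all-ones or all-zero mask,
// and the final AND either keeps the SLL result or forces it to 0. For
// example, with a count of 70 the SLTIU is 0, the mask is 0, and the shift
// result is 0 as required.
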
  1969  func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
  1970  	v_1 := v.Args[1]
  1971  	v_0 := v.Args[0]
  1972  	b := v.Block
  1973  	typ := &b.Func.Config.Types
  1974  	// match: (Lsh16x32 <t> x y)
  1975  	// cond: !shiftIsBounded(v)
  1976  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  1977  	for {
  1978  		t := v.Type
  1979  		x := v_0
  1980  		y := v_1
  1981  		if !(!shiftIsBounded(v)) {
  1982  			break
  1983  		}
  1984  		v.reset(OpRISCV64AND)
  1985  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  1986  		v0.AddArg2(x, y)
  1987  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  1988  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  1989  		v2.AuxInt = int64ToAuxInt(64)
  1990  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1991  		v3.AddArg(y)
  1992  		v2.AddArg(v3)
  1993  		v1.AddArg(v2)
  1994  		v.AddArg2(v0, v1)
  1995  		return true
  1996  	}
  1997  	// match: (Lsh16x32 x y)
  1998  	// cond: shiftIsBounded(v)
  1999  	// result: (SLL x y)
  2000  	for {
  2001  		x := v_0
  2002  		y := v_1
  2003  		if !(shiftIsBounded(v)) {
  2004  			break
  2005  		}
  2006  		v.reset(OpRISCV64SLL)
  2007  		v.AddArg2(x, y)
  2008  		return true
  2009  	}
  2010  	return false
  2011  }
  2012  func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
  2013  	v_1 := v.Args[1]
  2014  	v_0 := v.Args[0]
  2015  	b := v.Block
  2016  	// match: (Lsh16x64 <t> x y)
  2017  	// cond: !shiftIsBounded(v)
  2018  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
  2019  	for {
  2020  		t := v.Type
  2021  		x := v_0
  2022  		y := v_1
  2023  		if !(!shiftIsBounded(v)) {
  2024  			break
  2025  		}
  2026  		v.reset(OpRISCV64AND)
  2027  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2028  		v0.AddArg2(x, y)
  2029  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  2030  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2031  		v2.AuxInt = int64ToAuxInt(64)
  2032  		v2.AddArg(y)
  2033  		v1.AddArg(v2)
  2034  		v.AddArg2(v0, v1)
  2035  		return true
  2036  	}
  2037  	// match: (Lsh16x64 x y)
  2038  	// cond: shiftIsBounded(v)
  2039  	// result: (SLL x y)
  2040  	for {
  2041  		x := v_0
  2042  		y := v_1
  2043  		if !(shiftIsBounded(v)) {
  2044  			break
  2045  		}
  2046  		v.reset(OpRISCV64SLL)
  2047  		v.AddArg2(x, y)
  2048  		return true
  2049  	}
  2050  	return false
  2051  }
  2052  func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
  2053  	v_1 := v.Args[1]
  2054  	v_0 := v.Args[0]
  2055  	b := v.Block
  2056  	typ := &b.Func.Config.Types
  2057  	// match: (Lsh16x8 <t> x y)
  2058  	// cond: !shiftIsBounded(v)
  2059  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2060  	for {
  2061  		t := v.Type
  2062  		x := v_0
  2063  		y := v_1
  2064  		if !(!shiftIsBounded(v)) {
  2065  			break
  2066  		}
  2067  		v.reset(OpRISCV64AND)
  2068  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2069  		v0.AddArg2(x, y)
  2070  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  2071  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2072  		v2.AuxInt = int64ToAuxInt(64)
  2073  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2074  		v3.AddArg(y)
  2075  		v2.AddArg(v3)
  2076  		v1.AddArg(v2)
  2077  		v.AddArg2(v0, v1)
  2078  		return true
  2079  	}
  2080  	// match: (Lsh16x8 x y)
  2081  	// cond: shiftIsBounded(v)
  2082  	// result: (SLL x y)
  2083  	for {
  2084  		x := v_0
  2085  		y := v_1
  2086  		if !(shiftIsBounded(v)) {
  2087  			break
  2088  		}
  2089  		v.reset(OpRISCV64SLL)
  2090  		v.AddArg2(x, y)
  2091  		return true
  2092  	}
  2093  	return false
  2094  }
  2095  func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
  2096  	v_1 := v.Args[1]
  2097  	v_0 := v.Args[0]
  2098  	b := v.Block
  2099  	typ := &b.Func.Config.Types
  2100  	// match: (Lsh32x16 <t> x y)
  2101  	// cond: !shiftIsBounded(v)
  2102  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  2103  	for {
  2104  		t := v.Type
  2105  		x := v_0
  2106  		y := v_1
  2107  		if !(!shiftIsBounded(v)) {
  2108  			break
  2109  		}
  2110  		v.reset(OpRISCV64AND)
  2111  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2112  		v0.AddArg2(x, y)
  2113  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2114  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2115  		v2.AuxInt = int64ToAuxInt(64)
  2116  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2117  		v3.AddArg(y)
  2118  		v2.AddArg(v3)
  2119  		v1.AddArg(v2)
  2120  		v.AddArg2(v0, v1)
  2121  		return true
  2122  	}
  2123  	// match: (Lsh32x16 x y)
  2124  	// cond: shiftIsBounded(v)
  2125  	// result: (SLL x y)
  2126  	for {
  2127  		x := v_0
  2128  		y := v_1
  2129  		if !(shiftIsBounded(v)) {
  2130  			break
  2131  		}
  2132  		v.reset(OpRISCV64SLL)
  2133  		v.AddArg2(x, y)
  2134  		return true
  2135  	}
  2136  	return false
  2137  }
  2138  func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
  2139  	v_1 := v.Args[1]
  2140  	v_0 := v.Args[0]
  2141  	b := v.Block
  2142  	typ := &b.Func.Config.Types
  2143  	// match: (Lsh32x32 <t> x y)
  2144  	// cond: !shiftIsBounded(v)
  2145  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  2146  	for {
  2147  		t := v.Type
  2148  		x := v_0
  2149  		y := v_1
  2150  		if !(!shiftIsBounded(v)) {
  2151  			break
  2152  		}
  2153  		v.reset(OpRISCV64AND)
  2154  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2155  		v0.AddArg2(x, y)
  2156  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2157  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2158  		v2.AuxInt = int64ToAuxInt(64)
  2159  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2160  		v3.AddArg(y)
  2161  		v2.AddArg(v3)
  2162  		v1.AddArg(v2)
  2163  		v.AddArg2(v0, v1)
  2164  		return true
  2165  	}
  2166  	// match: (Lsh32x32 x y)
  2167  	// cond: shiftIsBounded(v)
  2168  	// result: (SLL x y)
  2169  	for {
  2170  		x := v_0
  2171  		y := v_1
  2172  		if !(shiftIsBounded(v)) {
  2173  			break
  2174  		}
  2175  		v.reset(OpRISCV64SLL)
  2176  		v.AddArg2(x, y)
  2177  		return true
  2178  	}
  2179  	return false
  2180  }
  2181  func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
  2182  	v_1 := v.Args[1]
  2183  	v_0 := v.Args[0]
  2184  	b := v.Block
  2185  	// match: (Lsh32x64 <t> x y)
  2186  	// cond: !shiftIsBounded(v)
  2187  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
  2188  	for {
  2189  		t := v.Type
  2190  		x := v_0
  2191  		y := v_1
  2192  		if !(!shiftIsBounded(v)) {
  2193  			break
  2194  		}
  2195  		v.reset(OpRISCV64AND)
  2196  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2197  		v0.AddArg2(x, y)
  2198  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2199  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2200  		v2.AuxInt = int64ToAuxInt(64)
  2201  		v2.AddArg(y)
  2202  		v1.AddArg(v2)
  2203  		v.AddArg2(v0, v1)
  2204  		return true
  2205  	}
  2206  	// match: (Lsh32x64 x y)
  2207  	// cond: shiftIsBounded(v)
  2208  	// result: (SLL x y)
  2209  	for {
  2210  		x := v_0
  2211  		y := v_1
  2212  		if !(shiftIsBounded(v)) {
  2213  			break
  2214  		}
  2215  		v.reset(OpRISCV64SLL)
  2216  		v.AddArg2(x, y)
  2217  		return true
  2218  	}
  2219  	return false
  2220  }
  2221  func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
  2222  	v_1 := v.Args[1]
  2223  	v_0 := v.Args[0]
  2224  	b := v.Block
  2225  	typ := &b.Func.Config.Types
  2226  	// match: (Lsh32x8 <t> x y)
  2227  	// cond: !shiftIsBounded(v)
  2228  	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2229  	for {
  2230  		t := v.Type
  2231  		x := v_0
  2232  		y := v_1
  2233  		if !(!shiftIsBounded(v)) {
  2234  			break
  2235  		}
  2236  		v.reset(OpRISCV64AND)
  2237  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2238  		v0.AddArg2(x, y)
  2239  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  2240  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2241  		v2.AuxInt = int64ToAuxInt(64)
  2242  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2243  		v3.AddArg(y)
  2244  		v2.AddArg(v3)
  2245  		v1.AddArg(v2)
  2246  		v.AddArg2(v0, v1)
  2247  		return true
  2248  	}
  2249  	// match: (Lsh32x8 x y)
  2250  	// cond: shiftIsBounded(v)
  2251  	// result: (SLL x y)
  2252  	for {
  2253  		x := v_0
  2254  		y := v_1
  2255  		if !(shiftIsBounded(v)) {
  2256  			break
  2257  		}
  2258  		v.reset(OpRISCV64SLL)
  2259  		v.AddArg2(x, y)
  2260  		return true
  2261  	}
  2262  	return false
  2263  }
  2264  func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
  2265  	v_1 := v.Args[1]
  2266  	v_0 := v.Args[0]
  2267  	b := v.Block
  2268  	typ := &b.Func.Config.Types
  2269  	// match: (Lsh64x16 <t> x y)
  2270  	// cond: !shiftIsBounded(v)
  2271  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  2272  	for {
  2273  		t := v.Type
  2274  		x := v_0
  2275  		y := v_1
  2276  		if !(!shiftIsBounded(v)) {
  2277  			break
  2278  		}
  2279  		v.reset(OpRISCV64AND)
  2280  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2281  		v0.AddArg2(x, y)
  2282  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2283  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2284  		v2.AuxInt = int64ToAuxInt(64)
  2285  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2286  		v3.AddArg(y)
  2287  		v2.AddArg(v3)
  2288  		v1.AddArg(v2)
  2289  		v.AddArg2(v0, v1)
  2290  		return true
  2291  	}
  2292  	// match: (Lsh64x16 x y)
  2293  	// cond: shiftIsBounded(v)
  2294  	// result: (SLL x y)
  2295  	for {
  2296  		x := v_0
  2297  		y := v_1
  2298  		if !(shiftIsBounded(v)) {
  2299  			break
  2300  		}
  2301  		v.reset(OpRISCV64SLL)
  2302  		v.AddArg2(x, y)
  2303  		return true
  2304  	}
  2305  	return false
  2306  }
  2307  func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
  2308  	v_1 := v.Args[1]
  2309  	v_0 := v.Args[0]
  2310  	b := v.Block
  2311  	typ := &b.Func.Config.Types
  2312  	// match: (Lsh64x32 <t> x y)
  2313  	// cond: !shiftIsBounded(v)
  2314  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  2315  	for {
  2316  		t := v.Type
  2317  		x := v_0
  2318  		y := v_1
  2319  		if !(!shiftIsBounded(v)) {
  2320  			break
  2321  		}
  2322  		v.reset(OpRISCV64AND)
  2323  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2324  		v0.AddArg2(x, y)
  2325  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2326  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2327  		v2.AuxInt = int64ToAuxInt(64)
  2328  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2329  		v3.AddArg(y)
  2330  		v2.AddArg(v3)
  2331  		v1.AddArg(v2)
  2332  		v.AddArg2(v0, v1)
  2333  		return true
  2334  	}
  2335  	// match: (Lsh64x32 x y)
  2336  	// cond: shiftIsBounded(v)
  2337  	// result: (SLL x y)
  2338  	for {
  2339  		x := v_0
  2340  		y := v_1
  2341  		if !(shiftIsBounded(v)) {
  2342  			break
  2343  		}
  2344  		v.reset(OpRISCV64SLL)
  2345  		v.AddArg2(x, y)
  2346  		return true
  2347  	}
  2348  	return false
  2349  }
  2350  func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
  2351  	v_1 := v.Args[1]
  2352  	v_0 := v.Args[0]
  2353  	b := v.Block
  2354  	// match: (Lsh64x64 <t> x y)
  2355  	// cond: !shiftIsBounded(v)
  2356  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
  2357  	for {
  2358  		t := v.Type
  2359  		x := v_0
  2360  		y := v_1
  2361  		if !(!shiftIsBounded(v)) {
  2362  			break
  2363  		}
  2364  		v.reset(OpRISCV64AND)
  2365  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2366  		v0.AddArg2(x, y)
  2367  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2368  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2369  		v2.AuxInt = int64ToAuxInt(64)
  2370  		v2.AddArg(y)
  2371  		v1.AddArg(v2)
  2372  		v.AddArg2(v0, v1)
  2373  		return true
  2374  	}
  2375  	// match: (Lsh64x64 x y)
  2376  	// cond: shiftIsBounded(v)
  2377  	// result: (SLL x y)
  2378  	for {
  2379  		x := v_0
  2380  		y := v_1
  2381  		if !(shiftIsBounded(v)) {
  2382  			break
  2383  		}
  2384  		v.reset(OpRISCV64SLL)
  2385  		v.AddArg2(x, y)
  2386  		return true
  2387  	}
  2388  	return false
  2389  }
  2390  func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
  2391  	v_1 := v.Args[1]
  2392  	v_0 := v.Args[0]
  2393  	b := v.Block
  2394  	typ := &b.Func.Config.Types
  2395  	// match: (Lsh64x8 <t> x y)
  2396  	// cond: !shiftIsBounded(v)
  2397  	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2398  	for {
  2399  		t := v.Type
  2400  		x := v_0
  2401  		y := v_1
  2402  		if !(!shiftIsBounded(v)) {
  2403  			break
  2404  		}
  2405  		v.reset(OpRISCV64AND)
  2406  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2407  		v0.AddArg2(x, y)
  2408  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  2409  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2410  		v2.AuxInt = int64ToAuxInt(64)
  2411  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2412  		v3.AddArg(y)
  2413  		v2.AddArg(v3)
  2414  		v1.AddArg(v2)
  2415  		v.AddArg2(v0, v1)
  2416  		return true
  2417  	}
  2418  	// match: (Lsh64x8 x y)
  2419  	// cond: shiftIsBounded(v)
  2420  	// result: (SLL x y)
  2421  	for {
  2422  		x := v_0
  2423  		y := v_1
  2424  		if !(shiftIsBounded(v)) {
  2425  			break
  2426  		}
  2427  		v.reset(OpRISCV64SLL)
  2428  		v.AddArg2(x, y)
  2429  		return true
  2430  	}
  2431  	return false
  2432  }
  2433  func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
  2434  	v_1 := v.Args[1]
  2435  	v_0 := v.Args[0]
  2436  	b := v.Block
  2437  	typ := &b.Func.Config.Types
  2438  	// match: (Lsh8x16 <t> x y)
  2439  	// cond: !shiftIsBounded(v)
  2440  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  2441  	for {
  2442  		t := v.Type
  2443  		x := v_0
  2444  		y := v_1
  2445  		if !(!shiftIsBounded(v)) {
  2446  			break
  2447  		}
  2448  		v.reset(OpRISCV64AND)
  2449  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2450  		v0.AddArg2(x, y)
  2451  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2452  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2453  		v2.AuxInt = int64ToAuxInt(64)
  2454  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  2455  		v3.AddArg(y)
  2456  		v2.AddArg(v3)
  2457  		v1.AddArg(v2)
  2458  		v.AddArg2(v0, v1)
  2459  		return true
  2460  	}
  2461  	// match: (Lsh8x16 x y)
  2462  	// cond: shiftIsBounded(v)
  2463  	// result: (SLL x y)
  2464  	for {
  2465  		x := v_0
  2466  		y := v_1
  2467  		if !(shiftIsBounded(v)) {
  2468  			break
  2469  		}
  2470  		v.reset(OpRISCV64SLL)
  2471  		v.AddArg2(x, y)
  2472  		return true
  2473  	}
  2474  	return false
  2475  }
  2476  func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
  2477  	v_1 := v.Args[1]
  2478  	v_0 := v.Args[0]
  2479  	b := v.Block
  2480  	typ := &b.Func.Config.Types
  2481  	// match: (Lsh8x32 <t> x y)
  2482  	// cond: !shiftIsBounded(v)
  2483  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  2484  	for {
  2485  		t := v.Type
  2486  		x := v_0
  2487  		y := v_1
  2488  		if !(!shiftIsBounded(v)) {
  2489  			break
  2490  		}
  2491  		v.reset(OpRISCV64AND)
  2492  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2493  		v0.AddArg2(x, y)
  2494  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2495  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2496  		v2.AuxInt = int64ToAuxInt(64)
  2497  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  2498  		v3.AddArg(y)
  2499  		v2.AddArg(v3)
  2500  		v1.AddArg(v2)
  2501  		v.AddArg2(v0, v1)
  2502  		return true
  2503  	}
  2504  	// match: (Lsh8x32 x y)
  2505  	// cond: shiftIsBounded(v)
  2506  	// result: (SLL x y)
  2507  	for {
  2508  		x := v_0
  2509  		y := v_1
  2510  		if !(shiftIsBounded(v)) {
  2511  			break
  2512  		}
  2513  		v.reset(OpRISCV64SLL)
  2514  		v.AddArg2(x, y)
  2515  		return true
  2516  	}
  2517  	return false
  2518  }
  2519  func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
  2520  	v_1 := v.Args[1]
  2521  	v_0 := v.Args[0]
  2522  	b := v.Block
  2523  	// match: (Lsh8x64 <t> x y)
  2524  	// cond: !shiftIsBounded(v)
  2525  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
  2526  	for {
  2527  		t := v.Type
  2528  		x := v_0
  2529  		y := v_1
  2530  		if !(!shiftIsBounded(v)) {
  2531  			break
  2532  		}
  2533  		v.reset(OpRISCV64AND)
  2534  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2535  		v0.AddArg2(x, y)
  2536  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2537  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2538  		v2.AuxInt = int64ToAuxInt(64)
  2539  		v2.AddArg(y)
  2540  		v1.AddArg(v2)
  2541  		v.AddArg2(v0, v1)
  2542  		return true
  2543  	}
  2544  	// match: (Lsh8x64 x y)
  2545  	// cond: shiftIsBounded(v)
  2546  	// result: (SLL x y)
  2547  	for {
  2548  		x := v_0
  2549  		y := v_1
  2550  		if !(shiftIsBounded(v)) {
  2551  			break
  2552  		}
  2553  		v.reset(OpRISCV64SLL)
  2554  		v.AddArg2(x, y)
  2555  		return true
  2556  	}
  2557  	return false
  2558  }
  2559  func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
  2560  	v_1 := v.Args[1]
  2561  	v_0 := v.Args[0]
  2562  	b := v.Block
  2563  	typ := &b.Func.Config.Types
  2564  	// match: (Lsh8x8 <t> x y)
  2565  	// cond: !shiftIsBounded(v)
  2566  	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  2567  	for {
  2568  		t := v.Type
  2569  		x := v_0
  2570  		y := v_1
  2571  		if !(!shiftIsBounded(v)) {
  2572  			break
  2573  		}
  2574  		v.reset(OpRISCV64AND)
  2575  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  2576  		v0.AddArg2(x, y)
  2577  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  2578  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  2579  		v2.AuxInt = int64ToAuxInt(64)
  2580  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  2581  		v3.AddArg(y)
  2582  		v2.AddArg(v3)
  2583  		v1.AddArg(v2)
  2584  		v.AddArg2(v0, v1)
  2585  		return true
  2586  	}
  2587  	// match: (Lsh8x8 x y)
  2588  	// cond: shiftIsBounded(v)
  2589  	// result: (SLL x y)
  2590  	for {
  2591  		x := v_0
  2592  		y := v_1
  2593  		if !(shiftIsBounded(v)) {
  2594  			break
  2595  		}
  2596  		v.reset(OpRISCV64SLL)
  2597  		v.AddArg2(x, y)
  2598  		return true
  2599  	}
  2600  	return false
  2601  }
  2602  func rewriteValueRISCV64_OpMax64(v *Value) bool {
  2603  	v_1 := v.Args[1]
  2604  	v_0 := v.Args[0]
  2605  	// match: (Max64 x y)
  2606  	// cond: buildcfg.GORISCV64 >= 22
  2607  	// result: (MAX x y)
  2608  	for {
  2609  		x := v_0
  2610  		y := v_1
  2611  		if !(buildcfg.GORISCV64 >= 22) {
  2612  			break
  2613  		}
  2614  		v.reset(OpRISCV64MAX)
  2615  		v.AddArg2(x, y)
  2616  		return true
  2617  	}
  2618  	return false
  2619  }
  2620  func rewriteValueRISCV64_OpMax64u(v *Value) bool {
  2621  	v_1 := v.Args[1]
  2622  	v_0 := v.Args[0]
  2623  	// match: (Max64u x y)
  2624  	// cond: buildcfg.GORISCV64 >= 22
  2625  	// result: (MAXU x y)
  2626  	for {
  2627  		x := v_0
  2628  		y := v_1
  2629  		if !(buildcfg.GORISCV64 >= 22) {
  2630  			break
  2631  		}
  2632  		v.reset(OpRISCV64MAXU)
  2633  		v.AddArg2(x, y)
  2634  		return true
  2635  	}
  2636  	return false
  2637  }
  2638  func rewriteValueRISCV64_OpMin64(v *Value) bool {
  2639  	v_1 := v.Args[1]
  2640  	v_0 := v.Args[0]
  2641  	// match: (Min64 x y)
  2642  	// cond: buildcfg.GORISCV64 >= 22
  2643  	// result: (MIN x y)
  2644  	for {
  2645  		x := v_0
  2646  		y := v_1
  2647  		if !(buildcfg.GORISCV64 >= 22) {
  2648  			break
  2649  		}
  2650  		v.reset(OpRISCV64MIN)
  2651  		v.AddArg2(x, y)
  2652  		return true
  2653  	}
  2654  	return false
  2655  }
  2656  func rewriteValueRISCV64_OpMin64u(v *Value) bool {
  2657  	v_1 := v.Args[1]
  2658  	v_0 := v.Args[0]
  2659  	// match: (Min64u x y)
  2660  	// cond: buildcfg.GORISCV64 >= 22
  2661  	// result: (MINU x y)
  2662  	for {
  2663  		x := v_0
  2664  		y := v_1
  2665  		if !(buildcfg.GORISCV64 >= 22) {
  2666  			break
  2667  		}
  2668  		v.reset(OpRISCV64MINU)
  2669  		v.AddArg2(x, y)
  2670  		return true
  2671  	}
  2672  	return false
  2673  }
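// Note: the Max64/Max64u/Min64/Min64u rules only fire when
// buildcfg.GORISCV64 >= 22, i.e. when targeting the rva22u64 profile or
// later, whose Zbb extension provides the MAX/MAXU/MIN/MINU instructions.
// No fallback rule exists here, so on older targets these ops are presumably
// never emitted by the front end in the first place.
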
  2674  func rewriteValueRISCV64_OpMod16(v *Value) bool {
  2675  	v_1 := v.Args[1]
  2676  	v_0 := v.Args[0]
  2677  	b := v.Block
  2678  	typ := &b.Func.Config.Types
  2679  	// match: (Mod16 x y [false])
  2680  	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
  2681  	for {
  2682  		if auxIntToBool(v.AuxInt) != false {
  2683  			break
  2684  		}
  2685  		x := v_0
  2686  		y := v_1
  2687  		v.reset(OpRISCV64REMW)
  2688  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2689  		v0.AddArg(x)
  2690  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2691  		v1.AddArg(y)
  2692  		v.AddArg2(v0, v1)
  2693  		return true
  2694  	}
  2695  	return false
  2696  }
  2697  func rewriteValueRISCV64_OpMod16u(v *Value) bool {
  2698  	v_1 := v.Args[1]
  2699  	v_0 := v.Args[0]
  2700  	b := v.Block
  2701  	typ := &b.Func.Config.Types
  2702  	// match: (Mod16u x y)
  2703  	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
  2704  	for {
  2705  		x := v_0
  2706  		y := v_1
  2707  		v.reset(OpRISCV64REMUW)
  2708  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2709  		v0.AddArg(x)
  2710  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2711  		v1.AddArg(y)
  2712  		v.AddArg2(v0, v1)
  2713  		return true
  2714  	}
  2715  }
  2716  func rewriteValueRISCV64_OpMod32(v *Value) bool {
  2717  	v_1 := v.Args[1]
  2718  	v_0 := v.Args[0]
  2719  	// match: (Mod32 x y [false])
  2720  	// result: (REMW x y)
  2721  	for {
  2722  		if auxIntToBool(v.AuxInt) != false {
  2723  			break
  2724  		}
  2725  		x := v_0
  2726  		y := v_1
  2727  		v.reset(OpRISCV64REMW)
  2728  		v.AddArg2(x, y)
  2729  		return true
  2730  	}
  2731  	return false
  2732  }
  2733  func rewriteValueRISCV64_OpMod64(v *Value) bool {
  2734  	v_1 := v.Args[1]
  2735  	v_0 := v.Args[0]
  2736  	// match: (Mod64 x y [false])
  2737  	// result: (REM x y)
  2738  	for {
  2739  		if auxIntToBool(v.AuxInt) != false {
  2740  			break
  2741  		}
  2742  		x := v_0
  2743  		y := v_1
  2744  		v.reset(OpRISCV64REM)
  2745  		v.AddArg2(x, y)
  2746  		return true
  2747  	}
  2748  	return false
  2749  }
  2750  func rewriteValueRISCV64_OpMod8(v *Value) bool {
  2751  	v_1 := v.Args[1]
  2752  	v_0 := v.Args[0]
  2753  	b := v.Block
  2754  	typ := &b.Func.Config.Types
  2755  	// match: (Mod8 x y)
  2756  	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
  2757  	for {
  2758  		x := v_0
  2759  		y := v_1
  2760  		v.reset(OpRISCV64REMW)
  2761  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2762  		v0.AddArg(x)
  2763  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2764  		v1.AddArg(y)
  2765  		v.AddArg2(v0, v1)
  2766  		return true
  2767  	}
  2768  }
  2769  func rewriteValueRISCV64_OpMod8u(v *Value) bool {
  2770  	v_1 := v.Args[1]
  2771  	v_0 := v.Args[0]
  2772  	b := v.Block
  2773  	typ := &b.Func.Config.Types
  2774  	// match: (Mod8u x y)
  2775  	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
  2776  	for {
  2777  		x := v_0
  2778  		y := v_1
  2779  		v.reset(OpRISCV64REMUW)
  2780  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2781  		v0.AddArg(x)
  2782  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2783  		v1.AddArg(y)
  2784  		v.AddArg2(v0, v1)
  2785  		return true
  2786  	}
  2787  }
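// Note: the sub-word Mod lowerings use the RV64M word instructions: operands
// are first extended to 32 bits (sign-extended for Mod16/Mod8, zero-extended
// for Mod16u/Mod8u) and REMW/REMUW then compute a 32-bit remainder, which is
// always wide enough to hold the 8- or 16-bit result.
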
  2788  func rewriteValueRISCV64_OpMove(v *Value) bool {
  2789  	v_2 := v.Args[2]
  2790  	v_1 := v.Args[1]
  2791  	v_0 := v.Args[0]
  2792  	b := v.Block
  2793  	config := b.Func.Config
  2794  	typ := &b.Func.Config.Types
  2795  	// match: (Move [0] _ _ mem)
  2796  	// result: mem
  2797  	for {
  2798  		if auxIntToInt64(v.AuxInt) != 0 {
  2799  			break
  2800  		}
  2801  		mem := v_2
  2802  		v.copyOf(mem)
  2803  		return true
  2804  	}
  2805  	// match: (Move [1] dst src mem)
  2806  	// result: (MOVBstore dst (MOVBload src mem) mem)
  2807  	for {
  2808  		if auxIntToInt64(v.AuxInt) != 1 {
  2809  			break
  2810  		}
  2811  		dst := v_0
  2812  		src := v_1
  2813  		mem := v_2
  2814  		v.reset(OpRISCV64MOVBstore)
  2815  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2816  		v0.AddArg2(src, mem)
  2817  		v.AddArg3(dst, v0, mem)
  2818  		return true
  2819  	}
  2820  	// match: (Move [2] {t} dst src mem)
  2821  	// cond: t.Alignment()%2 == 0
  2822  	// result: (MOVHstore dst (MOVHload src mem) mem)
  2823  	for {
  2824  		if auxIntToInt64(v.AuxInt) != 2 {
  2825  			break
  2826  		}
  2827  		t := auxToType(v.Aux)
  2828  		dst := v_0
  2829  		src := v_1
  2830  		mem := v_2
  2831  		if !(t.Alignment()%2 == 0) {
  2832  			break
  2833  		}
  2834  		v.reset(OpRISCV64MOVHstore)
  2835  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2836  		v0.AddArg2(src, mem)
  2837  		v.AddArg3(dst, v0, mem)
  2838  		return true
  2839  	}
  2840  	// match: (Move [2] dst src mem)
  2841  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
  2842  	for {
  2843  		if auxIntToInt64(v.AuxInt) != 2 {
  2844  			break
  2845  		}
  2846  		dst := v_0
  2847  		src := v_1
  2848  		mem := v_2
  2849  		v.reset(OpRISCV64MOVBstore)
  2850  		v.AuxInt = int32ToAuxInt(1)
  2851  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2852  		v0.AuxInt = int32ToAuxInt(1)
  2853  		v0.AddArg2(src, mem)
  2854  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2855  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2856  		v2.AddArg2(src, mem)
  2857  		v1.AddArg3(dst, v2, mem)
  2858  		v.AddArg3(dst, v0, v1)
  2859  		return true
  2860  	}
  2861  	// match: (Move [4] {t} dst src mem)
  2862  	// cond: t.Alignment()%4 == 0
  2863  	// result: (MOVWstore dst (MOVWload src mem) mem)
  2864  	for {
  2865  		if auxIntToInt64(v.AuxInt) != 4 {
  2866  			break
  2867  		}
  2868  		t := auxToType(v.Aux)
  2869  		dst := v_0
  2870  		src := v_1
  2871  		mem := v_2
  2872  		if !(t.Alignment()%4 == 0) {
  2873  			break
  2874  		}
  2875  		v.reset(OpRISCV64MOVWstore)
  2876  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2877  		v0.AddArg2(src, mem)
  2878  		v.AddArg3(dst, v0, mem)
  2879  		return true
  2880  	}
  2881  	// match: (Move [4] {t} dst src mem)
  2882  	// cond: t.Alignment()%2 == 0
  2883  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
  2884  	for {
  2885  		if auxIntToInt64(v.AuxInt) != 4 {
  2886  			break
  2887  		}
  2888  		t := auxToType(v.Aux)
  2889  		dst := v_0
  2890  		src := v_1
  2891  		mem := v_2
  2892  		if !(t.Alignment()%2 == 0) {
  2893  			break
  2894  		}
  2895  		v.reset(OpRISCV64MOVHstore)
  2896  		v.AuxInt = int32ToAuxInt(2)
  2897  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2898  		v0.AuxInt = int32ToAuxInt(2)
  2899  		v0.AddArg2(src, mem)
  2900  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2901  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2902  		v2.AddArg2(src, mem)
  2903  		v1.AddArg3(dst, v2, mem)
  2904  		v.AddArg3(dst, v0, v1)
  2905  		return true
  2906  	}
  2907  	// match: (Move [4] dst src mem)
  2908  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
  2909  	for {
  2910  		if auxIntToInt64(v.AuxInt) != 4 {
  2911  			break
  2912  		}
  2913  		dst := v_0
  2914  		src := v_1
  2915  		mem := v_2
  2916  		v.reset(OpRISCV64MOVBstore)
  2917  		v.AuxInt = int32ToAuxInt(3)
  2918  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2919  		v0.AuxInt = int32ToAuxInt(3)
  2920  		v0.AddArg2(src, mem)
  2921  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2922  		v1.AuxInt = int32ToAuxInt(2)
  2923  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2924  		v2.AuxInt = int32ToAuxInt(2)
  2925  		v2.AddArg2(src, mem)
  2926  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2927  		v3.AuxInt = int32ToAuxInt(1)
  2928  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2929  		v4.AuxInt = int32ToAuxInt(1)
  2930  		v4.AddArg2(src, mem)
  2931  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2932  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2933  		v6.AddArg2(src, mem)
  2934  		v5.AddArg3(dst, v6, mem)
  2935  		v3.AddArg3(dst, v4, v5)
  2936  		v1.AddArg3(dst, v2, v3)
  2937  		v.AddArg3(dst, v0, v1)
  2938  		return true
  2939  	}
  2940  	// match: (Move [8] {t} dst src mem)
  2941  	// cond: t.Alignment()%8 == 0
  2942  	// result: (MOVDstore dst (MOVDload src mem) mem)
  2943  	for {
  2944  		if auxIntToInt64(v.AuxInt) != 8 {
  2945  			break
  2946  		}
  2947  		t := auxToType(v.Aux)
  2948  		dst := v_0
  2949  		src := v_1
  2950  		mem := v_2
  2951  		if !(t.Alignment()%8 == 0) {
  2952  			break
  2953  		}
  2954  		v.reset(OpRISCV64MOVDstore)
  2955  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  2956  		v0.AddArg2(src, mem)
  2957  		v.AddArg3(dst, v0, mem)
  2958  		return true
  2959  	}
  2960  	// match: (Move [8] {t} dst src mem)
  2961  	// cond: t.Alignment()%4 == 0
  2962  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  2963  	for {
  2964  		if auxIntToInt64(v.AuxInt) != 8 {
  2965  			break
  2966  		}
  2967  		t := auxToType(v.Aux)
  2968  		dst := v_0
  2969  		src := v_1
  2970  		mem := v_2
  2971  		if !(t.Alignment()%4 == 0) {
  2972  			break
  2973  		}
  2974  		v.reset(OpRISCV64MOVWstore)
  2975  		v.AuxInt = int32ToAuxInt(4)
  2976  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2977  		v0.AuxInt = int32ToAuxInt(4)
  2978  		v0.AddArg2(src, mem)
  2979  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  2980  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2981  		v2.AddArg2(src, mem)
  2982  		v1.AddArg3(dst, v2, mem)
  2983  		v.AddArg3(dst, v0, v1)
  2984  		return true
  2985  	}
  2986  	// match: (Move [8] {t} dst src mem)
  2987  	// cond: t.Alignment()%2 == 0
  2988  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  2989  	for {
  2990  		if auxIntToInt64(v.AuxInt) != 8 {
  2991  			break
  2992  		}
  2993  		t := auxToType(v.Aux)
  2994  		dst := v_0
  2995  		src := v_1
  2996  		mem := v_2
  2997  		if !(t.Alignment()%2 == 0) {
  2998  			break
  2999  		}
  3000  		v.reset(OpRISCV64MOVHstore)
  3001  		v.AuxInt = int32ToAuxInt(6)
  3002  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3003  		v0.AuxInt = int32ToAuxInt(6)
  3004  		v0.AddArg2(src, mem)
  3005  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3006  		v1.AuxInt = int32ToAuxInt(4)
  3007  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3008  		v2.AuxInt = int32ToAuxInt(4)
  3009  		v2.AddArg2(src, mem)
  3010  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3011  		v3.AuxInt = int32ToAuxInt(2)
  3012  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3013  		v4.AuxInt = int32ToAuxInt(2)
  3014  		v4.AddArg2(src, mem)
  3015  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3016  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3017  		v6.AddArg2(src, mem)
  3018  		v5.AddArg3(dst, v6, mem)
  3019  		v3.AddArg3(dst, v4, v5)
  3020  		v1.AddArg3(dst, v2, v3)
  3021  		v.AddArg3(dst, v0, v1)
  3022  		return true
  3023  	}
  3024  	// match: (Move [3] dst src mem)
  3025  	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
  3026  	for {
  3027  		if auxIntToInt64(v.AuxInt) != 3 {
  3028  			break
  3029  		}
  3030  		dst := v_0
  3031  		src := v_1
  3032  		mem := v_2
  3033  		v.reset(OpRISCV64MOVBstore)
  3034  		v.AuxInt = int32ToAuxInt(2)
  3035  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3036  		v0.AuxInt = int32ToAuxInt(2)
  3037  		v0.AddArg2(src, mem)
  3038  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  3039  		v1.AuxInt = int32ToAuxInt(1)
  3040  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3041  		v2.AuxInt = int32ToAuxInt(1)
  3042  		v2.AddArg2(src, mem)
  3043  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  3044  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  3045  		v4.AddArg2(src, mem)
  3046  		v3.AddArg3(dst, v4, mem)
  3047  		v1.AddArg3(dst, v2, v3)
  3048  		v.AddArg3(dst, v0, v1)
  3049  		return true
  3050  	}
  3051  	// match: (Move [6] {t} dst src mem)
  3052  	// cond: t.Alignment()%2 == 0
  3053  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  3054  	for {
  3055  		if auxIntToInt64(v.AuxInt) != 6 {
  3056  			break
  3057  		}
  3058  		t := auxToType(v.Aux)
  3059  		dst := v_0
  3060  		src := v_1
  3061  		mem := v_2
  3062  		if !(t.Alignment()%2 == 0) {
  3063  			break
  3064  		}
  3065  		v.reset(OpRISCV64MOVHstore)
  3066  		v.AuxInt = int32ToAuxInt(4)
  3067  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3068  		v0.AuxInt = int32ToAuxInt(4)
  3069  		v0.AddArg2(src, mem)
  3070  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3071  		v1.AuxInt = int32ToAuxInt(2)
  3072  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3073  		v2.AuxInt = int32ToAuxInt(2)
  3074  		v2.AddArg2(src, mem)
  3075  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3076  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3077  		v4.AddArg2(src, mem)
  3078  		v3.AddArg3(dst, v4, mem)
  3079  		v1.AddArg3(dst, v2, v3)
  3080  		v.AddArg3(dst, v0, v1)
  3081  		return true
  3082  	}
  3083  	// match: (Move [12] {t} dst src mem)
  3084  	// cond: t.Alignment()%4 == 0
  3085  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  3086  	for {
  3087  		if auxIntToInt64(v.AuxInt) != 12 {
  3088  			break
  3089  		}
  3090  		t := auxToType(v.Aux)
  3091  		dst := v_0
  3092  		src := v_1
  3093  		mem := v_2
  3094  		if !(t.Alignment()%4 == 0) {
  3095  			break
  3096  		}
  3097  		v.reset(OpRISCV64MOVWstore)
  3098  		v.AuxInt = int32ToAuxInt(8)
  3099  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3100  		v0.AuxInt = int32ToAuxInt(8)
  3101  		v0.AddArg2(src, mem)
  3102  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3103  		v1.AuxInt = int32ToAuxInt(4)
  3104  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3105  		v2.AuxInt = int32ToAuxInt(4)
  3106  		v2.AddArg2(src, mem)
  3107  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3108  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3109  		v4.AddArg2(src, mem)
  3110  		v3.AddArg3(dst, v4, mem)
  3111  		v1.AddArg3(dst, v2, v3)
  3112  		v.AddArg3(dst, v0, v1)
  3113  		return true
  3114  	}
  3115  	// match: (Move [16] {t} dst src mem)
  3116  	// cond: t.Alignment()%8 == 0
  3117  	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
  3118  	for {
  3119  		if auxIntToInt64(v.AuxInt) != 16 {
  3120  			break
  3121  		}
  3122  		t := auxToType(v.Aux)
  3123  		dst := v_0
  3124  		src := v_1
  3125  		mem := v_2
  3126  		if !(t.Alignment()%8 == 0) {
  3127  			break
  3128  		}
  3129  		v.reset(OpRISCV64MOVDstore)
  3130  		v.AuxInt = int32ToAuxInt(8)
  3131  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3132  		v0.AuxInt = int32ToAuxInt(8)
  3133  		v0.AddArg2(src, mem)
  3134  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3135  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3136  		v2.AddArg2(src, mem)
  3137  		v1.AddArg3(dst, v2, mem)
  3138  		v.AddArg3(dst, v0, v1)
  3139  		return true
  3140  	}
  3141  	// match: (Move [24] {t} dst src mem)
  3142  	// cond: t.Alignment()%8 == 0
  3143  	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
  3144  	for {
  3145  		if auxIntToInt64(v.AuxInt) != 24 {
  3146  			break
  3147  		}
  3148  		t := auxToType(v.Aux)
  3149  		dst := v_0
  3150  		src := v_1
  3151  		mem := v_2
  3152  		if !(t.Alignment()%8 == 0) {
  3153  			break
  3154  		}
  3155  		v.reset(OpRISCV64MOVDstore)
  3156  		v.AuxInt = int32ToAuxInt(16)
  3157  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3158  		v0.AuxInt = int32ToAuxInt(16)
  3159  		v0.AddArg2(src, mem)
  3160  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3161  		v1.AuxInt = int32ToAuxInt(8)
  3162  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3163  		v2.AuxInt = int32ToAuxInt(8)
  3164  		v2.AddArg2(src, mem)
  3165  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3166  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3167  		v4.AddArg2(src, mem)
  3168  		v3.AddArg3(dst, v4, mem)
  3169  		v1.AddArg3(dst, v2, v3)
  3170  		v.AddArg3(dst, v0, v1)
  3171  		return true
  3172  	}
  3173  	// match: (Move [32] {t} dst src mem)
  3174  	// cond: t.Alignment()%8 == 0
  3175  	// result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
  3176  	for {
  3177  		if auxIntToInt64(v.AuxInt) != 32 {
  3178  			break
  3179  		}
  3180  		t := auxToType(v.Aux)
  3181  		dst := v_0
  3182  		src := v_1
  3183  		mem := v_2
  3184  		if !(t.Alignment()%8 == 0) {
  3185  			break
  3186  		}
  3187  		v.reset(OpRISCV64MOVDstore)
  3188  		v.AuxInt = int32ToAuxInt(24)
  3189  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3190  		v0.AuxInt = int32ToAuxInt(24)
  3191  		v0.AddArg2(src, mem)
  3192  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3193  		v1.AuxInt = int32ToAuxInt(16)
  3194  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3195  		v2.AuxInt = int32ToAuxInt(16)
  3196  		v2.AddArg2(src, mem)
  3197  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3198  		v3.AuxInt = int32ToAuxInt(8)
  3199  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3200  		v4.AuxInt = int32ToAuxInt(8)
  3201  		v4.AddArg2(src, mem)
  3202  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3203  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3204  		v6.AddArg2(src, mem)
  3205  		v5.AddArg3(dst, v6, mem)
  3206  		v3.AddArg3(dst, v4, v5)
  3207  		v1.AddArg3(dst, v2, v3)
  3208  		v.AddArg3(dst, v0, v1)
  3209  		return true
  3210  	}
  3211  	// match: (Move [s] {t} dst src mem)
  3212  	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
  3213  	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
  3214  	for {
  3215  		s := auxIntToInt64(v.AuxInt)
  3216  		t := auxToType(v.Aux)
  3217  		dst := v_0
  3218  		src := v_1
  3219  		mem := v_2
  3220  		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
  3221  			break
  3222  		}
  3223  		v.reset(OpRISCV64DUFFCOPY)
  3224  		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
  3225  		v.AddArg3(dst, src, mem)
  3226  		return true
  3227  	}
  3228  	// match: (Move [s] {t} dst src mem)
  3229  	// cond: (s <= 16 || logLargeCopy(v, s))
  3230  	// result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
  3231  	for {
  3232  		s := auxIntToInt64(v.AuxInt)
  3233  		t := auxToType(v.Aux)
  3234  		dst := v_0
  3235  		src := v_1
  3236  		mem := v_2
  3237  		if !(s <= 16 || logLargeCopy(v, s)) {
  3238  			break
  3239  		}
  3240  		v.reset(OpRISCV64LoweredMove)
  3241  		v.AuxInt = int64ToAuxInt(t.Alignment())
  3242  		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
  3243  		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  3244  		v0.AddArg(src)
  3245  		v.AddArg4(dst, src, v0, mem)
  3246  		return true
  3247  	}
  3248  	return false
  3249  }
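// Note on the DUFFCOPY rule in OpMove above: duffcopy on riscv64 is laid out
// as 128 copy units of 8 bytes each, apparently with 16 bytes of code per
// unit, so entering at offset 16*(128 - s/8) executes exactly s/8 units. For
// example, a 64-byte, 8-byte-aligned Move gets AuxInt 16*(128-8) = 1920 and
// runs the last 8 units, while s = 8*128 = 1024 enters at offset 0 and runs
// them all.
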
  3250  func rewriteValueRISCV64_OpMul16(v *Value) bool {
  3251  	v_1 := v.Args[1]
  3252  	v_0 := v.Args[0]
  3253  	b := v.Block
  3254  	typ := &b.Func.Config.Types
  3255  	// match: (Mul16 x y)
  3256  	// result: (MULW (SignExt16to32 x) (SignExt16to32 y))
  3257  	for {
  3258  		x := v_0
  3259  		y := v_1
  3260  		v.reset(OpRISCV64MULW)
  3261  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  3262  		v0.AddArg(x)
  3263  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  3264  		v1.AddArg(y)
  3265  		v.AddArg2(v0, v1)
  3266  		return true
  3267  	}
  3268  }
  3269  func rewriteValueRISCV64_OpMul8(v *Value) bool {
  3270  	v_1 := v.Args[1]
  3271  	v_0 := v.Args[0]
  3272  	b := v.Block
  3273  	typ := &b.Func.Config.Types
  3274  	// match: (Mul8 x y)
  3275  	// result: (MULW (SignExt8to32 x) (SignExt8to32 y))
  3276  	for {
  3277  		x := v_0
  3278  		y := v_1
  3279  		v.reset(OpRISCV64MULW)
  3280  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  3281  		v0.AddArg(x)
  3282  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  3283  		v1.AddArg(y)
  3284  		v.AddArg2(v0, v1)
  3285  		return true
  3286  	}
  3287  }
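// Note: for Mul16/Mul8 only the low 16 (or 8) bits of the product are
// significant, and those bits do not depend on how the operands were
// widened, so extending to 32 bits and using the word multiply MULW is
// sufficient; no 64-bit MUL is needed.
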
  3288  func rewriteValueRISCV64_OpNeq16(v *Value) bool {
  3289  	v_1 := v.Args[1]
  3290  	v_0 := v.Args[0]
  3291  	b := v.Block
  3292  	typ := &b.Func.Config.Types
  3293  	// match: (Neq16 x y)
  3294  	// result: (Not (Eq16 x y))
  3295  	for {
  3296  		x := v_0
  3297  		y := v_1
  3298  		v.reset(OpNot)
  3299  		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
  3300  		v0.AddArg2(x, y)
  3301  		v.AddArg(v0)
  3302  		return true
  3303  	}
  3304  }
  3305  func rewriteValueRISCV64_OpNeq32(v *Value) bool {
  3306  	v_1 := v.Args[1]
  3307  	v_0 := v.Args[0]
  3308  	b := v.Block
  3309  	typ := &b.Func.Config.Types
  3310  	// match: (Neq32 x y)
  3311  	// result: (Not (Eq32 x y))
  3312  	for {
  3313  		x := v_0
  3314  		y := v_1
  3315  		v.reset(OpNot)
  3316  		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
  3317  		v0.AddArg2(x, y)
  3318  		v.AddArg(v0)
  3319  		return true
  3320  	}
  3321  }
  3322  func rewriteValueRISCV64_OpNeq64(v *Value) bool {
  3323  	v_1 := v.Args[1]
  3324  	v_0 := v.Args[0]
  3325  	b := v.Block
  3326  	typ := &b.Func.Config.Types
  3327  	// match: (Neq64 x y)
  3328  	// result: (Not (Eq64 x y))
  3329  	for {
  3330  		x := v_0
  3331  		y := v_1
  3332  		v.reset(OpNot)
  3333  		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
  3334  		v0.AddArg2(x, y)
  3335  		v.AddArg(v0)
  3336  		return true
  3337  	}
  3338  }
  3339  func rewriteValueRISCV64_OpNeq8(v *Value) bool {
  3340  	v_1 := v.Args[1]
  3341  	v_0 := v.Args[0]
  3342  	b := v.Block
  3343  	typ := &b.Func.Config.Types
  3344  	// match: (Neq8 x y)
  3345  	// result: (Not (Eq8 x y))
  3346  	for {
  3347  		x := v_0
  3348  		y := v_1
  3349  		v.reset(OpNot)
  3350  		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
  3351  		v0.AddArg2(x, y)
  3352  		v.AddArg(v0)
  3353  		return true
  3354  	}
  3355  }
  3356  func rewriteValueRISCV64_OpNeqB(v *Value) bool {
  3357  	v_1 := v.Args[1]
  3358  	v_0 := v.Args[0]
  3359  	b := v.Block
  3360  	typ := &b.Func.Config.Types
  3361  	// match: (NeqB x y)
  3362  	// result: (SNEZ (SUB <typ.Bool> x y))
  3363  	for {
  3364  		x := v_0
  3365  		y := v_1
  3366  		v.reset(OpRISCV64SNEZ)
  3367  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
  3368  		v0.AddArg2(x, y)
  3369  		v.AddArg(v0)
  3370  		return true
  3371  	}
  3372  }
  3373  func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
  3374  	v_1 := v.Args[1]
  3375  	v_0 := v.Args[0]
  3376  	b := v.Block
  3377  	typ := &b.Func.Config.Types
  3378  	// match: (NeqPtr x y)
  3379  	// result: (Not (EqPtr x y))
  3380  	for {
  3381  		x := v_0
  3382  		y := v_1
  3383  		v.reset(OpNot)
  3384  		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
  3385  		v0.AddArg2(x, y)
  3386  		v.AddArg(v0)
  3387  		return true
  3388  	}
  3389  }
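// Note: Neq16/Neq32/Neq64/Neq8/NeqPtr are not lowered directly; they are
// rewritten to (Not (Eq* x y)) and handled by the Eq* and Not lowerings,
// while NeqB compares booleans directly via SNEZ of their difference.
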
  3390  func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
  3391  	v_0 := v.Args[0]
  3392  	b := v.Block
  3393  	typ := &b.Func.Config.Types
  3394  	// match: (OffPtr [off] ptr:(SP))
  3395  	// cond: is32Bit(off)
  3396  	// result: (MOVaddr [int32(off)] ptr)
  3397  	for {
  3398  		off := auxIntToInt64(v.AuxInt)
  3399  		ptr := v_0
  3400  		if ptr.Op != OpSP || !(is32Bit(off)) {
  3401  			break
  3402  		}
  3403  		v.reset(OpRISCV64MOVaddr)
  3404  		v.AuxInt = int32ToAuxInt(int32(off))
  3405  		v.AddArg(ptr)
  3406  		return true
  3407  	}
  3408  	// match: (OffPtr [off] ptr)
  3409  	// cond: is32Bit(off)
  3410  	// result: (ADDI [off] ptr)
  3411  	for {
  3412  		off := auxIntToInt64(v.AuxInt)
  3413  		ptr := v_0
  3414  		if !(is32Bit(off)) {
  3415  			break
  3416  		}
  3417  		v.reset(OpRISCV64ADDI)
  3418  		v.AuxInt = int64ToAuxInt(off)
  3419  		v.AddArg(ptr)
  3420  		return true
  3421  	}
  3422  	// match: (OffPtr [off] ptr)
  3423  	// result: (ADD (MOVDconst [off]) ptr)
  3424  	for {
  3425  		off := auxIntToInt64(v.AuxInt)
  3426  		ptr := v_0
  3427  		v.reset(OpRISCV64ADD)
  3428  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  3429  		v0.AuxInt = int64ToAuxInt(off)
  3430  		v.AddArg2(v0, ptr)
  3431  		return true
  3432  	}
  3433  }
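// Note: OffPtr prefers the immediate forms: an offset from SP folds into a
// MOVaddr, any other offset that fits in 32 bits becomes a single ADDI, and
// only offsets outside the signed 32-bit range fall back to materializing
// the constant with MOVDconst followed by an ADD.
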
  3434  func rewriteValueRISCV64_OpPopCount16(v *Value) bool {
  3435  	v_0 := v.Args[0]
  3436  	b := v.Block
  3437  	typ := &b.Func.Config.Types
  3438  	// match: (PopCount16 x)
  3439  	// result: (CPOP (ZeroExt16to64 x))
  3440  	for {
  3441  		x := v_0
  3442  		v.reset(OpRISCV64CPOP)
  3443  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  3444  		v0.AddArg(x)
  3445  		v.AddArg(v0)
  3446  		return true
  3447  	}
  3448  }
  3449  func rewriteValueRISCV64_OpPopCount8(v *Value) bool {
  3450  	v_0 := v.Args[0]
  3451  	b := v.Block
  3452  	typ := &b.Func.Config.Types
  3453  	// match: (PopCount8 x)
  3454  	// result: (CPOP (ZeroExt8to64 x))
  3455  	for {
  3456  		x := v_0
  3457  		v.reset(OpRISCV64CPOP)
  3458  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  3459  		v0.AddArg(x)
  3460  		v.AddArg(v0)
  3461  		return true
  3462  	}
  3463  }
  3464  func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
  3465  	v_1 := v.Args[1]
  3466  	v_0 := v.Args[0]
  3467  	// match: (ADD (MOVDconst <t> [val]) x)
  3468  	// cond: is32Bit(val) && !t.IsPtr()
  3469  	// result: (ADDI [val] x)
  3470  	for {
  3471  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3472  			if v_0.Op != OpRISCV64MOVDconst {
  3473  				continue
  3474  			}
  3475  			t := v_0.Type
  3476  			val := auxIntToInt64(v_0.AuxInt)
  3477  			x := v_1
  3478  			if !(is32Bit(val) && !t.IsPtr()) {
  3479  				continue
  3480  			}
  3481  			v.reset(OpRISCV64ADDI)
  3482  			v.AuxInt = int64ToAuxInt(val)
  3483  			v.AddArg(x)
  3484  			return true
  3485  		}
  3486  		break
  3487  	}
  3488  	// match: (ADD x (NEG y))
  3489  	// result: (SUB x y)
  3490  	for {
  3491  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3492  			x := v_0
  3493  			if v_1.Op != OpRISCV64NEG {
  3494  				continue
  3495  			}
  3496  			y := v_1.Args[0]
  3497  			v.reset(OpRISCV64SUB)
  3498  			v.AddArg2(x, y)
  3499  			return true
  3500  		}
  3501  		break
  3502  	}
  3503  	// match: (ADD (SLLI [1] x) y)
  3504  	// cond: buildcfg.GORISCV64 >= 22
  3505  	// result: (SH1ADD x y)
  3506  	for {
  3507  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3508  			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 1 {
  3509  				continue
  3510  			}
  3511  			x := v_0.Args[0]
  3512  			y := v_1
  3513  			if !(buildcfg.GORISCV64 >= 22) {
  3514  				continue
  3515  			}
  3516  			v.reset(OpRISCV64SH1ADD)
  3517  			v.AddArg2(x, y)
  3518  			return true
  3519  		}
  3520  		break
  3521  	}
  3522  	// match: (ADD (SLLI [2] x) y)
  3523  	// cond: buildcfg.GORISCV64 >= 22
  3524  	// result: (SH2ADD x y)
  3525  	for {
  3526  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3527  			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 2 {
  3528  				continue
  3529  			}
  3530  			x := v_0.Args[0]
  3531  			y := v_1
  3532  			if !(buildcfg.GORISCV64 >= 22) {
  3533  				continue
  3534  			}
  3535  			v.reset(OpRISCV64SH2ADD)
  3536  			v.AddArg2(x, y)
  3537  			return true
  3538  		}
  3539  		break
  3540  	}
  3541  	// match: (ADD (SLLI [3] x) y)
  3542  	// cond: buildcfg.GORISCV64 >= 22
  3543  	// result: (SH3ADD x y)
  3544  	for {
  3545  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3546  			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 3 {
  3547  				continue
  3548  			}
  3549  			x := v_0.Args[0]
  3550  			y := v_1
  3551  			if !(buildcfg.GORISCV64 >= 22) {
  3552  				continue
  3553  			}
  3554  			v.reset(OpRISCV64SH3ADD)
  3555  			v.AddArg2(x, y)
  3556  			return true
  3557  		}
  3558  		break
  3559  	}
  3560  	return false
  3561  }
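// Note: the SH1ADD/SH2ADD/SH3ADD rules above fuse an add with a shift by 1,
// 2 or 3, i.e. y + x*2, y + x*4 and y + x*8 (the common scaled-index
// computation). They are guarded by buildcfg.GORISCV64 >= 22 because the
// instructions come from the Zba extension, which the rva22u64 profile
// includes.
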
  3562  func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
  3563  	v_0 := v.Args[0]
  3564  	// match: (ADDI [c] (MOVaddr [d] {s} x))
  3565  	// cond: is32Bit(c+int64(d))
  3566  	// result: (MOVaddr [int32(c)+d] {s} x)
  3567  	for {
  3568  		c := auxIntToInt64(v.AuxInt)
  3569  		if v_0.Op != OpRISCV64MOVaddr {
  3570  			break
  3571  		}
  3572  		d := auxIntToInt32(v_0.AuxInt)
  3573  		s := auxToSym(v_0.Aux)
  3574  		x := v_0.Args[0]
  3575  		if !(is32Bit(c + int64(d))) {
  3576  			break
  3577  		}
  3578  		v.reset(OpRISCV64MOVaddr)
  3579  		v.AuxInt = int32ToAuxInt(int32(c) + d)
  3580  		v.Aux = symToAux(s)
  3581  		v.AddArg(x)
  3582  		return true
  3583  	}
  3584  	// match: (ADDI [0] x)
  3585  	// result: x
  3586  	for {
  3587  		if auxIntToInt64(v.AuxInt) != 0 {
  3588  			break
  3589  		}
  3590  		x := v_0
  3591  		v.copyOf(x)
  3592  		return true
  3593  	}
  3594  	// match: (ADDI [x] (MOVDconst [y]))
  3595  	// cond: is32Bit(x + y)
  3596  	// result: (MOVDconst [x + y])
  3597  	for {
  3598  		x := auxIntToInt64(v.AuxInt)
  3599  		if v_0.Op != OpRISCV64MOVDconst {
  3600  			break
  3601  		}
  3602  		y := auxIntToInt64(v_0.AuxInt)
  3603  		if !(is32Bit(x + y)) {
  3604  			break
  3605  		}
  3606  		v.reset(OpRISCV64MOVDconst)
  3607  		v.AuxInt = int64ToAuxInt(x + y)
  3608  		return true
  3609  	}
  3610  	// match: (ADDI [x] (ADDI [y] z))
  3611  	// cond: is32Bit(x + y)
  3612  	// result: (ADDI [x + y] z)
  3613  	for {
  3614  		x := auxIntToInt64(v.AuxInt)
  3615  		if v_0.Op != OpRISCV64ADDI {
  3616  			break
  3617  		}
  3618  		y := auxIntToInt64(v_0.AuxInt)
  3619  		z := v_0.Args[0]
  3620  		if !(is32Bit(x + y)) {
  3621  			break
  3622  		}
  3623  		v.reset(OpRISCV64ADDI)
  3624  		v.AuxInt = int64ToAuxInt(x + y)
  3625  		v.AddArg(z)
  3626  		return true
  3627  	}
  3628  	return false
  3629  }
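// The ADDI rules fold immediates upward: an ADDI of a MOVaddr or MOVDconst is
// absorbed into the address or constant, (ADDI [0] x) is dropped, and chained
// ADDIs merge, e.g. (ADDI [4] (ADDI [8] x)) -> (ADDI [12] x). Each merge is
// guarded by is32Bit so the combined immediate stays within a 32-bit range
// (the same guard used when offsets are later folded into loads and stores).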
  3630  func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
  3631  	v_1 := v.Args[1]
  3632  	v_0 := v.Args[0]
  3633  	// match: (AND (MOVDconst [val]) x)
  3634  	// cond: is32Bit(val)
  3635  	// result: (ANDI [val] x)
  3636  	for {
  3637  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3638  			if v_0.Op != OpRISCV64MOVDconst {
  3639  				continue
  3640  			}
  3641  			val := auxIntToInt64(v_0.AuxInt)
  3642  			x := v_1
  3643  			if !(is32Bit(val)) {
  3644  				continue
  3645  			}
  3646  			v.reset(OpRISCV64ANDI)
  3647  			v.AuxInt = int64ToAuxInt(val)
  3648  			v.AddArg(x)
  3649  			return true
  3650  		}
  3651  		break
  3652  	}
  3653  	// match: (AND x x)
  3654  	// result: x
  3655  	for {
  3656  		x := v_0
  3657  		if x != v_1 {
  3658  			break
  3659  		}
  3660  		v.copyOf(x)
  3661  		return true
  3662  	}
  3663  	return false
  3664  }
  3665  func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
  3666  	v_0 := v.Args[0]
  3667  	// match: (ANDI [0] x)
  3668  	// result: (MOVDconst [0])
  3669  	for {
  3670  		if auxIntToInt64(v.AuxInt) != 0 {
  3671  			break
  3672  		}
  3673  		v.reset(OpRISCV64MOVDconst)
  3674  		v.AuxInt = int64ToAuxInt(0)
  3675  		return true
  3676  	}
  3677  	// match: (ANDI [-1] x)
  3678  	// result: x
  3679  	for {
  3680  		if auxIntToInt64(v.AuxInt) != -1 {
  3681  			break
  3682  		}
  3683  		x := v_0
  3684  		v.copyOf(x)
  3685  		return true
  3686  	}
  3687  	// match: (ANDI [x] (MOVDconst [y]))
  3688  	// result: (MOVDconst [x & y])
  3689  	for {
  3690  		x := auxIntToInt64(v.AuxInt)
  3691  		if v_0.Op != OpRISCV64MOVDconst {
  3692  			break
  3693  		}
  3694  		y := auxIntToInt64(v_0.AuxInt)
  3695  		v.reset(OpRISCV64MOVDconst)
  3696  		v.AuxInt = int64ToAuxInt(x & y)
  3697  		return true
  3698  	}
  3699  	// match: (ANDI [x] (ANDI [y] z))
  3700  	// result: (ANDI [x & y] z)
  3701  	for {
  3702  		x := auxIntToInt64(v.AuxInt)
  3703  		if v_0.Op != OpRISCV64ANDI {
  3704  			break
  3705  		}
  3706  		y := auxIntToInt64(v_0.AuxInt)
  3707  		z := v_0.Args[0]
  3708  		v.reset(OpRISCV64ANDI)
  3709  		v.AuxInt = int64ToAuxInt(x & y)
  3710  		v.AddArg(z)
  3711  		return true
  3712  	}
  3713  	return false
  3714  }
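// AND/ANDI constant handling: an AND with a 32-bit constant operand becomes
// ANDI, (ANDI [0] x) is the zero constant, (ANDI [-1] x) is the identity, a
// constant operand is folded at compile time, and nested masks combine, e.g.
// (ANDI [0xff] (ANDI [0xf0] x)) -> (ANDI [0xf0] x).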
  3715  func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
  3716  	v_1 := v.Args[1]
  3717  	v_0 := v.Args[0]
  3718  	// match: (FADDD a (FMULD x y))
  3719  	// cond: a.Block.Func.useFMA(v)
  3720  	// result: (FMADDD x y a)
  3721  	for {
  3722  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3723  			a := v_0
  3724  			if v_1.Op != OpRISCV64FMULD {
  3725  				continue
  3726  			}
  3727  			y := v_1.Args[1]
  3728  			x := v_1.Args[0]
  3729  			if !(a.Block.Func.useFMA(v)) {
  3730  				continue
  3731  			}
  3732  			v.reset(OpRISCV64FMADDD)
  3733  			v.AddArg3(x, y, a)
  3734  			return true
  3735  		}
  3736  		break
  3737  	}
  3738  	return false
  3739  }
  3740  func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
  3741  	v_1 := v.Args[1]
  3742  	v_0 := v.Args[0]
  3743  	// match: (FADDS a (FMULS x y))
  3744  	// cond: a.Block.Func.useFMA(v)
  3745  	// result: (FMADDS x y a)
  3746  	for {
  3747  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3748  			a := v_0
  3749  			if v_1.Op != OpRISCV64FMULS {
  3750  				continue
  3751  			}
  3752  			y := v_1.Args[1]
  3753  			x := v_1.Args[0]
  3754  			if !(a.Block.Func.useFMA(v)) {
  3755  				continue
  3756  			}
  3757  			v.reset(OpRISCV64FMADDS)
  3758  			v.AddArg3(x, y, a)
  3759  			return true
  3760  		}
  3761  		break
  3762  	}
  3763  	return false
  3764  }
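// The FADDD/FADDS rules fuse a multiply feeding an add into FMADDD/FMADDS,
// computing x*y + a with a single rounding. a.Block.Func.useFMA(v) reports
// whether fusion is permitted at this value (target FMA support, plus an
// optional debugging hash that can suppress individual fusions).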
  3765  func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
  3766  	v_2 := v.Args[2]
  3767  	v_1 := v.Args[1]
  3768  	v_0 := v.Args[0]
  3769  	// match: (FMADDD neg:(FNEGD x) y z)
  3770  	// cond: neg.Uses == 1
  3771  	// result: (FNMSUBD x y z)
  3772  	for {
  3773  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3774  			neg := v_0
  3775  			if neg.Op != OpRISCV64FNEGD {
  3776  				continue
  3777  			}
  3778  			x := neg.Args[0]
  3779  			y := v_1
  3780  			z := v_2
  3781  			if !(neg.Uses == 1) {
  3782  				continue
  3783  			}
  3784  			v.reset(OpRISCV64FNMSUBD)
  3785  			v.AddArg3(x, y, z)
  3786  			return true
  3787  		}
  3788  		break
  3789  	}
  3790  	// match: (FMADDD x y neg:(FNEGD z))
  3791  	// cond: neg.Uses == 1
  3792  	// result: (FMSUBD x y z)
  3793  	for {
  3794  		x := v_0
  3795  		y := v_1
  3796  		neg := v_2
  3797  		if neg.Op != OpRISCV64FNEGD {
  3798  			break
  3799  		}
  3800  		z := neg.Args[0]
  3801  		if !(neg.Uses == 1) {
  3802  			break
  3803  		}
  3804  		v.reset(OpRISCV64FMSUBD)
  3805  		v.AddArg3(x, y, z)
  3806  		return true
  3807  	}
  3808  	return false
  3809  }
  3810  func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
  3811  	v_2 := v.Args[2]
  3812  	v_1 := v.Args[1]
  3813  	v_0 := v.Args[0]
  3814  	// match: (FMADDS neg:(FNEGS x) y z)
  3815  	// cond: neg.Uses == 1
  3816  	// result: (FNMSUBS x y z)
  3817  	for {
  3818  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3819  			neg := v_0
  3820  			if neg.Op != OpRISCV64FNEGS {
  3821  				continue
  3822  			}
  3823  			x := neg.Args[0]
  3824  			y := v_1
  3825  			z := v_2
  3826  			if !(neg.Uses == 1) {
  3827  				continue
  3828  			}
  3829  			v.reset(OpRISCV64FNMSUBS)
  3830  			v.AddArg3(x, y, z)
  3831  			return true
  3832  		}
  3833  		break
  3834  	}
  3835  	// match: (FMADDS x y neg:(FNEGS z))
  3836  	// cond: neg.Uses == 1
  3837  	// result: (FMSUBS x y z)
  3838  	for {
  3839  		x := v_0
  3840  		y := v_1
  3841  		neg := v_2
  3842  		if neg.Op != OpRISCV64FNEGS {
  3843  			break
  3844  		}
  3845  		z := neg.Args[0]
  3846  		if !(neg.Uses == 1) {
  3847  			break
  3848  		}
  3849  		v.reset(OpRISCV64FMSUBS)
  3850  		v.AddArg3(x, y, z)
  3851  		return true
  3852  	}
  3853  	return false
  3854  }
  3855  func rewriteValueRISCV64_OpRISCV64FMOVDload(v *Value) bool {
  3856  	v_1 := v.Args[1]
  3857  	v_0 := v.Args[0]
  3858  	b := v.Block
  3859  	config := b.Func.Config
  3860  	// match: (FMOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  3861  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  3862  	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  3863  	for {
  3864  		off1 := auxIntToInt32(v.AuxInt)
  3865  		sym1 := auxToSym(v.Aux)
  3866  		if v_0.Op != OpRISCV64MOVaddr {
  3867  			break
  3868  		}
  3869  		off2 := auxIntToInt32(v_0.AuxInt)
  3870  		sym2 := auxToSym(v_0.Aux)
  3871  		base := v_0.Args[0]
  3872  		mem := v_1
  3873  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3874  			break
  3875  		}
  3876  		v.reset(OpRISCV64FMOVDload)
  3877  		v.AuxInt = int32ToAuxInt(off1 + off2)
  3878  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3879  		v.AddArg2(base, mem)
  3880  		return true
  3881  	}
  3882  	// match: (FMOVDload [off1] {sym} (ADDI [off2] base) mem)
  3883  	// cond: is32Bit(int64(off1)+off2)
  3884  	// result: (FMOVDload [off1+int32(off2)] {sym} base mem)
  3885  	for {
  3886  		off1 := auxIntToInt32(v.AuxInt)
  3887  		sym := auxToSym(v.Aux)
  3888  		if v_0.Op != OpRISCV64ADDI {
  3889  			break
  3890  		}
  3891  		off2 := auxIntToInt64(v_0.AuxInt)
  3892  		base := v_0.Args[0]
  3893  		mem := v_1
  3894  		if !(is32Bit(int64(off1) + off2)) {
  3895  			break
  3896  		}
  3897  		v.reset(OpRISCV64FMOVDload)
  3898  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3899  		v.Aux = symToAux(sym)
  3900  		v.AddArg2(base, mem)
  3901  		return true
  3902  	}
  3903  	// match: (FMOVDload [off] {sym} ptr1 (MOVDstore [off] {sym} ptr2 x _))
  3904  	// cond: isSamePtr(ptr1, ptr2)
  3905  	// result: (FMVDX x)
  3906  	for {
  3907  		off := auxIntToInt32(v.AuxInt)
  3908  		sym := auxToSym(v.Aux)
  3909  		ptr1 := v_0
  3910  		if v_1.Op != OpRISCV64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  3911  			break
  3912  		}
  3913  		x := v_1.Args[1]
  3914  		ptr2 := v_1.Args[0]
  3915  		if !(isSamePtr(ptr1, ptr2)) {
  3916  			break
  3917  		}
  3918  		v.reset(OpRISCV64FMVDX)
  3919  		v.AddArg(x)
  3920  		return true
  3921  	}
  3922  	return false
  3923  }
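// FMOVDload folds address arithmetic (MOVaddr with mergeable symbols, ADDI
// offsets) into its own offset, and the last rule replaces a float load of a
// location just written by an integer MOVDstore to the same pointer with
// FMVDX, a direct integer-to-float register move instead of a round trip
// through memory. The FMOVW rules below do the same for 32-bit floats using
// FMVSX.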
  3924  func rewriteValueRISCV64_OpRISCV64FMOVDstore(v *Value) bool {
  3925  	v_2 := v.Args[2]
  3926  	v_1 := v.Args[1]
  3927  	v_0 := v.Args[0]
  3928  	b := v.Block
  3929  	config := b.Func.Config
  3930  	// match: (FMOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  3931  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  3932  	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  3933  	for {
  3934  		off1 := auxIntToInt32(v.AuxInt)
  3935  		sym1 := auxToSym(v.Aux)
  3936  		if v_0.Op != OpRISCV64MOVaddr {
  3937  			break
  3938  		}
  3939  		off2 := auxIntToInt32(v_0.AuxInt)
  3940  		sym2 := auxToSym(v_0.Aux)
  3941  		base := v_0.Args[0]
  3942  		val := v_1
  3943  		mem := v_2
  3944  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3945  			break
  3946  		}
  3947  		v.reset(OpRISCV64FMOVDstore)
  3948  		v.AuxInt = int32ToAuxInt(off1 + off2)
  3949  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3950  		v.AddArg3(base, val, mem)
  3951  		return true
  3952  	}
  3953  	// match: (FMOVDstore [off1] {sym} (ADDI [off2] base) val mem)
  3954  	// cond: is32Bit(int64(off1)+off2)
  3955  	// result: (FMOVDstore [off1+int32(off2)] {sym} base val mem)
  3956  	for {
  3957  		off1 := auxIntToInt32(v.AuxInt)
  3958  		sym := auxToSym(v.Aux)
  3959  		if v_0.Op != OpRISCV64ADDI {
  3960  			break
  3961  		}
  3962  		off2 := auxIntToInt64(v_0.AuxInt)
  3963  		base := v_0.Args[0]
  3964  		val := v_1
  3965  		mem := v_2
  3966  		if !(is32Bit(int64(off1) + off2)) {
  3967  			break
  3968  		}
  3969  		v.reset(OpRISCV64FMOVDstore)
  3970  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3971  		v.Aux = symToAux(sym)
  3972  		v.AddArg3(base, val, mem)
  3973  		return true
  3974  	}
  3975  	return false
  3976  }
  3977  func rewriteValueRISCV64_OpRISCV64FMOVWload(v *Value) bool {
  3978  	v_1 := v.Args[1]
  3979  	v_0 := v.Args[0]
  3980  	b := v.Block
  3981  	config := b.Func.Config
  3982  	// match: (FMOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  3983  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  3984  	// result: (FMOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  3985  	for {
  3986  		off1 := auxIntToInt32(v.AuxInt)
  3987  		sym1 := auxToSym(v.Aux)
  3988  		if v_0.Op != OpRISCV64MOVaddr {
  3989  			break
  3990  		}
  3991  		off2 := auxIntToInt32(v_0.AuxInt)
  3992  		sym2 := auxToSym(v_0.Aux)
  3993  		base := v_0.Args[0]
  3994  		mem := v_1
  3995  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3996  			break
  3997  		}
  3998  		v.reset(OpRISCV64FMOVWload)
  3999  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4000  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4001  		v.AddArg2(base, mem)
  4002  		return true
  4003  	}
  4004  	// match: (FMOVWload [off1] {sym} (ADDI [off2] base) mem)
  4005  	// cond: is32Bit(int64(off1)+off2)
  4006  	// result: (FMOVWload [off1+int32(off2)] {sym} base mem)
  4007  	for {
  4008  		off1 := auxIntToInt32(v.AuxInt)
  4009  		sym := auxToSym(v.Aux)
  4010  		if v_0.Op != OpRISCV64ADDI {
  4011  			break
  4012  		}
  4013  		off2 := auxIntToInt64(v_0.AuxInt)
  4014  		base := v_0.Args[0]
  4015  		mem := v_1
  4016  		if !(is32Bit(int64(off1) + off2)) {
  4017  			break
  4018  		}
  4019  		v.reset(OpRISCV64FMOVWload)
  4020  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4021  		v.Aux = symToAux(sym)
  4022  		v.AddArg2(base, mem)
  4023  		return true
  4024  	}
  4025  	// match: (FMOVWload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
  4026  	// cond: isSamePtr(ptr1, ptr2)
  4027  	// result: (FMVSX x)
  4028  	for {
  4029  		off := auxIntToInt32(v.AuxInt)
  4030  		sym := auxToSym(v.Aux)
  4031  		ptr1 := v_0
  4032  		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  4033  			break
  4034  		}
  4035  		x := v_1.Args[1]
  4036  		ptr2 := v_1.Args[0]
  4037  		if !(isSamePtr(ptr1, ptr2)) {
  4038  			break
  4039  		}
  4040  		v.reset(OpRISCV64FMVSX)
  4041  		v.AddArg(x)
  4042  		return true
  4043  	}
  4044  	return false
  4045  }
  4046  func rewriteValueRISCV64_OpRISCV64FMOVWstore(v *Value) bool {
  4047  	v_2 := v.Args[2]
  4048  	v_1 := v.Args[1]
  4049  	v_0 := v.Args[0]
  4050  	b := v.Block
  4051  	config := b.Func.Config
  4052  	// match: (FMOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  4053  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4054  	// result: (FMOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  4055  	for {
  4056  		off1 := auxIntToInt32(v.AuxInt)
  4057  		sym1 := auxToSym(v.Aux)
  4058  		if v_0.Op != OpRISCV64MOVaddr {
  4059  			break
  4060  		}
  4061  		off2 := auxIntToInt32(v_0.AuxInt)
  4062  		sym2 := auxToSym(v_0.Aux)
  4063  		base := v_0.Args[0]
  4064  		val := v_1
  4065  		mem := v_2
  4066  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4067  			break
  4068  		}
  4069  		v.reset(OpRISCV64FMOVWstore)
  4070  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4071  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4072  		v.AddArg3(base, val, mem)
  4073  		return true
  4074  	}
  4075  	// match: (FMOVWstore [off1] {sym} (ADDI [off2] base) val mem)
  4076  	// cond: is32Bit(int64(off1)+off2)
  4077  	// result: (FMOVWstore [off1+int32(off2)] {sym} base val mem)
  4078  	for {
  4079  		off1 := auxIntToInt32(v.AuxInt)
  4080  		sym := auxToSym(v.Aux)
  4081  		if v_0.Op != OpRISCV64ADDI {
  4082  			break
  4083  		}
  4084  		off2 := auxIntToInt64(v_0.AuxInt)
  4085  		base := v_0.Args[0]
  4086  		val := v_1
  4087  		mem := v_2
  4088  		if !(is32Bit(int64(off1) + off2)) {
  4089  			break
  4090  		}
  4091  		v.reset(OpRISCV64FMOVWstore)
  4092  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4093  		v.Aux = symToAux(sym)
  4094  		v.AddArg3(base, val, mem)
  4095  		return true
  4096  	}
  4097  	return false
  4098  }
  4099  func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
  4100  	v_2 := v.Args[2]
  4101  	v_1 := v.Args[1]
  4102  	v_0 := v.Args[0]
  4103  	// match: (FMSUBD neg:(FNEGD x) y z)
  4104  	// cond: neg.Uses == 1
  4105  	// result: (FNMADDD x y z)
  4106  	for {
  4107  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4108  			neg := v_0
  4109  			if neg.Op != OpRISCV64FNEGD {
  4110  				continue
  4111  			}
  4112  			x := neg.Args[0]
  4113  			y := v_1
  4114  			z := v_2
  4115  			if !(neg.Uses == 1) {
  4116  				continue
  4117  			}
  4118  			v.reset(OpRISCV64FNMADDD)
  4119  			v.AddArg3(x, y, z)
  4120  			return true
  4121  		}
  4122  		break
  4123  	}
  4124  	// match: (FMSUBD x y neg:(FNEGD z))
  4125  	// cond: neg.Uses == 1
  4126  	// result: (FMADDD x y z)
  4127  	for {
  4128  		x := v_0
  4129  		y := v_1
  4130  		neg := v_2
  4131  		if neg.Op != OpRISCV64FNEGD {
  4132  			break
  4133  		}
  4134  		z := neg.Args[0]
  4135  		if !(neg.Uses == 1) {
  4136  			break
  4137  		}
  4138  		v.reset(OpRISCV64FMADDD)
  4139  		v.AddArg3(x, y, z)
  4140  		return true
  4141  	}
  4142  	return false
  4143  }
  4144  func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
  4145  	v_2 := v.Args[2]
  4146  	v_1 := v.Args[1]
  4147  	v_0 := v.Args[0]
  4148  	// match: (FMSUBS neg:(FNEGS x) y z)
  4149  	// cond: neg.Uses == 1
  4150  	// result: (FNMADDS x y z)
  4151  	for {
  4152  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4153  			neg := v_0
  4154  			if neg.Op != OpRISCV64FNEGS {
  4155  				continue
  4156  			}
  4157  			x := neg.Args[0]
  4158  			y := v_1
  4159  			z := v_2
  4160  			if !(neg.Uses == 1) {
  4161  				continue
  4162  			}
  4163  			v.reset(OpRISCV64FNMADDS)
  4164  			v.AddArg3(x, y, z)
  4165  			return true
  4166  		}
  4167  		break
  4168  	}
  4169  	// match: (FMSUBS x y neg:(FNEGS z))
  4170  	// cond: neg.Uses == 1
  4171  	// result: (FMADDS x y z)
  4172  	for {
  4173  		x := v_0
  4174  		y := v_1
  4175  		neg := v_2
  4176  		if neg.Op != OpRISCV64FNEGS {
  4177  			break
  4178  		}
  4179  		z := neg.Args[0]
  4180  		if !(neg.Uses == 1) {
  4181  			break
  4182  		}
  4183  		v.reset(OpRISCV64FMADDS)
  4184  		v.AddArg3(x, y, z)
  4185  		return true
  4186  	}
  4187  	return false
  4188  }
  4189  func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
  4190  	v_2 := v.Args[2]
  4191  	v_1 := v.Args[1]
  4192  	v_0 := v.Args[0]
  4193  	// match: (FNMADDD neg:(FNEGD x) y z)
  4194  	// cond: neg.Uses == 1
  4195  	// result: (FMSUBD x y z)
  4196  	for {
  4197  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4198  			neg := v_0
  4199  			if neg.Op != OpRISCV64FNEGD {
  4200  				continue
  4201  			}
  4202  			x := neg.Args[0]
  4203  			y := v_1
  4204  			z := v_2
  4205  			if !(neg.Uses == 1) {
  4206  				continue
  4207  			}
  4208  			v.reset(OpRISCV64FMSUBD)
  4209  			v.AddArg3(x, y, z)
  4210  			return true
  4211  		}
  4212  		break
  4213  	}
  4214  	// match: (FNMADDD x y neg:(FNEGD z))
  4215  	// cond: neg.Uses == 1
  4216  	// result: (FNMSUBD x y z)
  4217  	for {
  4218  		x := v_0
  4219  		y := v_1
  4220  		neg := v_2
  4221  		if neg.Op != OpRISCV64FNEGD {
  4222  			break
  4223  		}
  4224  		z := neg.Args[0]
  4225  		if !(neg.Uses == 1) {
  4226  			break
  4227  		}
  4228  		v.reset(OpRISCV64FNMSUBD)
  4229  		v.AddArg3(x, y, z)
  4230  		return true
  4231  	}
  4232  	return false
  4233  }
  4234  func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
  4235  	v_2 := v.Args[2]
  4236  	v_1 := v.Args[1]
  4237  	v_0 := v.Args[0]
  4238  	// match: (FNMADDS neg:(FNEGS x) y z)
  4239  	// cond: neg.Uses == 1
  4240  	// result: (FMSUBS x y z)
  4241  	for {
  4242  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4243  			neg := v_0
  4244  			if neg.Op != OpRISCV64FNEGS {
  4245  				continue
  4246  			}
  4247  			x := neg.Args[0]
  4248  			y := v_1
  4249  			z := v_2
  4250  			if !(neg.Uses == 1) {
  4251  				continue
  4252  			}
  4253  			v.reset(OpRISCV64FMSUBS)
  4254  			v.AddArg3(x, y, z)
  4255  			return true
  4256  		}
  4257  		break
  4258  	}
  4259  	// match: (FNMADDS x y neg:(FNEGS z))
  4260  	// cond: neg.Uses == 1
  4261  	// result: (FNMSUBS x y z)
  4262  	for {
  4263  		x := v_0
  4264  		y := v_1
  4265  		neg := v_2
  4266  		if neg.Op != OpRISCV64FNEGS {
  4267  			break
  4268  		}
  4269  		z := neg.Args[0]
  4270  		if !(neg.Uses == 1) {
  4271  			break
  4272  		}
  4273  		v.reset(OpRISCV64FNMSUBS)
  4274  		v.AddArg3(x, y, z)
  4275  		return true
  4276  	}
  4277  	return false
  4278  }
  4279  func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
  4280  	v_2 := v.Args[2]
  4281  	v_1 := v.Args[1]
  4282  	v_0 := v.Args[0]
  4283  	// match: (FNMSUBD neg:(FNEGD x) y z)
  4284  	// cond: neg.Uses == 1
  4285  	// result: (FMADDD x y z)
  4286  	for {
  4287  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4288  			neg := v_0
  4289  			if neg.Op != OpRISCV64FNEGD {
  4290  				continue
  4291  			}
  4292  			x := neg.Args[0]
  4293  			y := v_1
  4294  			z := v_2
  4295  			if !(neg.Uses == 1) {
  4296  				continue
  4297  			}
  4298  			v.reset(OpRISCV64FMADDD)
  4299  			v.AddArg3(x, y, z)
  4300  			return true
  4301  		}
  4302  		break
  4303  	}
  4304  	// match: (FNMSUBD x y neg:(FNEGD z))
  4305  	// cond: neg.Uses == 1
  4306  	// result: (FNMADDD x y z)
  4307  	for {
  4308  		x := v_0
  4309  		y := v_1
  4310  		neg := v_2
  4311  		if neg.Op != OpRISCV64FNEGD {
  4312  			break
  4313  		}
  4314  		z := neg.Args[0]
  4315  		if !(neg.Uses == 1) {
  4316  			break
  4317  		}
  4318  		v.reset(OpRISCV64FNMADDD)
  4319  		v.AddArg3(x, y, z)
  4320  		return true
  4321  	}
  4322  	return false
  4323  }
  4324  func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
  4325  	v_2 := v.Args[2]
  4326  	v_1 := v.Args[1]
  4327  	v_0 := v.Args[0]
  4328  	// match: (FNMSUBS neg:(FNEGS x) y z)
  4329  	// cond: neg.Uses == 1
  4330  	// result: (FMADDS x y z)
  4331  	for {
  4332  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4333  			neg := v_0
  4334  			if neg.Op != OpRISCV64FNEGS {
  4335  				continue
  4336  			}
  4337  			x := neg.Args[0]
  4338  			y := v_1
  4339  			z := v_2
  4340  			if !(neg.Uses == 1) {
  4341  				continue
  4342  			}
  4343  			v.reset(OpRISCV64FMADDS)
  4344  			v.AddArg3(x, y, z)
  4345  			return true
  4346  		}
  4347  		break
  4348  	}
  4349  	// match: (FNMSUBS x y neg:(FNEGS z))
  4350  	// cond: neg.Uses == 1
  4351  	// result: (FNMADDS x y z)
  4352  	for {
  4353  		x := v_0
  4354  		y := v_1
  4355  		neg := v_2
  4356  		if neg.Op != OpRISCV64FNEGS {
  4357  			break
  4358  		}
  4359  		z := neg.Args[0]
  4360  		if !(neg.Uses == 1) {
  4361  			break
  4362  		}
  4363  		v.reset(OpRISCV64FNMADDS)
  4364  		v.AddArg3(x, y, z)
  4365  		return true
  4366  	}
  4367  	return false
  4368  }
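// The FNEG-folding rules for FMADD*/FMSUB*/FNMADD*/FNMSUB* above follow the
// RISC-V sign conventions: FMADD = x*y + z, FMSUB = x*y - z,
// FNMSUB = -(x*y) + z, FNMADD = -(x*y) - z. Negating the first multiplicand
// swaps FMADD<->FNMSUB and FMSUB<->FNMADD, while negating the addend swaps
// FMADD<->FMSUB and FNMADD<->FNMSUB. The neg.Uses == 1 condition ensures the
// FNEG has no other users, so it can be absorbed rather than duplicated.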
  4369  func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
  4370  	v_1 := v.Args[1]
  4371  	v_0 := v.Args[0]
  4372  	// match: (FSUBD a (FMULD x y))
  4373  	// cond: a.Block.Func.useFMA(v)
  4374  	// result: (FNMSUBD x y a)
  4375  	for {
  4376  		a := v_0
  4377  		if v_1.Op != OpRISCV64FMULD {
  4378  			break
  4379  		}
  4380  		y := v_1.Args[1]
  4381  		x := v_1.Args[0]
  4382  		if !(a.Block.Func.useFMA(v)) {
  4383  			break
  4384  		}
  4385  		v.reset(OpRISCV64FNMSUBD)
  4386  		v.AddArg3(x, y, a)
  4387  		return true
  4388  	}
  4389  	// match: (FSUBD (FMULD x y) a)
  4390  	// cond: a.Block.Func.useFMA(v)
  4391  	// result: (FMSUBD x y a)
  4392  	for {
  4393  		if v_0.Op != OpRISCV64FMULD {
  4394  			break
  4395  		}
  4396  		y := v_0.Args[1]
  4397  		x := v_0.Args[0]
  4398  		a := v_1
  4399  		if !(a.Block.Func.useFMA(v)) {
  4400  			break
  4401  		}
  4402  		v.reset(OpRISCV64FMSUBD)
  4403  		v.AddArg3(x, y, a)
  4404  		return true
  4405  	}
  4406  	return false
  4407  }
  4408  func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
  4409  	v_1 := v.Args[1]
  4410  	v_0 := v.Args[0]
  4411  	// match: (FSUBS a (FMULS x y))
  4412  	// cond: a.Block.Func.useFMA(v)
  4413  	// result: (FNMSUBS x y a)
  4414  	for {
  4415  		a := v_0
  4416  		if v_1.Op != OpRISCV64FMULS {
  4417  			break
  4418  		}
  4419  		y := v_1.Args[1]
  4420  		x := v_1.Args[0]
  4421  		if !(a.Block.Func.useFMA(v)) {
  4422  			break
  4423  		}
  4424  		v.reset(OpRISCV64FNMSUBS)
  4425  		v.AddArg3(x, y, a)
  4426  		return true
  4427  	}
  4428  	// match: (FSUBS (FMULS x y) a)
  4429  	// cond: a.Block.Func.useFMA(v)
  4430  	// result: (FMSUBS x y a)
  4431  	for {
  4432  		if v_0.Op != OpRISCV64FMULS {
  4433  			break
  4434  		}
  4435  		y := v_0.Args[1]
  4436  		x := v_0.Args[0]
  4437  		a := v_1
  4438  		if !(a.Block.Func.useFMA(v)) {
  4439  			break
  4440  		}
  4441  		v.reset(OpRISCV64FMSUBS)
  4442  		v.AddArg3(x, y, a)
  4443  		return true
  4444  	}
  4445  	return false
  4446  }
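// FSUBD/FSUBS fuse in both directions: a - x*y becomes FNMSUB (-(x*y) + a)
// and x*y - a becomes FMSUB, again gated by useFMA so fusion only happens
// where the single-rounding result is acceptable.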
  4447  func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR(v *Value) bool {
  4448  	v_1 := v.Args[1]
  4449  	v_0 := v.Args[0]
  4450  	// match: (LoweredPanicBoundsCR [kind] {p} (MOVDconst [c]) mem)
  4451  	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
  4452  	for {
  4453  		kind := auxIntToInt64(v.AuxInt)
  4454  		p := auxToPanicBoundsC(v.Aux)
  4455  		if v_0.Op != OpRISCV64MOVDconst {
  4456  			break
  4457  		}
  4458  		c := auxIntToInt64(v_0.AuxInt)
  4459  		mem := v_1
  4460  		v.reset(OpRISCV64LoweredPanicBoundsCC)
  4461  		v.AuxInt = int64ToAuxInt(kind)
  4462  		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
  4463  		v.AddArg(mem)
  4464  		return true
  4465  	}
  4466  	return false
  4467  }
  4468  func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC(v *Value) bool {
  4469  	v_1 := v.Args[1]
  4470  	v_0 := v.Args[0]
  4471  	// match: (LoweredPanicBoundsRC [kind] {p} (MOVDconst [c]) mem)
  4472  	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
  4473  	for {
  4474  		kind := auxIntToInt64(v.AuxInt)
  4475  		p := auxToPanicBoundsC(v.Aux)
  4476  		if v_0.Op != OpRISCV64MOVDconst {
  4477  			break
  4478  		}
  4479  		c := auxIntToInt64(v_0.AuxInt)
  4480  		mem := v_1
  4481  		v.reset(OpRISCV64LoweredPanicBoundsCC)
  4482  		v.AuxInt = int64ToAuxInt(kind)
  4483  		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
  4484  		v.AddArg(mem)
  4485  		return true
  4486  	}
  4487  	return false
  4488  }
  4489  func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR(v *Value) bool {
  4490  	v_2 := v.Args[2]
  4491  	v_1 := v.Args[1]
  4492  	v_0 := v.Args[0]
  4493  	// match: (LoweredPanicBoundsRR [kind] x (MOVDconst [c]) mem)
  4494  	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
  4495  	for {
  4496  		kind := auxIntToInt64(v.AuxInt)
  4497  		x := v_0
  4498  		if v_1.Op != OpRISCV64MOVDconst {
  4499  			break
  4500  		}
  4501  		c := auxIntToInt64(v_1.AuxInt)
  4502  		mem := v_2
  4503  		v.reset(OpRISCV64LoweredPanicBoundsRC)
  4504  		v.AuxInt = int64ToAuxInt(kind)
  4505  		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
  4506  		v.AddArg2(x, mem)
  4507  		return true
  4508  	}
  4509  	// match: (LoweredPanicBoundsRR [kind] (MOVDconst [c]) y mem)
  4510  	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
  4511  	for {
  4512  		kind := auxIntToInt64(v.AuxInt)
  4513  		if v_0.Op != OpRISCV64MOVDconst {
  4514  			break
  4515  		}
  4516  		c := auxIntToInt64(v_0.AuxInt)
  4517  		y := v_1
  4518  		mem := v_2
  4519  		v.reset(OpRISCV64LoweredPanicBoundsCR)
  4520  		v.AuxInt = int64ToAuxInt(kind)
  4521  		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
  4522  		v.AddArg2(y, mem)
  4523  		return true
  4524  	}
  4525  	return false
  4526  }
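// The LoweredPanicBounds rules migrate constant operands of a bounds-check
// failure out of registers and into the aux value: an RR form with a
// MOVDconst operand becomes RC or CR, and an RC/CR form whose remaining
// register operand is constant becomes CC, which carries both values in a
// PanicBoundsCC aux and needs no register arguments at all.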
  4527  func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
  4528  	v_1 := v.Args[1]
  4529  	v_0 := v.Args[0]
  4530  	b := v.Block
  4531  	config := b.Func.Config
  4532  	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4533  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4534  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4535  	for {
  4536  		off1 := auxIntToInt32(v.AuxInt)
  4537  		sym1 := auxToSym(v.Aux)
  4538  		if v_0.Op != OpRISCV64MOVaddr {
  4539  			break
  4540  		}
  4541  		off2 := auxIntToInt32(v_0.AuxInt)
  4542  		sym2 := auxToSym(v_0.Aux)
  4543  		base := v_0.Args[0]
  4544  		mem := v_1
  4545  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4546  			break
  4547  		}
  4548  		v.reset(OpRISCV64MOVBUload)
  4549  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4550  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4551  		v.AddArg2(base, mem)
  4552  		return true
  4553  	}
  4554  	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
  4555  	// cond: is32Bit(int64(off1)+off2)
  4556  	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
  4557  	for {
  4558  		off1 := auxIntToInt32(v.AuxInt)
  4559  		sym := auxToSym(v.Aux)
  4560  		if v_0.Op != OpRISCV64ADDI {
  4561  			break
  4562  		}
  4563  		off2 := auxIntToInt64(v_0.AuxInt)
  4564  		base := v_0.Args[0]
  4565  		mem := v_1
  4566  		if !(is32Bit(int64(off1) + off2)) {
  4567  			break
  4568  		}
  4569  		v.reset(OpRISCV64MOVBUload)
  4570  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4571  		v.Aux = symToAux(sym)
  4572  		v.AddArg2(base, mem)
  4573  		return true
  4574  	}
  4575  	return false
  4576  }
  4577  func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
  4578  	v_0 := v.Args[0]
  4579  	b := v.Block
  4580  	// match: (MOVBUreg x:(FLES _ _))
  4581  	// result: x
  4582  	for {
  4583  		x := v_0
  4584  		if x.Op != OpRISCV64FLES {
  4585  			break
  4586  		}
  4587  		v.copyOf(x)
  4588  		return true
  4589  	}
  4590  	// match: (MOVBUreg x:(FLTS _ _))
  4591  	// result: x
  4592  	for {
  4593  		x := v_0
  4594  		if x.Op != OpRISCV64FLTS {
  4595  			break
  4596  		}
  4597  		v.copyOf(x)
  4598  		return true
  4599  	}
  4600  	// match: (MOVBUreg x:(FEQS _ _))
  4601  	// result: x
  4602  	for {
  4603  		x := v_0
  4604  		if x.Op != OpRISCV64FEQS {
  4605  			break
  4606  		}
  4607  		v.copyOf(x)
  4608  		return true
  4609  	}
  4610  	// match: (MOVBUreg x:(FNES _ _))
  4611  	// result: x
  4612  	for {
  4613  		x := v_0
  4614  		if x.Op != OpRISCV64FNES {
  4615  			break
  4616  		}
  4617  		v.copyOf(x)
  4618  		return true
  4619  	}
  4620  	// match: (MOVBUreg x:(FLED _ _))
  4621  	// result: x
  4622  	for {
  4623  		x := v_0
  4624  		if x.Op != OpRISCV64FLED {
  4625  			break
  4626  		}
  4627  		v.copyOf(x)
  4628  		return true
  4629  	}
  4630  	// match: (MOVBUreg x:(FLTD _ _))
  4631  	// result: x
  4632  	for {
  4633  		x := v_0
  4634  		if x.Op != OpRISCV64FLTD {
  4635  			break
  4636  		}
  4637  		v.copyOf(x)
  4638  		return true
  4639  	}
  4640  	// match: (MOVBUreg x:(FEQD _ _))
  4641  	// result: x
  4642  	for {
  4643  		x := v_0
  4644  		if x.Op != OpRISCV64FEQD {
  4645  			break
  4646  		}
  4647  		v.copyOf(x)
  4648  		return true
  4649  	}
  4650  	// match: (MOVBUreg x:(FNED _ _))
  4651  	// result: x
  4652  	for {
  4653  		x := v_0
  4654  		if x.Op != OpRISCV64FNED {
  4655  			break
  4656  		}
  4657  		v.copyOf(x)
  4658  		return true
  4659  	}
  4660  	// match: (MOVBUreg x:(SEQZ _))
  4661  	// result: x
  4662  	for {
  4663  		x := v_0
  4664  		if x.Op != OpRISCV64SEQZ {
  4665  			break
  4666  		}
  4667  		v.copyOf(x)
  4668  		return true
  4669  	}
  4670  	// match: (MOVBUreg x:(SNEZ _))
  4671  	// result: x
  4672  	for {
  4673  		x := v_0
  4674  		if x.Op != OpRISCV64SNEZ {
  4675  			break
  4676  		}
  4677  		v.copyOf(x)
  4678  		return true
  4679  	}
  4680  	// match: (MOVBUreg x:(SLT _ _))
  4681  	// result: x
  4682  	for {
  4683  		x := v_0
  4684  		if x.Op != OpRISCV64SLT {
  4685  			break
  4686  		}
  4687  		v.copyOf(x)
  4688  		return true
  4689  	}
  4690  	// match: (MOVBUreg x:(SLTU _ _))
  4691  	// result: x
  4692  	for {
  4693  		x := v_0
  4694  		if x.Op != OpRISCV64SLTU {
  4695  			break
  4696  		}
  4697  		v.copyOf(x)
  4698  		return true
  4699  	}
  4700  	// match: (MOVBUreg x:(ANDI [c] y))
  4701  	// cond: c >= 0 && int64(uint8(c)) == c
  4702  	// result: x
  4703  	for {
  4704  		x := v_0
  4705  		if x.Op != OpRISCV64ANDI {
  4706  			break
  4707  		}
  4708  		c := auxIntToInt64(x.AuxInt)
  4709  		if !(c >= 0 && int64(uint8(c)) == c) {
  4710  			break
  4711  		}
  4712  		v.copyOf(x)
  4713  		return true
  4714  	}
  4715  	// match: (MOVBUreg (ANDI [c] x))
  4716  	// cond: c < 0
  4717  	// result: (ANDI [int64(uint8(c))] x)
  4718  	for {
  4719  		if v_0.Op != OpRISCV64ANDI {
  4720  			break
  4721  		}
  4722  		c := auxIntToInt64(v_0.AuxInt)
  4723  		x := v_0.Args[0]
  4724  		if !(c < 0) {
  4725  			break
  4726  		}
  4727  		v.reset(OpRISCV64ANDI)
  4728  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  4729  		v.AddArg(x)
  4730  		return true
  4731  	}
  4732  	// match: (MOVBUreg (MOVDconst [c]))
  4733  	// result: (MOVDconst [int64(uint8(c))])
  4734  	for {
  4735  		if v_0.Op != OpRISCV64MOVDconst {
  4736  			break
  4737  		}
  4738  		c := auxIntToInt64(v_0.AuxInt)
  4739  		v.reset(OpRISCV64MOVDconst)
  4740  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  4741  		return true
  4742  	}
  4743  	// match: (MOVBUreg x:(MOVBUload _ _))
  4744  	// result: (MOVDreg x)
  4745  	for {
  4746  		x := v_0
  4747  		if x.Op != OpRISCV64MOVBUload {
  4748  			break
  4749  		}
  4750  		v.reset(OpRISCV64MOVDreg)
  4751  		v.AddArg(x)
  4752  		return true
  4753  	}
  4754  	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
  4755  	// result: (MOVDreg x)
  4756  	for {
  4757  		x := v_0
  4758  		if x.Op != OpSelect0 {
  4759  			break
  4760  		}
  4761  		x_0 := x.Args[0]
  4762  		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
  4763  			break
  4764  		}
  4765  		v.reset(OpRISCV64MOVDreg)
  4766  		v.AddArg(x)
  4767  		return true
  4768  	}
  4769  	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
  4770  	// result: (MOVDreg x)
  4771  	for {
  4772  		x := v_0
  4773  		if x.Op != OpSelect0 {
  4774  			break
  4775  		}
  4776  		x_0 := x.Args[0]
  4777  		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
  4778  			break
  4779  		}
  4780  		v.reset(OpRISCV64MOVDreg)
  4781  		v.AddArg(x)
  4782  		return true
  4783  	}
  4784  	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
  4785  	// result: (MOVDreg x)
  4786  	for {
  4787  		x := v_0
  4788  		if x.Op != OpSelect0 {
  4789  			break
  4790  		}
  4791  		x_0 := x.Args[0]
  4792  		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
  4793  			break
  4794  		}
  4795  		v.reset(OpRISCV64MOVDreg)
  4796  		v.AddArg(x)
  4797  		return true
  4798  	}
  4799  	// match: (MOVBUreg x:(MOVBUreg _))
  4800  	// result: (MOVDreg x)
  4801  	for {
  4802  		x := v_0
  4803  		if x.Op != OpRISCV64MOVBUreg {
  4804  			break
  4805  		}
  4806  		v.reset(OpRISCV64MOVDreg)
  4807  		v.AddArg(x)
  4808  		return true
  4809  	}
  4810  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  4811  	// cond: x.Uses == 1 && clobber(x)
  4812  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  4813  	for {
  4814  		t := v.Type
  4815  		x := v_0
  4816  		if x.Op != OpRISCV64MOVBload {
  4817  			break
  4818  		}
  4819  		off := auxIntToInt32(x.AuxInt)
  4820  		sym := auxToSym(x.Aux)
  4821  		mem := x.Args[1]
  4822  		ptr := x.Args[0]
  4823  		if !(x.Uses == 1 && clobber(x)) {
  4824  			break
  4825  		}
  4826  		b = x.Block
  4827  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
  4828  		v.copyOf(v0)
  4829  		v0.AuxInt = int32ToAuxInt(off)
  4830  		v0.Aux = symToAux(sym)
  4831  		v0.AddArg2(ptr, mem)
  4832  		return true
  4833  	}
  4834  	return false
  4835  }
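// MOVBUreg (zero-extend the low byte) is dropped whenever its input already
// fits in an unsigned byte: the FLES/FLTS/.../SLT/SLTU/SEQZ/SNEZ comparisons
// produce 0 or 1, an ANDI with a small non-negative mask is already narrow,
// and unsigned byte loads and the listed atomic results are rewritten to
// MOVDreg to record that no further extension is needed. The final rule
// converts a sign-extending MOVBload whose only use is this zero-extension
// into a MOVBUload placed in the load's block.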
  4836  func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
  4837  	v_1 := v.Args[1]
  4838  	v_0 := v.Args[0]
  4839  	b := v.Block
  4840  	config := b.Func.Config
  4841  	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4842  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4843  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4844  	for {
  4845  		off1 := auxIntToInt32(v.AuxInt)
  4846  		sym1 := auxToSym(v.Aux)
  4847  		if v_0.Op != OpRISCV64MOVaddr {
  4848  			break
  4849  		}
  4850  		off2 := auxIntToInt32(v_0.AuxInt)
  4851  		sym2 := auxToSym(v_0.Aux)
  4852  		base := v_0.Args[0]
  4853  		mem := v_1
  4854  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4855  			break
  4856  		}
  4857  		v.reset(OpRISCV64MOVBload)
  4858  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4859  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4860  		v.AddArg2(base, mem)
  4861  		return true
  4862  	}
  4863  	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
  4864  	// cond: is32Bit(int64(off1)+off2)
  4865  	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
  4866  	for {
  4867  		off1 := auxIntToInt32(v.AuxInt)
  4868  		sym := auxToSym(v.Aux)
  4869  		if v_0.Op != OpRISCV64ADDI {
  4870  			break
  4871  		}
  4872  		off2 := auxIntToInt64(v_0.AuxInt)
  4873  		base := v_0.Args[0]
  4874  		mem := v_1
  4875  		if !(is32Bit(int64(off1) + off2)) {
  4876  			break
  4877  		}
  4878  		v.reset(OpRISCV64MOVBload)
  4879  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4880  		v.Aux = symToAux(sym)
  4881  		v.AddArg2(base, mem)
  4882  		return true
  4883  	}
  4884  	return false
  4885  }
  4886  func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
  4887  	v_0 := v.Args[0]
  4888  	b := v.Block
  4889  	// match: (MOVBreg x:(ANDI [c] y))
  4890  	// cond: c >= 0 && int64(int8(c)) == c
  4891  	// result: x
  4892  	for {
  4893  		x := v_0
  4894  		if x.Op != OpRISCV64ANDI {
  4895  			break
  4896  		}
  4897  		c := auxIntToInt64(x.AuxInt)
  4898  		if !(c >= 0 && int64(int8(c)) == c) {
  4899  			break
  4900  		}
  4901  		v.copyOf(x)
  4902  		return true
  4903  	}
  4904  	// match: (MOVBreg (MOVDconst [c]))
  4905  	// result: (MOVDconst [int64(int8(c))])
  4906  	for {
  4907  		if v_0.Op != OpRISCV64MOVDconst {
  4908  			break
  4909  		}
  4910  		c := auxIntToInt64(v_0.AuxInt)
  4911  		v.reset(OpRISCV64MOVDconst)
  4912  		v.AuxInt = int64ToAuxInt(int64(int8(c)))
  4913  		return true
  4914  	}
  4915  	// match: (MOVBreg x:(MOVBload _ _))
  4916  	// result: (MOVDreg x)
  4917  	for {
  4918  		x := v_0
  4919  		if x.Op != OpRISCV64MOVBload {
  4920  			break
  4921  		}
  4922  		v.reset(OpRISCV64MOVDreg)
  4923  		v.AddArg(x)
  4924  		return true
  4925  	}
  4926  	// match: (MOVBreg x:(MOVBreg _))
  4927  	// result: (MOVDreg x)
  4928  	for {
  4929  		x := v_0
  4930  		if x.Op != OpRISCV64MOVBreg {
  4931  			break
  4932  		}
  4933  		v.reset(OpRISCV64MOVDreg)
  4934  		v.AddArg(x)
  4935  		return true
  4936  	}
  4937  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  4938  	// cond: x.Uses == 1 && clobber(x)
  4939  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  4940  	for {
  4941  		t := v.Type
  4942  		x := v_0
  4943  		if x.Op != OpRISCV64MOVBUload {
  4944  			break
  4945  		}
  4946  		off := auxIntToInt32(x.AuxInt)
  4947  		sym := auxToSym(x.Aux)
  4948  		mem := x.Args[1]
  4949  		ptr := x.Args[0]
  4950  		if !(x.Uses == 1 && clobber(x)) {
  4951  			break
  4952  		}
  4953  		b = x.Block
  4954  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
  4955  		v.copyOf(v0)
  4956  		v0.AuxInt = int32ToAuxInt(off)
  4957  		v0.Aux = symToAux(sym)
  4958  		v0.AddArg2(ptr, mem)
  4959  		return true
  4960  	}
  4961  	return false
  4962  }
  4963  func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
  4964  	v_2 := v.Args[2]
  4965  	v_1 := v.Args[1]
  4966  	v_0 := v.Args[0]
  4967  	b := v.Block
  4968  	config := b.Func.Config
  4969  	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  4970  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  4971  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  4972  	for {
  4973  		off1 := auxIntToInt32(v.AuxInt)
  4974  		sym1 := auxToSym(v.Aux)
  4975  		if v_0.Op != OpRISCV64MOVaddr {
  4976  			break
  4977  		}
  4978  		off2 := auxIntToInt32(v_0.AuxInt)
  4979  		sym2 := auxToSym(v_0.Aux)
  4980  		base := v_0.Args[0]
  4981  		val := v_1
  4982  		mem := v_2
  4983  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4984  			break
  4985  		}
  4986  		v.reset(OpRISCV64MOVBstore)
  4987  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4988  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4989  		v.AddArg3(base, val, mem)
  4990  		return true
  4991  	}
  4992  	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
  4993  	// cond: is32Bit(int64(off1)+off2)
  4994  	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
  4995  	for {
  4996  		off1 := auxIntToInt32(v.AuxInt)
  4997  		sym := auxToSym(v.Aux)
  4998  		if v_0.Op != OpRISCV64ADDI {
  4999  			break
  5000  		}
  5001  		off2 := auxIntToInt64(v_0.AuxInt)
  5002  		base := v_0.Args[0]
  5003  		val := v_1
  5004  		mem := v_2
  5005  		if !(is32Bit(int64(off1) + off2)) {
  5006  			break
  5007  		}
  5008  		v.reset(OpRISCV64MOVBstore)
  5009  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5010  		v.Aux = symToAux(sym)
  5011  		v.AddArg3(base, val, mem)
  5012  		return true
  5013  	}
  5014  	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
  5015  	// result: (MOVBstorezero [off] {sym} ptr mem)
  5016  	for {
  5017  		off := auxIntToInt32(v.AuxInt)
  5018  		sym := auxToSym(v.Aux)
  5019  		ptr := v_0
  5020  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5021  			break
  5022  		}
  5023  		mem := v_2
  5024  		v.reset(OpRISCV64MOVBstorezero)
  5025  		v.AuxInt = int32ToAuxInt(off)
  5026  		v.Aux = symToAux(sym)
  5027  		v.AddArg2(ptr, mem)
  5028  		return true
  5029  	}
  5030  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  5031  	// result: (MOVBstore [off] {sym} ptr x mem)
  5032  	for {
  5033  		off := auxIntToInt32(v.AuxInt)
  5034  		sym := auxToSym(v.Aux)
  5035  		ptr := v_0
  5036  		if v_1.Op != OpRISCV64MOVBreg {
  5037  			break
  5038  		}
  5039  		x := v_1.Args[0]
  5040  		mem := v_2
  5041  		v.reset(OpRISCV64MOVBstore)
  5042  		v.AuxInt = int32ToAuxInt(off)
  5043  		v.Aux = symToAux(sym)
  5044  		v.AddArg3(ptr, x, mem)
  5045  		return true
  5046  	}
  5047  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  5048  	// result: (MOVBstore [off] {sym} ptr x mem)
  5049  	for {
  5050  		off := auxIntToInt32(v.AuxInt)
  5051  		sym := auxToSym(v.Aux)
  5052  		ptr := v_0
  5053  		if v_1.Op != OpRISCV64MOVHreg {
  5054  			break
  5055  		}
  5056  		x := v_1.Args[0]
  5057  		mem := v_2
  5058  		v.reset(OpRISCV64MOVBstore)
  5059  		v.AuxInt = int32ToAuxInt(off)
  5060  		v.Aux = symToAux(sym)
  5061  		v.AddArg3(ptr, x, mem)
  5062  		return true
  5063  	}
  5064  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  5065  	// result: (MOVBstore [off] {sym} ptr x mem)
  5066  	for {
  5067  		off := auxIntToInt32(v.AuxInt)
  5068  		sym := auxToSym(v.Aux)
  5069  		ptr := v_0
  5070  		if v_1.Op != OpRISCV64MOVWreg {
  5071  			break
  5072  		}
  5073  		x := v_1.Args[0]
  5074  		mem := v_2
  5075  		v.reset(OpRISCV64MOVBstore)
  5076  		v.AuxInt = int32ToAuxInt(off)
  5077  		v.Aux = symToAux(sym)
  5078  		v.AddArg3(ptr, x, mem)
  5079  		return true
  5080  	}
  5081  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  5082  	// result: (MOVBstore [off] {sym} ptr x mem)
  5083  	for {
  5084  		off := auxIntToInt32(v.AuxInt)
  5085  		sym := auxToSym(v.Aux)
  5086  		ptr := v_0
  5087  		if v_1.Op != OpRISCV64MOVBUreg {
  5088  			break
  5089  		}
  5090  		x := v_1.Args[0]
  5091  		mem := v_2
  5092  		v.reset(OpRISCV64MOVBstore)
  5093  		v.AuxInt = int32ToAuxInt(off)
  5094  		v.Aux = symToAux(sym)
  5095  		v.AddArg3(ptr, x, mem)
  5096  		return true
  5097  	}
  5098  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  5099  	// result: (MOVBstore [off] {sym} ptr x mem)
  5100  	for {
  5101  		off := auxIntToInt32(v.AuxInt)
  5102  		sym := auxToSym(v.Aux)
  5103  		ptr := v_0
  5104  		if v_1.Op != OpRISCV64MOVHUreg {
  5105  			break
  5106  		}
  5107  		x := v_1.Args[0]
  5108  		mem := v_2
  5109  		v.reset(OpRISCV64MOVBstore)
  5110  		v.AuxInt = int32ToAuxInt(off)
  5111  		v.Aux = symToAux(sym)
  5112  		v.AddArg3(ptr, x, mem)
  5113  		return true
  5114  	}
  5115  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  5116  	// result: (MOVBstore [off] {sym} ptr x mem)
  5117  	for {
  5118  		off := auxIntToInt32(v.AuxInt)
  5119  		sym := auxToSym(v.Aux)
  5120  		ptr := v_0
  5121  		if v_1.Op != OpRISCV64MOVWUreg {
  5122  			break
  5123  		}
  5124  		x := v_1.Args[0]
  5125  		mem := v_2
  5126  		v.reset(OpRISCV64MOVBstore)
  5127  		v.AuxInt = int32ToAuxInt(off)
  5128  		v.Aux = symToAux(sym)
  5129  		v.AddArg3(ptr, x, mem)
  5130  		return true
  5131  	}
  5132  	return false
  5133  }
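// MOVBstore writes only the low 8 bits, so any MOVBreg/MOVHreg/MOVWreg or
// MOVBUreg/MOVHUreg/MOVWUreg extension of the stored value is redundant and
// is stripped, and storing a zero constant becomes MOVBstorezero, which can
// take its data from the zero register.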
  5134  func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
  5135  	v_1 := v.Args[1]
  5136  	v_0 := v.Args[0]
  5137  	b := v.Block
  5138  	config := b.Func.Config
  5139  	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5140  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5141  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5142  	for {
  5143  		off1 := auxIntToInt32(v.AuxInt)
  5144  		sym1 := auxToSym(v.Aux)
  5145  		if v_0.Op != OpRISCV64MOVaddr {
  5146  			break
  5147  		}
  5148  		off2 := auxIntToInt32(v_0.AuxInt)
  5149  		sym2 := auxToSym(v_0.Aux)
  5150  		base := v_0.Args[0]
  5151  		mem := v_1
  5152  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5153  			break
  5154  		}
  5155  		v.reset(OpRISCV64MOVBstorezero)
  5156  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5157  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5158  		v.AddArg2(base, mem)
  5159  		return true
  5160  	}
  5161  	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] base) mem)
  5162  	// cond: is32Bit(int64(off1)+off2)
  5163  	// result: (MOVBstorezero [off1+int32(off2)] {sym} base mem)
  5164  	for {
  5165  		off1 := auxIntToInt32(v.AuxInt)
  5166  		sym := auxToSym(v.Aux)
  5167  		if v_0.Op != OpRISCV64ADDI {
  5168  			break
  5169  		}
  5170  		off2 := auxIntToInt64(v_0.AuxInt)
  5171  		base := v_0.Args[0]
  5172  		mem := v_1
  5173  		if !(is32Bit(int64(off1) + off2)) {
  5174  			break
  5175  		}
  5176  		v.reset(OpRISCV64MOVBstorezero)
  5177  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5178  		v.Aux = symToAux(sym)
  5179  		v.AddArg2(base, mem)
  5180  		return true
  5181  	}
  5182  	return false
  5183  }
  5184  func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
  5185  	v_1 := v.Args[1]
  5186  	v_0 := v.Args[0]
  5187  	b := v.Block
  5188  	config := b.Func.Config
  5189  	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5190  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5191  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5192  	for {
  5193  		off1 := auxIntToInt32(v.AuxInt)
  5194  		sym1 := auxToSym(v.Aux)
  5195  		if v_0.Op != OpRISCV64MOVaddr {
  5196  			break
  5197  		}
  5198  		off2 := auxIntToInt32(v_0.AuxInt)
  5199  		sym2 := auxToSym(v_0.Aux)
  5200  		base := v_0.Args[0]
  5201  		mem := v_1
  5202  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5203  			break
  5204  		}
  5205  		v.reset(OpRISCV64MOVDload)
  5206  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5207  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5208  		v.AddArg2(base, mem)
  5209  		return true
  5210  	}
  5211  	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
  5212  	// cond: is32Bit(int64(off1)+off2)
  5213  	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
  5214  	for {
  5215  		off1 := auxIntToInt32(v.AuxInt)
  5216  		sym := auxToSym(v.Aux)
  5217  		if v_0.Op != OpRISCV64ADDI {
  5218  			break
  5219  		}
  5220  		off2 := auxIntToInt64(v_0.AuxInt)
  5221  		base := v_0.Args[0]
  5222  		mem := v_1
  5223  		if !(is32Bit(int64(off1) + off2)) {
  5224  			break
  5225  		}
  5226  		v.reset(OpRISCV64MOVDload)
  5227  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5228  		v.Aux = symToAux(sym)
  5229  		v.AddArg2(base, mem)
  5230  		return true
  5231  	}
  5232  	// match: (MOVDload [off] {sym} ptr1 (FMOVDstore [off] {sym} ptr2 x _))
  5233  	// cond: isSamePtr(ptr1, ptr2)
  5234  	// result: (FMVXD x)
  5235  	for {
  5236  		off := auxIntToInt32(v.AuxInt)
  5237  		sym := auxToSym(v.Aux)
  5238  		ptr1 := v_0
  5239  		if v_1.Op != OpRISCV64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  5240  			break
  5241  		}
  5242  		x := v_1.Args[1]
  5243  		ptr2 := v_1.Args[0]
  5244  		if !(isSamePtr(ptr1, ptr2)) {
  5245  			break
  5246  		}
  5247  		v.reset(OpRISCV64FMVXD)
  5248  		v.AddArg(x)
  5249  		return true
  5250  	}
  5251  	return false
  5252  }
  5253  func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
  5254  	v_0 := v.Args[0]
  5255  	// match: (MOVDnop (MOVDconst [c]))
  5256  	// result: (MOVDconst [c])
  5257  	for {
  5258  		if v_0.Op != OpRISCV64MOVDconst {
  5259  			break
  5260  		}
  5261  		c := auxIntToInt64(v_0.AuxInt)
  5262  		v.reset(OpRISCV64MOVDconst)
  5263  		v.AuxInt = int64ToAuxInt(c)
  5264  		return true
  5265  	}
  5266  	return false
  5267  }
  5268  func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
  5269  	v_0 := v.Args[0]
  5270  	// match: (MOVDreg x)
  5271  	// cond: x.Uses == 1
  5272  	// result: (MOVDnop x)
  5273  	for {
  5274  		x := v_0
  5275  		if !(x.Uses == 1) {
  5276  			break
  5277  		}
  5278  		v.reset(OpRISCV64MOVDnop)
  5279  		v.AddArg(x)
  5280  		return true
  5281  	}
  5282  	return false
  5283  }
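// MOVDreg/MOVDnop cleanup: a MOVDreg whose argument is used nowhere else
// decays to MOVDnop, and a MOVDnop of a MOVDconst folds to the constant.
// MOVDnop is intended to emit no code, so the register moves introduced by
// the extension-elimination rules above should disappear entirely.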
  5284  func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
  5285  	v_2 := v.Args[2]
  5286  	v_1 := v.Args[1]
  5287  	v_0 := v.Args[0]
  5288  	b := v.Block
  5289  	config := b.Func.Config
  5290  	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  5291  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5292  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  5293  	for {
  5294  		off1 := auxIntToInt32(v.AuxInt)
  5295  		sym1 := auxToSym(v.Aux)
  5296  		if v_0.Op != OpRISCV64MOVaddr {
  5297  			break
  5298  		}
  5299  		off2 := auxIntToInt32(v_0.AuxInt)
  5300  		sym2 := auxToSym(v_0.Aux)
  5301  		base := v_0.Args[0]
  5302  		val := v_1
  5303  		mem := v_2
  5304  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5305  			break
  5306  		}
  5307  		v.reset(OpRISCV64MOVDstore)
  5308  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5309  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5310  		v.AddArg3(base, val, mem)
  5311  		return true
  5312  	}
  5313  	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
  5314  	// cond: is32Bit(int64(off1)+off2)
  5315  	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
  5316  	for {
  5317  		off1 := auxIntToInt32(v.AuxInt)
  5318  		sym := auxToSym(v.Aux)
  5319  		if v_0.Op != OpRISCV64ADDI {
  5320  			break
  5321  		}
  5322  		off2 := auxIntToInt64(v_0.AuxInt)
  5323  		base := v_0.Args[0]
  5324  		val := v_1
  5325  		mem := v_2
  5326  		if !(is32Bit(int64(off1) + off2)) {
  5327  			break
  5328  		}
  5329  		v.reset(OpRISCV64MOVDstore)
  5330  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5331  		v.Aux = symToAux(sym)
  5332  		v.AddArg3(base, val, mem)
  5333  		return true
  5334  	}
  5335  	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
  5336  	// result: (MOVDstorezero [off] {sym} ptr mem)
  5337  	for {
  5338  		off := auxIntToInt32(v.AuxInt)
  5339  		sym := auxToSym(v.Aux)
  5340  		ptr := v_0
  5341  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5342  			break
  5343  		}
  5344  		mem := v_2
  5345  		v.reset(OpRISCV64MOVDstorezero)
  5346  		v.AuxInt = int32ToAuxInt(off)
  5347  		v.Aux = symToAux(sym)
  5348  		v.AddArg2(ptr, mem)
  5349  		return true
  5350  	}
  5351  	return false
  5352  }
  5353  func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
  5354  	v_1 := v.Args[1]
  5355  	v_0 := v.Args[0]
  5356  	b := v.Block
  5357  	config := b.Func.Config
  5358  	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5359  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5360  	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5361  	for {
  5362  		off1 := auxIntToInt32(v.AuxInt)
  5363  		sym1 := auxToSym(v.Aux)
  5364  		if v_0.Op != OpRISCV64MOVaddr {
  5365  			break
  5366  		}
  5367  		off2 := auxIntToInt32(v_0.AuxInt)
  5368  		sym2 := auxToSym(v_0.Aux)
  5369  		base := v_0.Args[0]
  5370  		mem := v_1
  5371  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5372  			break
  5373  		}
  5374  		v.reset(OpRISCV64MOVDstorezero)
  5375  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5376  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5377  		v.AddArg2(base, mem)
  5378  		return true
  5379  	}
  5380  	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] base) mem)
  5381  	// cond: is32Bit(int64(off1)+off2)
  5382  	// result: (MOVDstorezero [off1+int32(off2)] {sym} base mem)
  5383  	for {
  5384  		off1 := auxIntToInt32(v.AuxInt)
  5385  		sym := auxToSym(v.Aux)
  5386  		if v_0.Op != OpRISCV64ADDI {
  5387  			break
  5388  		}
  5389  		off2 := auxIntToInt64(v_0.AuxInt)
  5390  		base := v_0.Args[0]
  5391  		mem := v_1
  5392  		if !(is32Bit(int64(off1) + off2)) {
  5393  			break
  5394  		}
  5395  		v.reset(OpRISCV64MOVDstorezero)
  5396  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5397  		v.Aux = symToAux(sym)
  5398  		v.AddArg2(base, mem)
  5399  		return true
  5400  	}
  5401  	return false
  5402  }
  5403  func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
  5404  	v_1 := v.Args[1]
  5405  	v_0 := v.Args[0]
  5406  	b := v.Block
  5407  	config := b.Func.Config
  5408  	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5409  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5410  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5411  	for {
  5412  		off1 := auxIntToInt32(v.AuxInt)
  5413  		sym1 := auxToSym(v.Aux)
  5414  		if v_0.Op != OpRISCV64MOVaddr {
  5415  			break
  5416  		}
  5417  		off2 := auxIntToInt32(v_0.AuxInt)
  5418  		sym2 := auxToSym(v_0.Aux)
  5419  		base := v_0.Args[0]
  5420  		mem := v_1
  5421  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5422  			break
  5423  		}
  5424  		v.reset(OpRISCV64MOVHUload)
  5425  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5426  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5427  		v.AddArg2(base, mem)
  5428  		return true
  5429  	}
  5430  	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
  5431  	// cond: is32Bit(int64(off1)+off2)
  5432  	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
  5433  	for {
  5434  		off1 := auxIntToInt32(v.AuxInt)
  5435  		sym := auxToSym(v.Aux)
  5436  		if v_0.Op != OpRISCV64ADDI {
  5437  			break
  5438  		}
  5439  		off2 := auxIntToInt64(v_0.AuxInt)
  5440  		base := v_0.Args[0]
  5441  		mem := v_1
  5442  		if !(is32Bit(int64(off1) + off2)) {
  5443  			break
  5444  		}
  5445  		v.reset(OpRISCV64MOVHUload)
  5446  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5447  		v.Aux = symToAux(sym)
  5448  		v.AddArg2(base, mem)
  5449  		return true
  5450  	}
  5451  	return false
  5452  }
  5453  func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
  5454  	v_0 := v.Args[0]
  5455  	b := v.Block
  5456  	// match: (MOVHUreg x:(ANDI [c] y))
  5457  	// cond: c >= 0 && int64(uint16(c)) == c
  5458  	// result: x
  5459  	for {
  5460  		x := v_0
  5461  		if x.Op != OpRISCV64ANDI {
  5462  			break
  5463  		}
  5464  		c := auxIntToInt64(x.AuxInt)
  5465  		if !(c >= 0 && int64(uint16(c)) == c) {
  5466  			break
  5467  		}
  5468  		v.copyOf(x)
  5469  		return true
  5470  	}
  5471  	// match: (MOVHUreg (ANDI [c] x))
  5472  	// cond: c < 0
  5473  	// result: (ANDI [int64(uint16(c))] x)
  5474  	for {
  5475  		if v_0.Op != OpRISCV64ANDI {
  5476  			break
  5477  		}
  5478  		c := auxIntToInt64(v_0.AuxInt)
  5479  		x := v_0.Args[0]
  5480  		if !(c < 0) {
  5481  			break
  5482  		}
  5483  		v.reset(OpRISCV64ANDI)
  5484  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  5485  		v.AddArg(x)
  5486  		return true
  5487  	}
  5488  	// match: (MOVHUreg (MOVDconst [c]))
  5489  	// result: (MOVDconst [int64(uint16(c))])
  5490  	for {
  5491  		if v_0.Op != OpRISCV64MOVDconst {
  5492  			break
  5493  		}
  5494  		c := auxIntToInt64(v_0.AuxInt)
  5495  		v.reset(OpRISCV64MOVDconst)
  5496  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  5497  		return true
  5498  	}
  5499  	// match: (MOVHUreg x:(MOVBUload _ _))
  5500  	// result: (MOVDreg x)
  5501  	for {
  5502  		x := v_0
  5503  		if x.Op != OpRISCV64MOVBUload {
  5504  			break
  5505  		}
  5506  		v.reset(OpRISCV64MOVDreg)
  5507  		v.AddArg(x)
  5508  		return true
  5509  	}
  5510  	// match: (MOVHUreg x:(MOVHUload _ _))
  5511  	// result: (MOVDreg x)
  5512  	for {
  5513  		x := v_0
  5514  		if x.Op != OpRISCV64MOVHUload {
  5515  			break
  5516  		}
  5517  		v.reset(OpRISCV64MOVDreg)
  5518  		v.AddArg(x)
  5519  		return true
  5520  	}
  5521  	// match: (MOVHUreg x:(MOVBUreg _))
  5522  	// result: (MOVDreg x)
  5523  	for {
  5524  		x := v_0
  5525  		if x.Op != OpRISCV64MOVBUreg {
  5526  			break
  5527  		}
  5528  		v.reset(OpRISCV64MOVDreg)
  5529  		v.AddArg(x)
  5530  		return true
  5531  	}
  5532  	// match: (MOVHUreg x:(MOVHUreg _))
  5533  	// result: (MOVDreg x)
  5534  	for {
  5535  		x := v_0
  5536  		if x.Op != OpRISCV64MOVHUreg {
  5537  			break
  5538  		}
  5539  		v.reset(OpRISCV64MOVDreg)
  5540  		v.AddArg(x)
  5541  		return true
  5542  	}
  5543  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  5544  	// cond: x.Uses == 1 && clobber(x)
  5545  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
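        	// The "@x.Block" form below rebuilds the load as an unsigned
        	// MOVHUload in the block of the original MOVHload; x.Uses == 1
        	// ensures the signed load has no other users, and clobber(x)
        	// invalidates it so it is removed as dead. The same pattern is
        	// used by the other reg-of-load rules below.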
  5546  	for {
  5547  		t := v.Type
  5548  		x := v_0
  5549  		if x.Op != OpRISCV64MOVHload {
  5550  			break
  5551  		}
  5552  		off := auxIntToInt32(x.AuxInt)
  5553  		sym := auxToSym(x.Aux)
  5554  		mem := x.Args[1]
  5555  		ptr := x.Args[0]
  5556  		if !(x.Uses == 1 && clobber(x)) {
  5557  			break
  5558  		}
  5559  		b = x.Block
  5560  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
  5561  		v.copyOf(v0)
  5562  		v0.AuxInt = int32ToAuxInt(off)
  5563  		v0.Aux = symToAux(sym)
  5564  		v0.AddArg2(ptr, mem)
  5565  		return true
  5566  	}
  5567  	return false
  5568  }
  5569  func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
  5570  	v_1 := v.Args[1]
  5571  	v_0 := v.Args[0]
  5572  	b := v.Block
  5573  	config := b.Func.Config
  5574  	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5575  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5576  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5577  	for {
  5578  		off1 := auxIntToInt32(v.AuxInt)
  5579  		sym1 := auxToSym(v.Aux)
  5580  		if v_0.Op != OpRISCV64MOVaddr {
  5581  			break
  5582  		}
  5583  		off2 := auxIntToInt32(v_0.AuxInt)
  5584  		sym2 := auxToSym(v_0.Aux)
  5585  		base := v_0.Args[0]
  5586  		mem := v_1
  5587  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5588  			break
  5589  		}
  5590  		v.reset(OpRISCV64MOVHload)
  5591  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5592  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5593  		v.AddArg2(base, mem)
  5594  		return true
  5595  	}
  5596  	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
  5597  	// cond: is32Bit(int64(off1)+off2)
  5598  	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
  5599  	for {
  5600  		off1 := auxIntToInt32(v.AuxInt)
  5601  		sym := auxToSym(v.Aux)
  5602  		if v_0.Op != OpRISCV64ADDI {
  5603  			break
  5604  		}
  5605  		off2 := auxIntToInt64(v_0.AuxInt)
  5606  		base := v_0.Args[0]
  5607  		mem := v_1
  5608  		if !(is32Bit(int64(off1) + off2)) {
  5609  			break
  5610  		}
  5611  		v.reset(OpRISCV64MOVHload)
  5612  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5613  		v.Aux = symToAux(sym)
  5614  		v.AddArg2(base, mem)
  5615  		return true
  5616  	}
  5617  	return false
  5618  }
  5619  func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
  5620  	v_0 := v.Args[0]
  5621  	b := v.Block
  5622  	// match: (MOVHreg x:(ANDI [c] y))
  5623  	// cond: c >= 0 && int64(int16(c)) == c
  5624  	// result: x
  5625  	for {
  5626  		x := v_0
  5627  		if x.Op != OpRISCV64ANDI {
  5628  			break
  5629  		}
  5630  		c := auxIntToInt64(x.AuxInt)
  5631  		if !(c >= 0 && int64(int16(c)) == c) {
  5632  			break
  5633  		}
  5634  		v.copyOf(x)
  5635  		return true
  5636  	}
  5637  	// match: (MOVHreg (MOVDconst [c]))
  5638  	// result: (MOVDconst [int64(int16(c))])
  5639  	for {
  5640  		if v_0.Op != OpRISCV64MOVDconst {
  5641  			break
  5642  		}
  5643  		c := auxIntToInt64(v_0.AuxInt)
  5644  		v.reset(OpRISCV64MOVDconst)
  5645  		v.AuxInt = int64ToAuxInt(int64(int16(c)))
  5646  		return true
  5647  	}
  5648  	// match: (MOVHreg x:(MOVBload _ _))
  5649  	// result: (MOVDreg x)
  5650  	for {
  5651  		x := v_0
  5652  		if x.Op != OpRISCV64MOVBload {
  5653  			break
  5654  		}
  5655  		v.reset(OpRISCV64MOVDreg)
  5656  		v.AddArg(x)
  5657  		return true
  5658  	}
  5659  	// match: (MOVHreg x:(MOVBUload _ _))
  5660  	// result: (MOVDreg x)
  5661  	for {
  5662  		x := v_0
  5663  		if x.Op != OpRISCV64MOVBUload {
  5664  			break
  5665  		}
  5666  		v.reset(OpRISCV64MOVDreg)
  5667  		v.AddArg(x)
  5668  		return true
  5669  	}
  5670  	// match: (MOVHreg x:(MOVHload _ _))
  5671  	// result: (MOVDreg x)
  5672  	for {
  5673  		x := v_0
  5674  		if x.Op != OpRISCV64MOVHload {
  5675  			break
  5676  		}
  5677  		v.reset(OpRISCV64MOVDreg)
  5678  		v.AddArg(x)
  5679  		return true
  5680  	}
  5681  	// match: (MOVHreg x:(MOVBreg _))
  5682  	// result: (MOVDreg x)
  5683  	for {
  5684  		x := v_0
  5685  		if x.Op != OpRISCV64MOVBreg {
  5686  			break
  5687  		}
  5688  		v.reset(OpRISCV64MOVDreg)
  5689  		v.AddArg(x)
  5690  		return true
  5691  	}
  5692  	// match: (MOVHreg x:(MOVBUreg _))
  5693  	// result: (MOVDreg x)
  5694  	for {
  5695  		x := v_0
  5696  		if x.Op != OpRISCV64MOVBUreg {
  5697  			break
  5698  		}
  5699  		v.reset(OpRISCV64MOVDreg)
  5700  		v.AddArg(x)
  5701  		return true
  5702  	}
  5703  	// match: (MOVHreg x:(MOVHreg _))
  5704  	// result: (MOVDreg x)
  5705  	for {
  5706  		x := v_0
  5707  		if x.Op != OpRISCV64MOVHreg {
  5708  			break
  5709  		}
  5710  		v.reset(OpRISCV64MOVDreg)
  5711  		v.AddArg(x)
  5712  		return true
  5713  	}
  5714  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  5715  	// cond: x.Uses == 1 && clobber(x)
  5716  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  5717  	for {
  5718  		t := v.Type
  5719  		x := v_0
  5720  		if x.Op != OpRISCV64MOVHUload {
  5721  			break
  5722  		}
  5723  		off := auxIntToInt32(x.AuxInt)
  5724  		sym := auxToSym(x.Aux)
  5725  		mem := x.Args[1]
  5726  		ptr := x.Args[0]
  5727  		if !(x.Uses == 1 && clobber(x)) {
  5728  			break
  5729  		}
  5730  		b = x.Block
  5731  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
  5732  		v.copyOf(v0)
  5733  		v0.AuxInt = int32ToAuxInt(off)
  5734  		v0.Aux = symToAux(sym)
  5735  		v0.AddArg2(ptr, mem)
  5736  		return true
  5737  	}
  5738  	return false
  5739  }
  5740  func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
  5741  	v_2 := v.Args[2]
  5742  	v_1 := v.Args[1]
  5743  	v_0 := v.Args[0]
  5744  	b := v.Block
  5745  	config := b.Func.Config
  5746  	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  5747  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5748  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  5749  	for {
  5750  		off1 := auxIntToInt32(v.AuxInt)
  5751  		sym1 := auxToSym(v.Aux)
  5752  		if v_0.Op != OpRISCV64MOVaddr {
  5753  			break
  5754  		}
  5755  		off2 := auxIntToInt32(v_0.AuxInt)
  5756  		sym2 := auxToSym(v_0.Aux)
  5757  		base := v_0.Args[0]
  5758  		val := v_1
  5759  		mem := v_2
  5760  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5761  			break
  5762  		}
  5763  		v.reset(OpRISCV64MOVHstore)
  5764  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5765  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5766  		v.AddArg3(base, val, mem)
  5767  		return true
  5768  	}
  5769  	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
  5770  	// cond: is32Bit(int64(off1)+off2)
  5771  	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
  5772  	for {
  5773  		off1 := auxIntToInt32(v.AuxInt)
  5774  		sym := auxToSym(v.Aux)
  5775  		if v_0.Op != OpRISCV64ADDI {
  5776  			break
  5777  		}
  5778  		off2 := auxIntToInt64(v_0.AuxInt)
  5779  		base := v_0.Args[0]
  5780  		val := v_1
  5781  		mem := v_2
  5782  		if !(is32Bit(int64(off1) + off2)) {
  5783  			break
  5784  		}
  5785  		v.reset(OpRISCV64MOVHstore)
  5786  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5787  		v.Aux = symToAux(sym)
  5788  		v.AddArg3(base, val, mem)
  5789  		return true
  5790  	}
  5791  	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
  5792  	// result: (MOVHstorezero [off] {sym} ptr mem)
  5793  	for {
  5794  		off := auxIntToInt32(v.AuxInt)
  5795  		sym := auxToSym(v.Aux)
  5796  		ptr := v_0
  5797  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5798  			break
  5799  		}
  5800  		mem := v_2
  5801  		v.reset(OpRISCV64MOVHstorezero)
  5802  		v.AuxInt = int32ToAuxInt(off)
  5803  		v.Aux = symToAux(sym)
  5804  		v.AddArg2(ptr, mem)
  5805  		return true
  5806  	}
  5807  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  5808  	// result: (MOVHstore [off] {sym} ptr x mem)
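        	// A 16-bit store only writes the low 16 bits of the value, which
        	// sign/zero-extension does not change, so the extension below (and
        	// in the MOVWreg/MOVHUreg/MOVWUreg rules that follow) is dropped.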
  5809  	for {
  5810  		off := auxIntToInt32(v.AuxInt)
  5811  		sym := auxToSym(v.Aux)
  5812  		ptr := v_0
  5813  		if v_1.Op != OpRISCV64MOVHreg {
  5814  			break
  5815  		}
  5816  		x := v_1.Args[0]
  5817  		mem := v_2
  5818  		v.reset(OpRISCV64MOVHstore)
  5819  		v.AuxInt = int32ToAuxInt(off)
  5820  		v.Aux = symToAux(sym)
  5821  		v.AddArg3(ptr, x, mem)
  5822  		return true
  5823  	}
  5824  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  5825  	// result: (MOVHstore [off] {sym} ptr x mem)
  5826  	for {
  5827  		off := auxIntToInt32(v.AuxInt)
  5828  		sym := auxToSym(v.Aux)
  5829  		ptr := v_0
  5830  		if v_1.Op != OpRISCV64MOVWreg {
  5831  			break
  5832  		}
  5833  		x := v_1.Args[0]
  5834  		mem := v_2
  5835  		v.reset(OpRISCV64MOVHstore)
  5836  		v.AuxInt = int32ToAuxInt(off)
  5837  		v.Aux = symToAux(sym)
  5838  		v.AddArg3(ptr, x, mem)
  5839  		return true
  5840  	}
  5841  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  5842  	// result: (MOVHstore [off] {sym} ptr x mem)
  5843  	for {
  5844  		off := auxIntToInt32(v.AuxInt)
  5845  		sym := auxToSym(v.Aux)
  5846  		ptr := v_0
  5847  		if v_1.Op != OpRISCV64MOVHUreg {
  5848  			break
  5849  		}
  5850  		x := v_1.Args[0]
  5851  		mem := v_2
  5852  		v.reset(OpRISCV64MOVHstore)
  5853  		v.AuxInt = int32ToAuxInt(off)
  5854  		v.Aux = symToAux(sym)
  5855  		v.AddArg3(ptr, x, mem)
  5856  		return true
  5857  	}
  5858  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  5859  	// result: (MOVHstore [off] {sym} ptr x mem)
  5860  	for {
  5861  		off := auxIntToInt32(v.AuxInt)
  5862  		sym := auxToSym(v.Aux)
  5863  		ptr := v_0
  5864  		if v_1.Op != OpRISCV64MOVWUreg {
  5865  			break
  5866  		}
  5867  		x := v_1.Args[0]
  5868  		mem := v_2
  5869  		v.reset(OpRISCV64MOVHstore)
  5870  		v.AuxInt = int32ToAuxInt(off)
  5871  		v.Aux = symToAux(sym)
  5872  		v.AddArg3(ptr, x, mem)
  5873  		return true
  5874  	}
  5875  	return false
  5876  }
  5877  func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
  5878  	v_1 := v.Args[1]
  5879  	v_0 := v.Args[0]
  5880  	b := v.Block
  5881  	config := b.Func.Config
  5882  	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5883  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5884  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5885  	for {
  5886  		off1 := auxIntToInt32(v.AuxInt)
  5887  		sym1 := auxToSym(v.Aux)
  5888  		if v_0.Op != OpRISCV64MOVaddr {
  5889  			break
  5890  		}
  5891  		off2 := auxIntToInt32(v_0.AuxInt)
  5892  		sym2 := auxToSym(v_0.Aux)
  5893  		base := v_0.Args[0]
  5894  		mem := v_1
  5895  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5896  			break
  5897  		}
  5898  		v.reset(OpRISCV64MOVHstorezero)
  5899  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5900  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5901  		v.AddArg2(base, mem)
  5902  		return true
  5903  	}
  5904  	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] base) mem)
  5905  	// cond: is32Bit(int64(off1)+off2)
  5906  	// result: (MOVHstorezero [off1+int32(off2)] {sym} base mem)
  5907  	for {
  5908  		off1 := auxIntToInt32(v.AuxInt)
  5909  		sym := auxToSym(v.Aux)
  5910  		if v_0.Op != OpRISCV64ADDI {
  5911  			break
  5912  		}
  5913  		off2 := auxIntToInt64(v_0.AuxInt)
  5914  		base := v_0.Args[0]
  5915  		mem := v_1
  5916  		if !(is32Bit(int64(off1) + off2)) {
  5917  			break
  5918  		}
  5919  		v.reset(OpRISCV64MOVHstorezero)
  5920  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5921  		v.Aux = symToAux(sym)
  5922  		v.AddArg2(base, mem)
  5923  		return true
  5924  	}
  5925  	return false
  5926  }
  5927  func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
  5928  	v_1 := v.Args[1]
  5929  	v_0 := v.Args[0]
  5930  	b := v.Block
  5931  	config := b.Func.Config
  5932  	typ := &b.Func.Config.Types
  5933  	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5934  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  5935  	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5936  	for {
  5937  		off1 := auxIntToInt32(v.AuxInt)
  5938  		sym1 := auxToSym(v.Aux)
  5939  		if v_0.Op != OpRISCV64MOVaddr {
  5940  			break
  5941  		}
  5942  		off2 := auxIntToInt32(v_0.AuxInt)
  5943  		sym2 := auxToSym(v_0.Aux)
  5944  		base := v_0.Args[0]
  5945  		mem := v_1
  5946  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5947  			break
  5948  		}
  5949  		v.reset(OpRISCV64MOVWUload)
  5950  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5951  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5952  		v.AddArg2(base, mem)
  5953  		return true
  5954  	}
  5955  	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
  5956  	// cond: is32Bit(int64(off1)+off2)
  5957  	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
  5958  	for {
  5959  		off1 := auxIntToInt32(v.AuxInt)
  5960  		sym := auxToSym(v.Aux)
  5961  		if v_0.Op != OpRISCV64ADDI {
  5962  			break
  5963  		}
  5964  		off2 := auxIntToInt64(v_0.AuxInt)
  5965  		base := v_0.Args[0]
  5966  		mem := v_1
  5967  		if !(is32Bit(int64(off1) + off2)) {
  5968  			break
  5969  		}
  5970  		v.reset(OpRISCV64MOVWUload)
  5971  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5972  		v.Aux = symToAux(sym)
  5973  		v.AddArg2(base, mem)
  5974  		return true
  5975  	}
  5976  	// match: (MOVWUload [off] {sym} ptr1 (FMOVWstore [off] {sym} ptr2 x _))
  5977  	// cond: isSamePtr(ptr1, ptr2)
  5978  	// result: (MOVWUreg (FMVXS x))
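        	// A reload of a float value just stored to the same address can be
        	// done register-to-register with FMVXS. On RV64, FMV.X.W
        	// sign-extends its 32-bit result, so the unsigned load additionally
        	// zero-extends via MOVWUreg; the signed MOVWload rule further down
        	// uses FMVXS directly.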
  5979  	for {
  5980  		off := auxIntToInt32(v.AuxInt)
  5981  		sym := auxToSym(v.Aux)
  5982  		ptr1 := v_0
  5983  		if v_1.Op != OpRISCV64FMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  5984  			break
  5985  		}
  5986  		x := v_1.Args[1]
  5987  		ptr2 := v_1.Args[0]
  5988  		if !(isSamePtr(ptr1, ptr2)) {
  5989  			break
  5990  		}
  5991  		v.reset(OpRISCV64MOVWUreg)
  5992  		v0 := b.NewValue0(v_1.Pos, OpRISCV64FMVXS, typ.Int32)
  5993  		v0.AddArg(x)
  5994  		v.AddArg(v0)
  5995  		return true
  5996  	}
  5997  	return false
  5998  }
  5999  func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
  6000  	v_0 := v.Args[0]
  6001  	b := v.Block
  6002  	typ := &b.Func.Config.Types
  6003  	// match: (MOVWUreg x:(ANDI [c] y))
  6004  	// cond: c >= 0 && int64(uint32(c)) == c
  6005  	// result: x
  6006  	for {
  6007  		x := v_0
  6008  		if x.Op != OpRISCV64ANDI {
  6009  			break
  6010  		}
  6011  		c := auxIntToInt64(x.AuxInt)
  6012  		if !(c >= 0 && int64(uint32(c)) == c) {
  6013  			break
  6014  		}
  6015  		v.copyOf(x)
  6016  		return true
  6017  	}
  6018  	// match: (MOVWUreg (ANDI [c] x))
  6019  	// cond: c < 0
  6020  	// result: (AND (MOVDconst [int64(uint32(c))]) x)
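        	// Zero-extending the low 32 bits of (x & c) equals x & uint32(c).
        	// With c negative the 32-bit mask has its top bit set and is not a
        	// small immediate, so the rule materialises it with MOVDconst and
        	// uses a register AND.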
  6021  	for {
  6022  		if v_0.Op != OpRISCV64ANDI {
  6023  			break
  6024  		}
  6025  		c := auxIntToInt64(v_0.AuxInt)
  6026  		x := v_0.Args[0]
  6027  		if !(c < 0) {
  6028  			break
  6029  		}
  6030  		v.reset(OpRISCV64AND)
  6031  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  6032  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
  6033  		v.AddArg2(v0, x)
  6034  		return true
  6035  	}
  6036  	// match: (MOVWUreg (MOVDconst [c]))
  6037  	// result: (MOVDconst [int64(uint32(c))])
  6038  	for {
  6039  		if v_0.Op != OpRISCV64MOVDconst {
  6040  			break
  6041  		}
  6042  		c := auxIntToInt64(v_0.AuxInt)
  6043  		v.reset(OpRISCV64MOVDconst)
  6044  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
  6045  		return true
  6046  	}
  6047  	// match: (MOVWUreg x:(MOVBUload _ _))
  6048  	// result: (MOVDreg x)
  6049  	for {
  6050  		x := v_0
  6051  		if x.Op != OpRISCV64MOVBUload {
  6052  			break
  6053  		}
  6054  		v.reset(OpRISCV64MOVDreg)
  6055  		v.AddArg(x)
  6056  		return true
  6057  	}
  6058  	// match: (MOVWUreg x:(MOVHUload _ _))
  6059  	// result: (MOVDreg x)
  6060  	for {
  6061  		x := v_0
  6062  		if x.Op != OpRISCV64MOVHUload {
  6063  			break
  6064  		}
  6065  		v.reset(OpRISCV64MOVDreg)
  6066  		v.AddArg(x)
  6067  		return true
  6068  	}
  6069  	// match: (MOVWUreg x:(MOVWUload _ _))
  6070  	// result: (MOVDreg x)
  6071  	for {
  6072  		x := v_0
  6073  		if x.Op != OpRISCV64MOVWUload {
  6074  			break
  6075  		}
  6076  		v.reset(OpRISCV64MOVDreg)
  6077  		v.AddArg(x)
  6078  		return true
  6079  	}
  6080  	// match: (MOVWUreg x:(MOVBUreg _))
  6081  	// result: (MOVDreg x)
  6082  	for {
  6083  		x := v_0
  6084  		if x.Op != OpRISCV64MOVBUreg {
  6085  			break
  6086  		}
  6087  		v.reset(OpRISCV64MOVDreg)
  6088  		v.AddArg(x)
  6089  		return true
  6090  	}
  6091  	// match: (MOVWUreg x:(MOVHUreg _))
  6092  	// result: (MOVDreg x)
  6093  	for {
  6094  		x := v_0
  6095  		if x.Op != OpRISCV64MOVHUreg {
  6096  			break
  6097  		}
  6098  		v.reset(OpRISCV64MOVDreg)
  6099  		v.AddArg(x)
  6100  		return true
  6101  	}
  6102  	// match: (MOVWUreg x:(MOVWUreg _))
  6103  	// result: (MOVDreg x)
  6104  	for {
  6105  		x := v_0
  6106  		if x.Op != OpRISCV64MOVWUreg {
  6107  			break
  6108  		}
  6109  		v.reset(OpRISCV64MOVDreg)
  6110  		v.AddArg(x)
  6111  		return true
  6112  	}
  6113  	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
  6114  	// cond: x.Uses == 1 && clobber(x)
  6115  	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
  6116  	for {
  6117  		t := v.Type
  6118  		x := v_0
  6119  		if x.Op != OpRISCV64MOVWload {
  6120  			break
  6121  		}
  6122  		off := auxIntToInt32(x.AuxInt)
  6123  		sym := auxToSym(x.Aux)
  6124  		mem := x.Args[1]
  6125  		ptr := x.Args[0]
  6126  		if !(x.Uses == 1 && clobber(x)) {
  6127  			break
  6128  		}
  6129  		b = x.Block
  6130  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
  6131  		v.copyOf(v0)
  6132  		v0.AuxInt = int32ToAuxInt(off)
  6133  		v0.Aux = symToAux(sym)
  6134  		v0.AddArg2(ptr, mem)
  6135  		return true
  6136  	}
  6137  	return false
  6138  }
  6139  func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
  6140  	v_1 := v.Args[1]
  6141  	v_0 := v.Args[0]
  6142  	b := v.Block
  6143  	config := b.Func.Config
  6144  	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6145  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6146  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6147  	for {
  6148  		off1 := auxIntToInt32(v.AuxInt)
  6149  		sym1 := auxToSym(v.Aux)
  6150  		if v_0.Op != OpRISCV64MOVaddr {
  6151  			break
  6152  		}
  6153  		off2 := auxIntToInt32(v_0.AuxInt)
  6154  		sym2 := auxToSym(v_0.Aux)
  6155  		base := v_0.Args[0]
  6156  		mem := v_1
  6157  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6158  			break
  6159  		}
  6160  		v.reset(OpRISCV64MOVWload)
  6161  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6162  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6163  		v.AddArg2(base, mem)
  6164  		return true
  6165  	}
  6166  	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
  6167  	// cond: is32Bit(int64(off1)+off2)
  6168  	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
  6169  	for {
  6170  		off1 := auxIntToInt32(v.AuxInt)
  6171  		sym := auxToSym(v.Aux)
  6172  		if v_0.Op != OpRISCV64ADDI {
  6173  			break
  6174  		}
  6175  		off2 := auxIntToInt64(v_0.AuxInt)
  6176  		base := v_0.Args[0]
  6177  		mem := v_1
  6178  		if !(is32Bit(int64(off1) + off2)) {
  6179  			break
  6180  		}
  6181  		v.reset(OpRISCV64MOVWload)
  6182  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6183  		v.Aux = symToAux(sym)
  6184  		v.AddArg2(base, mem)
  6185  		return true
  6186  	}
  6187  	// match: (MOVWload [off] {sym} ptr1 (FMOVWstore [off] {sym} ptr2 x _))
  6188  	// cond: isSamePtr(ptr1, ptr2)
  6189  	// result: (FMVXS x)
  6190  	for {
  6191  		off := auxIntToInt32(v.AuxInt)
  6192  		sym := auxToSym(v.Aux)
  6193  		ptr1 := v_0
  6194  		if v_1.Op != OpRISCV64FMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  6195  			break
  6196  		}
  6197  		x := v_1.Args[1]
  6198  		ptr2 := v_1.Args[0]
  6199  		if !(isSamePtr(ptr1, ptr2)) {
  6200  			break
  6201  		}
  6202  		v.reset(OpRISCV64FMVXS)
  6203  		v.AddArg(x)
  6204  		return true
  6205  	}
  6206  	return false
  6207  }
  6208  func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
  6209  	v_0 := v.Args[0]
  6210  	b := v.Block
  6211  	// match: (MOVWreg x:(ANDI [c] y))
  6212  	// cond: c >= 0 && int64(int32(c)) == c
  6213  	// result: x
  6214  	for {
  6215  		x := v_0
  6216  		if x.Op != OpRISCV64ANDI {
  6217  			break
  6218  		}
  6219  		c := auxIntToInt64(x.AuxInt)
  6220  		if !(c >= 0 && int64(int32(c)) == c) {
  6221  			break
  6222  		}
  6223  		v.copyOf(x)
  6224  		return true
  6225  	}
  6226  	// match: (MOVWreg (NEG x))
  6227  	// result: (NEGW x)
  6228  	for {
  6229  		if v_0.Op != OpRISCV64NEG {
  6230  			break
  6231  		}
  6232  		x := v_0.Args[0]
  6233  		v.reset(OpRISCV64NEGW)
  6234  		v.AddArg(x)
  6235  		return true
  6236  	}
  6237  	// match: (MOVWreg (MOVDconst [c]))
  6238  	// result: (MOVDconst [int64(int32(c))])
  6239  	for {
  6240  		if v_0.Op != OpRISCV64MOVDconst {
  6241  			break
  6242  		}
  6243  		c := auxIntToInt64(v_0.AuxInt)
  6244  		v.reset(OpRISCV64MOVDconst)
  6245  		v.AuxInt = int64ToAuxInt(int64(int32(c)))
  6246  		return true
  6247  	}
  6248  	// match: (MOVWreg x:(MOVBload _ _))
  6249  	// result: (MOVDreg x)
  6250  	for {
  6251  		x := v_0
  6252  		if x.Op != OpRISCV64MOVBload {
  6253  			break
  6254  		}
  6255  		v.reset(OpRISCV64MOVDreg)
  6256  		v.AddArg(x)
  6257  		return true
  6258  	}
  6259  	// match: (MOVWreg x:(MOVBUload _ _))
  6260  	// result: (MOVDreg x)
  6261  	for {
  6262  		x := v_0
  6263  		if x.Op != OpRISCV64MOVBUload {
  6264  			break
  6265  		}
  6266  		v.reset(OpRISCV64MOVDreg)
  6267  		v.AddArg(x)
  6268  		return true
  6269  	}
  6270  	// match: (MOVWreg x:(MOVHload _ _))
  6271  	// result: (MOVDreg x)
  6272  	for {
  6273  		x := v_0
  6274  		if x.Op != OpRISCV64MOVHload {
  6275  			break
  6276  		}
  6277  		v.reset(OpRISCV64MOVDreg)
  6278  		v.AddArg(x)
  6279  		return true
  6280  	}
  6281  	// match: (MOVWreg x:(MOVHUload _ _))
  6282  	// result: (MOVDreg x)
  6283  	for {
  6284  		x := v_0
  6285  		if x.Op != OpRISCV64MOVHUload {
  6286  			break
  6287  		}
  6288  		v.reset(OpRISCV64MOVDreg)
  6289  		v.AddArg(x)
  6290  		return true
  6291  	}
  6292  	// match: (MOVWreg x:(MOVWload _ _))
  6293  	// result: (MOVDreg x)
  6294  	for {
  6295  		x := v_0
  6296  		if x.Op != OpRISCV64MOVWload {
  6297  			break
  6298  		}
  6299  		v.reset(OpRISCV64MOVDreg)
  6300  		v.AddArg(x)
  6301  		return true
  6302  	}
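        	// The 32-bit ("W") arithmetic and rotate instructions matched below
        	// already produce sign-extended results on RV64, so a following
        	// MOVWreg is redundant and becomes a plain MOVDreg.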
  6303  	// match: (MOVWreg x:(ADDIW _))
  6304  	// result: (MOVDreg x)
  6305  	for {
  6306  		x := v_0
  6307  		if x.Op != OpRISCV64ADDIW {
  6308  			break
  6309  		}
  6310  		v.reset(OpRISCV64MOVDreg)
  6311  		v.AddArg(x)
  6312  		return true
  6313  	}
  6314  	// match: (MOVWreg x:(SUBW _ _))
  6315  	// result: (MOVDreg x)
  6316  	for {
  6317  		x := v_0
  6318  		if x.Op != OpRISCV64SUBW {
  6319  			break
  6320  		}
  6321  		v.reset(OpRISCV64MOVDreg)
  6322  		v.AddArg(x)
  6323  		return true
  6324  	}
  6325  	// match: (MOVWreg x:(NEGW _))
  6326  	// result: (MOVDreg x)
  6327  	for {
  6328  		x := v_0
  6329  		if x.Op != OpRISCV64NEGW {
  6330  			break
  6331  		}
  6332  		v.reset(OpRISCV64MOVDreg)
  6333  		v.AddArg(x)
  6334  		return true
  6335  	}
  6336  	// match: (MOVWreg x:(MULW _ _))
  6337  	// result: (MOVDreg x)
  6338  	for {
  6339  		x := v_0
  6340  		if x.Op != OpRISCV64MULW {
  6341  			break
  6342  		}
  6343  		v.reset(OpRISCV64MOVDreg)
  6344  		v.AddArg(x)
  6345  		return true
  6346  	}
  6347  	// match: (MOVWreg x:(DIVW _ _))
  6348  	// result: (MOVDreg x)
  6349  	for {
  6350  		x := v_0
  6351  		if x.Op != OpRISCV64DIVW {
  6352  			break
  6353  		}
  6354  		v.reset(OpRISCV64MOVDreg)
  6355  		v.AddArg(x)
  6356  		return true
  6357  	}
  6358  	// match: (MOVWreg x:(DIVUW _ _))
  6359  	// result: (MOVDreg x)
  6360  	for {
  6361  		x := v_0
  6362  		if x.Op != OpRISCV64DIVUW {
  6363  			break
  6364  		}
  6365  		v.reset(OpRISCV64MOVDreg)
  6366  		v.AddArg(x)
  6367  		return true
  6368  	}
  6369  	// match: (MOVWreg x:(REMW _ _))
  6370  	// result: (MOVDreg x)
  6371  	for {
  6372  		x := v_0
  6373  		if x.Op != OpRISCV64REMW {
  6374  			break
  6375  		}
  6376  		v.reset(OpRISCV64MOVDreg)
  6377  		v.AddArg(x)
  6378  		return true
  6379  	}
  6380  	// match: (MOVWreg x:(REMUW _ _))
  6381  	// result: (MOVDreg x)
  6382  	for {
  6383  		x := v_0
  6384  		if x.Op != OpRISCV64REMUW {
  6385  			break
  6386  		}
  6387  		v.reset(OpRISCV64MOVDreg)
  6388  		v.AddArg(x)
  6389  		return true
  6390  	}
  6391  	// match: (MOVWreg x:(ROLW _ _))
  6392  	// result: (MOVDreg x)
  6393  	for {
  6394  		x := v_0
  6395  		if x.Op != OpRISCV64ROLW {
  6396  			break
  6397  		}
  6398  		v.reset(OpRISCV64MOVDreg)
  6399  		v.AddArg(x)
  6400  		return true
  6401  	}
  6402  	// match: (MOVWreg x:(RORW _ _))
  6403  	// result: (MOVDreg x)
  6404  	for {
  6405  		x := v_0
  6406  		if x.Op != OpRISCV64RORW {
  6407  			break
  6408  		}
  6409  		v.reset(OpRISCV64MOVDreg)
  6410  		v.AddArg(x)
  6411  		return true
  6412  	}
  6413  	// match: (MOVWreg x:(RORIW _))
  6414  	// result: (MOVDreg x)
  6415  	for {
  6416  		x := v_0
  6417  		if x.Op != OpRISCV64RORIW {
  6418  			break
  6419  		}
  6420  		v.reset(OpRISCV64MOVDreg)
  6421  		v.AddArg(x)
  6422  		return true
  6423  	}
  6424  	// match: (MOVWreg x:(MOVBreg _))
  6425  	// result: (MOVDreg x)
  6426  	for {
  6427  		x := v_0
  6428  		if x.Op != OpRISCV64MOVBreg {
  6429  			break
  6430  		}
  6431  		v.reset(OpRISCV64MOVDreg)
  6432  		v.AddArg(x)
  6433  		return true
  6434  	}
  6435  	// match: (MOVWreg x:(MOVBUreg _))
  6436  	// result: (MOVDreg x)
  6437  	for {
  6438  		x := v_0
  6439  		if x.Op != OpRISCV64MOVBUreg {
  6440  			break
  6441  		}
  6442  		v.reset(OpRISCV64MOVDreg)
  6443  		v.AddArg(x)
  6444  		return true
  6445  	}
  6446  	// match: (MOVWreg x:(MOVHreg _))
  6447  	// result: (MOVDreg x)
  6448  	for {
  6449  		x := v_0
  6450  		if x.Op != OpRISCV64MOVHreg {
  6451  			break
  6452  		}
  6453  		v.reset(OpRISCV64MOVDreg)
  6454  		v.AddArg(x)
  6455  		return true
  6456  	}
  6457  	// match: (MOVWreg x:(MOVWreg _))
  6458  	// result: (MOVDreg x)
  6459  	for {
  6460  		x := v_0
  6461  		if x.Op != OpRISCV64MOVWreg {
  6462  			break
  6463  		}
  6464  		v.reset(OpRISCV64MOVDreg)
  6465  		v.AddArg(x)
  6466  		return true
  6467  	}
  6468  	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
  6469  	// cond: x.Uses == 1 && clobber(x)
  6470  	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
  6471  	for {
  6472  		t := v.Type
  6473  		x := v_0
  6474  		if x.Op != OpRISCV64MOVWUload {
  6475  			break
  6476  		}
  6477  		off := auxIntToInt32(x.AuxInt)
  6478  		sym := auxToSym(x.Aux)
  6479  		mem := x.Args[1]
  6480  		ptr := x.Args[0]
  6481  		if !(x.Uses == 1 && clobber(x)) {
  6482  			break
  6483  		}
  6484  		b = x.Block
  6485  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
  6486  		v.copyOf(v0)
  6487  		v0.AuxInt = int32ToAuxInt(off)
  6488  		v0.Aux = symToAux(sym)
  6489  		v0.AddArg2(ptr, mem)
  6490  		return true
  6491  	}
  6492  	return false
  6493  }
  6494  func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
  6495  	v_2 := v.Args[2]
  6496  	v_1 := v.Args[1]
  6497  	v_0 := v.Args[0]
  6498  	b := v.Block
  6499  	config := b.Func.Config
  6500  	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  6501  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6502  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  6503  	for {
  6504  		off1 := auxIntToInt32(v.AuxInt)
  6505  		sym1 := auxToSym(v.Aux)
  6506  		if v_0.Op != OpRISCV64MOVaddr {
  6507  			break
  6508  		}
  6509  		off2 := auxIntToInt32(v_0.AuxInt)
  6510  		sym2 := auxToSym(v_0.Aux)
  6511  		base := v_0.Args[0]
  6512  		val := v_1
  6513  		mem := v_2
  6514  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6515  			break
  6516  		}
  6517  		v.reset(OpRISCV64MOVWstore)
  6518  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6519  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6520  		v.AddArg3(base, val, mem)
  6521  		return true
  6522  	}
  6523  	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
  6524  	// cond: is32Bit(int64(off1)+off2)
  6525  	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
  6526  	for {
  6527  		off1 := auxIntToInt32(v.AuxInt)
  6528  		sym := auxToSym(v.Aux)
  6529  		if v_0.Op != OpRISCV64ADDI {
  6530  			break
  6531  		}
  6532  		off2 := auxIntToInt64(v_0.AuxInt)
  6533  		base := v_0.Args[0]
  6534  		val := v_1
  6535  		mem := v_2
  6536  		if !(is32Bit(int64(off1) + off2)) {
  6537  			break
  6538  		}
  6539  		v.reset(OpRISCV64MOVWstore)
  6540  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6541  		v.Aux = symToAux(sym)
  6542  		v.AddArg3(base, val, mem)
  6543  		return true
  6544  	}
  6545  	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
  6546  	// result: (MOVWstorezero [off] {sym} ptr mem)
  6547  	for {
  6548  		off := auxIntToInt32(v.AuxInt)
  6549  		sym := auxToSym(v.Aux)
  6550  		ptr := v_0
  6551  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  6552  			break
  6553  		}
  6554  		mem := v_2
  6555  		v.reset(OpRISCV64MOVWstorezero)
  6556  		v.AuxInt = int32ToAuxInt(off)
  6557  		v.Aux = symToAux(sym)
  6558  		v.AddArg2(ptr, mem)
  6559  		return true
  6560  	}
  6561  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  6562  	// result: (MOVWstore [off] {sym} ptr x mem)
  6563  	for {
  6564  		off := auxIntToInt32(v.AuxInt)
  6565  		sym := auxToSym(v.Aux)
  6566  		ptr := v_0
  6567  		if v_1.Op != OpRISCV64MOVWreg {
  6568  			break
  6569  		}
  6570  		x := v_1.Args[0]
  6571  		mem := v_2
  6572  		v.reset(OpRISCV64MOVWstore)
  6573  		v.AuxInt = int32ToAuxInt(off)
  6574  		v.Aux = symToAux(sym)
  6575  		v.AddArg3(ptr, x, mem)
  6576  		return true
  6577  	}
  6578  	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
  6579  	// result: (MOVWstore [off] {sym} ptr x mem)
  6580  	for {
  6581  		off := auxIntToInt32(v.AuxInt)
  6582  		sym := auxToSym(v.Aux)
  6583  		ptr := v_0
  6584  		if v_1.Op != OpRISCV64MOVWUreg {
  6585  			break
  6586  		}
  6587  		x := v_1.Args[0]
  6588  		mem := v_2
  6589  		v.reset(OpRISCV64MOVWstore)
  6590  		v.AuxInt = int32ToAuxInt(off)
  6591  		v.Aux = symToAux(sym)
  6592  		v.AddArg3(ptr, x, mem)
  6593  		return true
  6594  	}
  6595  	return false
  6596  }
  6597  func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
  6598  	v_1 := v.Args[1]
  6599  	v_0 := v.Args[0]
  6600  	b := v.Block
  6601  	config := b.Func.Config
  6602  	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  6603  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
  6604  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
  6605  	for {
  6606  		off1 := auxIntToInt32(v.AuxInt)
  6607  		sym1 := auxToSym(v.Aux)
  6608  		if v_0.Op != OpRISCV64MOVaddr {
  6609  			break
  6610  		}
  6611  		off2 := auxIntToInt32(v_0.AuxInt)
  6612  		sym2 := auxToSym(v_0.Aux)
  6613  		base := v_0.Args[0]
  6614  		mem := v_1
  6615  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  6616  			break
  6617  		}
  6618  		v.reset(OpRISCV64MOVWstorezero)
  6619  		v.AuxInt = int32ToAuxInt(off1 + off2)
  6620  		v.Aux = symToAux(mergeSym(sym1, sym2))
  6621  		v.AddArg2(base, mem)
  6622  		return true
  6623  	}
  6624  	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] base) mem)
  6625  	// cond: is32Bit(int64(off1)+off2)
  6626  	// result: (MOVWstorezero [off1+int32(off2)] {sym} base mem)
  6627  	for {
  6628  		off1 := auxIntToInt32(v.AuxInt)
  6629  		sym := auxToSym(v.Aux)
  6630  		if v_0.Op != OpRISCV64ADDI {
  6631  			break
  6632  		}
  6633  		off2 := auxIntToInt64(v_0.AuxInt)
  6634  		base := v_0.Args[0]
  6635  		mem := v_1
  6636  		if !(is32Bit(int64(off1) + off2)) {
  6637  			break
  6638  		}
  6639  		v.reset(OpRISCV64MOVWstorezero)
  6640  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  6641  		v.Aux = symToAux(sym)
  6642  		v.AddArg2(base, mem)
  6643  		return true
  6644  	}
  6645  	return false
  6646  }
  6647  func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
  6648  	v_0 := v.Args[0]
  6649  	b := v.Block
  6650  	// match: (NEG (SUB x y))
  6651  	// result: (SUB y x)
  6652  	for {
  6653  		if v_0.Op != OpRISCV64SUB {
  6654  			break
  6655  		}
  6656  		y := v_0.Args[1]
  6657  		x := v_0.Args[0]
  6658  		v.reset(OpRISCV64SUB)
  6659  		v.AddArg2(y, x)
  6660  		return true
  6661  	}
  6662  	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
  6663  	// cond: s.Uses == 1 && is32Bit(-val)
  6664  	// result: (ADDI [-val] (SUB <t> y x))
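        	// Algebra: -((x - y) + val) = (y - x) - val. The s.Uses == 1 guard
        	// avoids duplicating the addition for other users of s.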
  6665  	for {
  6666  		t := v.Type
  6667  		s := v_0
  6668  		if s.Op != OpRISCV64ADDI {
  6669  			break
  6670  		}
  6671  		val := auxIntToInt64(s.AuxInt)
  6672  		s_0 := s.Args[0]
  6673  		if s_0.Op != OpRISCV64SUB {
  6674  			break
  6675  		}
  6676  		y := s_0.Args[1]
  6677  		x := s_0.Args[0]
  6678  		if !(s.Uses == 1 && is32Bit(-val)) {
  6679  			break
  6680  		}
  6681  		v.reset(OpRISCV64ADDI)
  6682  		v.AuxInt = int64ToAuxInt(-val)
  6683  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
  6684  		v0.AddArg2(y, x)
  6685  		v.AddArg(v0)
  6686  		return true
  6687  	}
  6688  	// match: (NEG (NEG x))
  6689  	// result: x
  6690  	for {
  6691  		if v_0.Op != OpRISCV64NEG {
  6692  			break
  6693  		}
  6694  		x := v_0.Args[0]
  6695  		v.copyOf(x)
  6696  		return true
  6697  	}
  6698  	// match: (NEG <t> s:(ADDI [val] (NEG x)))
  6699  	// cond: s.Uses == 1 && is32Bit(-val)
  6700  	// result: (ADDI [-val] x)
  6701  	for {
  6702  		s := v_0
  6703  		if s.Op != OpRISCV64ADDI {
  6704  			break
  6705  		}
  6706  		val := auxIntToInt64(s.AuxInt)
  6707  		s_0 := s.Args[0]
  6708  		if s_0.Op != OpRISCV64NEG {
  6709  			break
  6710  		}
  6711  		x := s_0.Args[0]
  6712  		if !(s.Uses == 1 && is32Bit(-val)) {
  6713  			break
  6714  		}
  6715  		v.reset(OpRISCV64ADDI)
  6716  		v.AuxInt = int64ToAuxInt(-val)
  6717  		v.AddArg(x)
  6718  		return true
  6719  	}
  6720  	// match: (NEG (MOVDconst [x]))
  6721  	// result: (MOVDconst [-x])
  6722  	for {
  6723  		if v_0.Op != OpRISCV64MOVDconst {
  6724  			break
  6725  		}
  6726  		x := auxIntToInt64(v_0.AuxInt)
  6727  		v.reset(OpRISCV64MOVDconst)
  6728  		v.AuxInt = int64ToAuxInt(-x)
  6729  		return true
  6730  	}
  6731  	return false
  6732  }
  6733  func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
  6734  	v_0 := v.Args[0]
  6735  	// match: (NEGW (MOVDconst [x]))
  6736  	// result: (MOVDconst [int64(int32(-x))])
  6737  	for {
  6738  		if v_0.Op != OpRISCV64MOVDconst {
  6739  			break
  6740  		}
  6741  		x := auxIntToInt64(v_0.AuxInt)
  6742  		v.reset(OpRISCV64MOVDconst)
  6743  		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
  6744  		return true
  6745  	}
  6746  	return false
  6747  }
  6748  func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
  6749  	v_1 := v.Args[1]
  6750  	v_0 := v.Args[0]
  6751  	// match: (OR (MOVDconst [val]) x)
  6752  	// cond: is32Bit(val)
  6753  	// result: (ORI [val] x)
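        	// OR is commutative, so the _i0 loop below tries both operand
        	// orders, swapping v_0 and v_1 on the second pass.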
  6754  	for {
  6755  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  6756  			if v_0.Op != OpRISCV64MOVDconst {
  6757  				continue
  6758  			}
  6759  			val := auxIntToInt64(v_0.AuxInt)
  6760  			x := v_1
  6761  			if !(is32Bit(val)) {
  6762  				continue
  6763  			}
  6764  			v.reset(OpRISCV64ORI)
  6765  			v.AuxInt = int64ToAuxInt(val)
  6766  			v.AddArg(x)
  6767  			return true
  6768  		}
  6769  		break
  6770  	}
  6771  	// match: (OR x x)
  6772  	// result: x
  6773  	for {
  6774  		x := v_0
  6775  		if x != v_1 {
  6776  			break
  6777  		}
  6778  		v.copyOf(x)
  6779  		return true
  6780  	}
  6781  	return false
  6782  }
  6783  func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
  6784  	v_0 := v.Args[0]
  6785  	// match: (ORI [0] x)
  6786  	// result: x
  6787  	for {
  6788  		if auxIntToInt64(v.AuxInt) != 0 {
  6789  			break
  6790  		}
  6791  		x := v_0
  6792  		v.copyOf(x)
  6793  		return true
  6794  	}
  6795  	// match: (ORI [-1] x)
  6796  	// result: (MOVDconst [-1])
  6797  	for {
  6798  		if auxIntToInt64(v.AuxInt) != -1 {
  6799  			break
  6800  		}
  6801  		v.reset(OpRISCV64MOVDconst)
  6802  		v.AuxInt = int64ToAuxInt(-1)
  6803  		return true
  6804  	}
  6805  	// match: (ORI [x] (MOVDconst [y]))
  6806  	// result: (MOVDconst [x | y])
  6807  	for {
  6808  		x := auxIntToInt64(v.AuxInt)
  6809  		if v_0.Op != OpRISCV64MOVDconst {
  6810  			break
  6811  		}
  6812  		y := auxIntToInt64(v_0.AuxInt)
  6813  		v.reset(OpRISCV64MOVDconst)
  6814  		v.AuxInt = int64ToAuxInt(x | y)
  6815  		return true
  6816  	}
  6817  	// match: (ORI [x] (ORI [y] z))
  6818  	// result: (ORI [x | y] z)
  6819  	for {
  6820  		x := auxIntToInt64(v.AuxInt)
  6821  		if v_0.Op != OpRISCV64ORI {
  6822  			break
  6823  		}
  6824  		y := auxIntToInt64(v_0.AuxInt)
  6825  		z := v_0.Args[0]
  6826  		v.reset(OpRISCV64ORI)
  6827  		v.AuxInt = int64ToAuxInt(x | y)
  6828  		v.AddArg(z)
  6829  		return true
  6830  	}
  6831  	return false
  6832  }
  6833  func rewriteValueRISCV64_OpRISCV64ORN(v *Value) bool {
  6834  	v_1 := v.Args[1]
  6835  	v_0 := v.Args[0]
  6836  	// match: (ORN x x)
  6837  	// result: (MOVDconst [-1])
  6838  	for {
  6839  		x := v_0
  6840  		if x != v_1 {
  6841  			break
  6842  		}
  6843  		v.reset(OpRISCV64MOVDconst)
  6844  		v.AuxInt = int64ToAuxInt(-1)
  6845  		return true
  6846  	}
  6847  	return false
  6848  }
  6849  func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
  6850  	v_1 := v.Args[1]
  6851  	v_0 := v.Args[0]
  6852  	// match: (ROL x (MOVDconst [val]))
  6853  	// result: (RORI [int64(int8(-val)&63)] x)
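        	// A left rotation by val is a right rotation by (-val)&63, i.e.
        	// (64 - val) mod 64; e.g. val = 1 yields RORI [63]. The int8
        	// truncation does not affect the low six bits kept by the mask.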
  6854  	for {
  6855  		x := v_0
  6856  		if v_1.Op != OpRISCV64MOVDconst {
  6857  			break
  6858  		}
  6859  		val := auxIntToInt64(v_1.AuxInt)
  6860  		v.reset(OpRISCV64RORI)
  6861  		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63))
  6862  		v.AddArg(x)
  6863  		return true
  6864  	}
  6865  	// match: (ROL x (NEG y))
  6866  	// result: (ROR x y)
  6867  	for {
  6868  		x := v_0
  6869  		if v_1.Op != OpRISCV64NEG {
  6870  			break
  6871  		}
  6872  		y := v_1.Args[0]
  6873  		v.reset(OpRISCV64ROR)
  6874  		v.AddArg2(x, y)
  6875  		return true
  6876  	}
  6877  	return false
  6878  }
  6879  func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
  6880  	v_1 := v.Args[1]
  6881  	v_0 := v.Args[0]
  6882  	// match: (ROLW x (MOVDconst [val]))
  6883  	// result: (RORIW [int64(int8(-val)&31)] x)
  6884  	for {
  6885  		x := v_0
  6886  		if v_1.Op != OpRISCV64MOVDconst {
  6887  			break
  6888  		}
  6889  		val := auxIntToInt64(v_1.AuxInt)
  6890  		v.reset(OpRISCV64RORIW)
  6891  		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31))
  6892  		v.AddArg(x)
  6893  		return true
  6894  	}
  6895  	// match: (ROLW x (NEG y))
  6896  	// result: (RORW x y)
  6897  	for {
  6898  		x := v_0
  6899  		if v_1.Op != OpRISCV64NEG {
  6900  			break
  6901  		}
  6902  		y := v_1.Args[0]
  6903  		v.reset(OpRISCV64RORW)
  6904  		v.AddArg2(x, y)
  6905  		return true
  6906  	}
  6907  	return false
  6908  }
  6909  func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
  6910  	v_1 := v.Args[1]
  6911  	v_0 := v.Args[0]
  6912  	// match: (ROR x (MOVDconst [val]))
  6913  	// result: (RORI [int64(val&63)] x)
  6914  	for {
  6915  		x := v_0
  6916  		if v_1.Op != OpRISCV64MOVDconst {
  6917  			break
  6918  		}
  6919  		val := auxIntToInt64(v_1.AuxInt)
  6920  		v.reset(OpRISCV64RORI)
  6921  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  6922  		v.AddArg(x)
  6923  		return true
  6924  	}
  6925  	return false
  6926  }
  6927  func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
  6928  	v_1 := v.Args[1]
  6929  	v_0 := v.Args[0]
  6930  	// match: (RORW x (MOVDconst [val]))
  6931  	// result: (RORIW [int64(val&31)] x)
  6932  	for {
  6933  		x := v_0
  6934  		if v_1.Op != OpRISCV64MOVDconst {
  6935  			break
  6936  		}
  6937  		val := auxIntToInt64(v_1.AuxInt)
  6938  		v.reset(OpRISCV64RORIW)
  6939  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  6940  		v.AddArg(x)
  6941  		return true
  6942  	}
  6943  	return false
  6944  }
  6945  func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
  6946  	v_0 := v.Args[0]
  6947  	// match: (SEQZ (NEG x))
  6948  	// result: (SEQZ x)
  6949  	for {
  6950  		if v_0.Op != OpRISCV64NEG {
  6951  			break
  6952  		}
  6953  		x := v_0.Args[0]
  6954  		v.reset(OpRISCV64SEQZ)
  6955  		v.AddArg(x)
  6956  		return true
  6957  	}
  6958  	// match: (SEQZ (SEQZ x))
  6959  	// result: (SNEZ x)
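        	// SEQZ produces 0 or 1, so applying SEQZ again inverts the boolean:
        	// (x == 0) == 0 is exactly x != 0, i.e. SNEZ x.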
  6960  	for {
  6961  		if v_0.Op != OpRISCV64SEQZ {
  6962  			break
  6963  		}
  6964  		x := v_0.Args[0]
  6965  		v.reset(OpRISCV64SNEZ)
  6966  		v.AddArg(x)
  6967  		return true
  6968  	}
  6969  	// match: (SEQZ (SNEZ x))
  6970  	// result: (SEQZ x)
  6971  	for {
  6972  		if v_0.Op != OpRISCV64SNEZ {
  6973  			break
  6974  		}
  6975  		x := v_0.Args[0]
  6976  		v.reset(OpRISCV64SEQZ)
  6977  		v.AddArg(x)
  6978  		return true
  6979  	}
  6980  	return false
  6981  }
  6982  func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
  6983  	v_1 := v.Args[1]
  6984  	v_0 := v.Args[0]
  6985  	// match: (SLL x (MOVDconst [val]))
  6986  	// result: (SLLI [int64(val&63)] x)
  6987  	for {
  6988  		x := v_0
  6989  		if v_1.Op != OpRISCV64MOVDconst {
  6990  			break
  6991  		}
  6992  		val := auxIntToInt64(v_1.AuxInt)
  6993  		v.reset(OpRISCV64SLLI)
  6994  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  6995  		v.AddArg(x)
  6996  		return true
  6997  	}
  6998  	return false
  6999  }
  7000  func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
  7001  	v_0 := v.Args[0]
  7002  	// match: (SLLI [x] (MOVDconst [y]))
  7003  	// cond: is32Bit(y << uint32(x))
  7004  	// result: (MOVDconst [y << uint32(x)])
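        	// The fold is restricted to shifted constants that still fit in 32
        	// bits, presumably to keep the folded constant cheap to
        	// materialise; otherwise the shift is left in place.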
  7005  	for {
  7006  		x := auxIntToInt64(v.AuxInt)
  7007  		if v_0.Op != OpRISCV64MOVDconst {
  7008  			break
  7009  		}
  7010  		y := auxIntToInt64(v_0.AuxInt)
  7011  		if !(is32Bit(y << uint32(x))) {
  7012  			break
  7013  		}
  7014  		v.reset(OpRISCV64MOVDconst)
  7015  		v.AuxInt = int64ToAuxInt(y << uint32(x))
  7016  		return true
  7017  	}
  7018  	return false
  7019  }
  7020  func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
  7021  	v_1 := v.Args[1]
  7022  	v_0 := v.Args[0]
  7023  	// match: (SLLW x (MOVDconst [val]))
  7024  	// result: (SLLIW [int64(val&31)] x)
  7025  	for {
  7026  		x := v_0
  7027  		if v_1.Op != OpRISCV64MOVDconst {
  7028  			break
  7029  		}
  7030  		val := auxIntToInt64(v_1.AuxInt)
  7031  		v.reset(OpRISCV64SLLIW)
  7032  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  7033  		v.AddArg(x)
  7034  		return true
  7035  	}
  7036  	return false
  7037  }
  7038  func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
  7039  	v_1 := v.Args[1]
  7040  	v_0 := v.Args[0]
  7041  	// match: (SLT x (MOVDconst [val]))
  7042  	// cond: val >= -2048 && val <= 2047
  7043  	// result: (SLTI [val] x)
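        	// [-2048, 2047] is the range of a 12-bit signed immediate, as used
        	// by RISC-V I-type instructions such as SLTI.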
  7044  	for {
  7045  		x := v_0
  7046  		if v_1.Op != OpRISCV64MOVDconst {
  7047  			break
  7048  		}
  7049  		val := auxIntToInt64(v_1.AuxInt)
  7050  		if !(val >= -2048 && val <= 2047) {
  7051  			break
  7052  		}
  7053  		v.reset(OpRISCV64SLTI)
  7054  		v.AuxInt = int64ToAuxInt(val)
  7055  		v.AddArg(x)
  7056  		return true
  7057  	}
  7058  	// match: (SLT x x)
  7059  	// result: (MOVDconst [0])
  7060  	for {
  7061  		x := v_0
  7062  		if x != v_1 {
  7063  			break
  7064  		}
  7065  		v.reset(OpRISCV64MOVDconst)
  7066  		v.AuxInt = int64ToAuxInt(0)
  7067  		return true
  7068  	}
  7069  	return false
  7070  }
  7071  func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
  7072  	v_0 := v.Args[0]
  7073  	// match: (SLTI [x] (MOVDconst [y]))
  7074  	// result: (MOVDconst [b2i(int64(y) < int64(x))])
  7075  	for {
  7076  		x := auxIntToInt64(v.AuxInt)
  7077  		if v_0.Op != OpRISCV64MOVDconst {
  7078  			break
  7079  		}
  7080  		y := auxIntToInt64(v_0.AuxInt)
  7081  		v.reset(OpRISCV64MOVDconst)
  7082  		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
  7083  		return true
  7084  	}
  7085  	// match: (SLTI [x] (ANDI [y] _))
  7086  	// cond: y >= 0 && int64(y) < int64(x)
  7087  	// result: (MOVDconst [1])
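        	// With a non-negative mask y, the AND result always lies in [0, y],
        	// so when y < x the comparison is known to be true.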
  7088  	for {
  7089  		x := auxIntToInt64(v.AuxInt)
  7090  		if v_0.Op != OpRISCV64ANDI {
  7091  			break
  7092  		}
  7093  		y := auxIntToInt64(v_0.AuxInt)
  7094  		if !(y >= 0 && int64(y) < int64(x)) {
  7095  			break
  7096  		}
  7097  		v.reset(OpRISCV64MOVDconst)
  7098  		v.AuxInt = int64ToAuxInt(1)
  7099  		return true
  7100  	}
  7101  	// match: (SLTI [x] (ORI [y] _))
  7102  	// cond: y >= 0 && int64(y) >= int64(x)
  7103  	// result: (MOVDconst [0])
  7104  	for {
  7105  		x := auxIntToInt64(v.AuxInt)
  7106  		if v_0.Op != OpRISCV64ORI {
  7107  			break
  7108  		}
  7109  		y := auxIntToInt64(v_0.AuxInt)
  7110  		if !(y >= 0 && int64(y) >= int64(x)) {
  7111  			break
  7112  		}
  7113  		v.reset(OpRISCV64MOVDconst)
  7114  		v.AuxInt = int64ToAuxInt(0)
  7115  		return true
  7116  	}
  7117  	return false
  7118  }
  7119  func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
  7120  	v_0 := v.Args[0]
  7121  	// match: (SLTIU [x] (MOVDconst [y]))
  7122  	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
  7123  	for {
  7124  		x := auxIntToInt64(v.AuxInt)
  7125  		if v_0.Op != OpRISCV64MOVDconst {
  7126  			break
  7127  		}
  7128  		y := auxIntToInt64(v_0.AuxInt)
  7129  		v.reset(OpRISCV64MOVDconst)
  7130  		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
  7131  		return true
  7132  	}
  7133  	// match: (SLTIU [x] (ANDI [y] _))
  7134  	// cond: y >= 0 && uint64(y) < uint64(x)
  7135  	// result: (MOVDconst [1])
  7136  	for {
  7137  		x := auxIntToInt64(v.AuxInt)
  7138  		if v_0.Op != OpRISCV64ANDI {
  7139  			break
  7140  		}
  7141  		y := auxIntToInt64(v_0.AuxInt)
  7142  		if !(y >= 0 && uint64(y) < uint64(x)) {
  7143  			break
  7144  		}
  7145  		v.reset(OpRISCV64MOVDconst)
  7146  		v.AuxInt = int64ToAuxInt(1)
  7147  		return true
  7148  	}
  7149  	// match: (SLTIU [x] (ORI [y] _))
  7150  	// cond: y >= 0 && uint64(y) >= uint64(x)
  7151  	// result: (MOVDconst [0])
  7152  	for {
  7153  		x := auxIntToInt64(v.AuxInt)
  7154  		if v_0.Op != OpRISCV64ORI {
  7155  			break
  7156  		}
  7157  		y := auxIntToInt64(v_0.AuxInt)
  7158  		if !(y >= 0 && uint64(y) >= uint64(x)) {
  7159  			break
  7160  		}
  7161  		v.reset(OpRISCV64MOVDconst)
  7162  		v.AuxInt = int64ToAuxInt(0)
  7163  		return true
  7164  	}
  7165  	return false
  7166  }
  7167  func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
  7168  	v_1 := v.Args[1]
  7169  	v_0 := v.Args[0]
  7170  	// match: (SLTU x (MOVDconst [val]))
  7171  	// cond: val >= -2048 && val <= 2047
  7172  	// result: (SLTIU [val] x)
  7173  	for {
  7174  		x := v_0
  7175  		if v_1.Op != OpRISCV64MOVDconst {
  7176  			break
  7177  		}
  7178  		val := auxIntToInt64(v_1.AuxInt)
  7179  		if !(val >= -2048 && val <= 2047) {
  7180  			break
  7181  		}
  7182  		v.reset(OpRISCV64SLTIU)
  7183  		v.AuxInt = int64ToAuxInt(val)
  7184  		v.AddArg(x)
  7185  		return true
  7186  	}
  7187  	// match: (SLTU x x)
  7188  	// result: (MOVDconst [0])
  7189  	for {
  7190  		x := v_0
  7191  		if x != v_1 {
  7192  			break
  7193  		}
  7194  		v.reset(OpRISCV64MOVDconst)
  7195  		v.AuxInt = int64ToAuxInt(0)
  7196  		return true
  7197  	}
  7198  	return false
  7199  }
  7200  func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
  7201  	v_0 := v.Args[0]
  7202  	// match: (SNEZ (NEG x))
  7203  	// result: (SNEZ x)
  7204  	for {
  7205  		if v_0.Op != OpRISCV64NEG {
  7206  			break
  7207  		}
  7208  		x := v_0.Args[0]
  7209  		v.reset(OpRISCV64SNEZ)
  7210  		v.AddArg(x)
  7211  		return true
  7212  	}
  7213  	// match: (SNEZ (SEQZ x))
  7214  	// result: (SEQZ x)
  7215  	for {
  7216  		if v_0.Op != OpRISCV64SEQZ {
  7217  			break
  7218  		}
  7219  		x := v_0.Args[0]
  7220  		v.reset(OpRISCV64SEQZ)
  7221  		v.AddArg(x)
  7222  		return true
  7223  	}
  7224  	// match: (SNEZ (SNEZ x))
  7225  	// result: (SNEZ x)
  7226  	for {
  7227  		if v_0.Op != OpRISCV64SNEZ {
  7228  			break
  7229  		}
  7230  		x := v_0.Args[0]
  7231  		v.reset(OpRISCV64SNEZ)
  7232  		v.AddArg(x)
  7233  		return true
  7234  	}
  7235  	return false
  7236  }
  7237  func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
  7238  	v_1 := v.Args[1]
  7239  	v_0 := v.Args[0]
  7240  	// match: (SRA x (MOVDconst [val]))
  7241  	// result: (SRAI [int64(val&63)] x)
  7242  	for {
  7243  		x := v_0
  7244  		if v_1.Op != OpRISCV64MOVDconst {
  7245  			break
  7246  		}
  7247  		val := auxIntToInt64(v_1.AuxInt)
  7248  		v.reset(OpRISCV64SRAI)
  7249  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  7250  		v.AddArg(x)
  7251  		return true
  7252  	}
  7253  	return false
  7254  }
  7255  func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
  7256  	v_0 := v.Args[0]
  7257  	b := v.Block
  7258  	// match: (SRAI <t> [x] (MOVWreg y))
  7259  	// cond: x >= 0 && x <= 31
  7260  	// result: (SRAIW <t> [int64(x)] y)
  7261  	for {
  7262  		t := v.Type
  7263  		x := auxIntToInt64(v.AuxInt)
  7264  		if v_0.Op != OpRISCV64MOVWreg {
  7265  			break
  7266  		}
  7267  		y := v_0.Args[0]
  7268  		if !(x >= 0 && x <= 31) {
  7269  			break
  7270  		}
  7271  		v.reset(OpRISCV64SRAIW)
  7272  		v.Type = t
  7273  		v.AuxInt = int64ToAuxInt(int64(x))
  7274  		v.AddArg(y)
  7275  		return true
  7276  	}
  7277  	// match: (SRAI <t> [x] (MOVBreg y))
  7278  	// cond: x >= 8
  7279  	// result: (SRAI [63] (SLLI <t> [56] y))
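        	// Shifting a sign-extended byte right by 8 or more leaves only
        	// copies of its sign bit, i.e. 0 or -1; SLLI 56 moves bit 7 of y
        	// to bit 63 and SRAI 63 smears it across the register.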
  7280  	for {
  7281  		t := v.Type
  7282  		x := auxIntToInt64(v.AuxInt)
  7283  		if v_0.Op != OpRISCV64MOVBreg {
  7284  			break
  7285  		}
  7286  		y := v_0.Args[0]
  7287  		if !(x >= 8) {
  7288  			break
  7289  		}
  7290  		v.reset(OpRISCV64SRAI)
  7291  		v.AuxInt = int64ToAuxInt(63)
  7292  		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
  7293  		v0.AuxInt = int64ToAuxInt(56)
  7294  		v0.AddArg(y)
  7295  		v.AddArg(v0)
  7296  		return true
  7297  	}
  7298  	// match: (SRAI <t> [x] (MOVHreg y))
  7299  	// cond: x >= 16
  7300  	// result: (SRAI [63] (SLLI <t> [48] y))
  7301  	for {
  7302  		t := v.Type
  7303  		x := auxIntToInt64(v.AuxInt)
  7304  		if v_0.Op != OpRISCV64MOVHreg {
  7305  			break
  7306  		}
  7307  		y := v_0.Args[0]
  7308  		if !(x >= 16) {
  7309  			break
  7310  		}
  7311  		v.reset(OpRISCV64SRAI)
  7312  		v.AuxInt = int64ToAuxInt(63)
  7313  		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
  7314  		v0.AuxInt = int64ToAuxInt(48)
  7315  		v0.AddArg(y)
  7316  		v.AddArg(v0)
  7317  		return true
  7318  	}
  7319  	// match: (SRAI <t> [x] (MOVWreg y))
  7320  	// cond: x >= 32
  7321  	// result: (SRAIW [31] y)
  7322  	for {
  7323  		x := auxIntToInt64(v.AuxInt)
  7324  		if v_0.Op != OpRISCV64MOVWreg {
  7325  			break
  7326  		}
  7327  		y := v_0.Args[0]
  7328  		if !(x >= 32) {
  7329  			break
  7330  		}
  7331  		v.reset(OpRISCV64SRAIW)
  7332  		v.AuxInt = int64ToAuxInt(31)
  7333  		v.AddArg(y)
  7334  		return true
  7335  	}
  7336  	// match: (SRAI [x] (MOVDconst [y]))
  7337  	// result: (MOVDconst [int64(y) >> uint32(x)])
  7338  	for {
  7339  		x := auxIntToInt64(v.AuxInt)
  7340  		if v_0.Op != OpRISCV64MOVDconst {
  7341  			break
  7342  		}
  7343  		y := auxIntToInt64(v_0.AuxInt)
  7344  		v.reset(OpRISCV64MOVDconst)
  7345  		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
  7346  		return true
  7347  	}
  7348  	return false
  7349  }
  7350  func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
  7351  	v_1 := v.Args[1]
  7352  	v_0 := v.Args[0]
  7353  	// match: (SRAW x (MOVDconst [val]))
  7354  	// result: (SRAIW [int64(val&31)] x)
  7355  	for {
  7356  		x := v_0
  7357  		if v_1.Op != OpRISCV64MOVDconst {
  7358  			break
  7359  		}
  7360  		val := auxIntToInt64(v_1.AuxInt)
  7361  		v.reset(OpRISCV64SRAIW)
  7362  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  7363  		v.AddArg(x)
  7364  		return true
  7365  	}
  7366  	return false
  7367  }
  7368  func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
  7369  	v_1 := v.Args[1]
  7370  	v_0 := v.Args[0]
  7371  	// match: (SRL x (MOVDconst [val]))
  7372  	// result: (SRLI [int64(val&63)] x)
  7373  	for {
  7374  		x := v_0
  7375  		if v_1.Op != OpRISCV64MOVDconst {
  7376  			break
  7377  		}
  7378  		val := auxIntToInt64(v_1.AuxInt)
  7379  		v.reset(OpRISCV64SRLI)
  7380  		v.AuxInt = int64ToAuxInt(int64(val & 63))
  7381  		v.AddArg(x)
  7382  		return true
  7383  	}
  7384  	return false
  7385  }
  7386  func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
  7387  	v_0 := v.Args[0]
  7388  	// match: (SRLI <t> [x] (MOVWUreg y))
  7389  	// cond: x >= 0 && x <= 31
  7390  	// result: (SRLIW <t> [int64(x)] y)
  7391  	for {
  7392  		t := v.Type
  7393  		x := auxIntToInt64(v.AuxInt)
  7394  		if v_0.Op != OpRISCV64MOVWUreg {
  7395  			break
  7396  		}
  7397  		y := v_0.Args[0]
  7398  		if !(x >= 0 && x <= 31) {
  7399  			break
  7400  		}
  7401  		v.reset(OpRISCV64SRLIW)
  7402  		v.Type = t
  7403  		v.AuxInt = int64ToAuxInt(int64(x))
  7404  		v.AddArg(y)
  7405  		return true
  7406  	}
  7407  	// match: (SRLI <t> [x] (MOVBUreg y))
  7408  	// cond: x >= 8
  7409  	// result: (MOVDconst <t> [0])
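        	// A zero-extended byte has no bits set above bit 7, so a logical
        	// right shift by 8 or more is always zero.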
  7410  	for {
  7411  		t := v.Type
  7412  		x := auxIntToInt64(v.AuxInt)
  7413  		if v_0.Op != OpRISCV64MOVBUreg {
  7414  			break
  7415  		}
  7416  		if !(x >= 8) {
  7417  			break
  7418  		}
  7419  		v.reset(OpRISCV64MOVDconst)
  7420  		v.Type = t
  7421  		v.AuxInt = int64ToAuxInt(0)
  7422  		return true
  7423  	}
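        	// A zero-extended value shifted logically right by at least its width
        	// is always zero; the rule above and the two below fold such shifts
        	// to MOVDconst [0].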
  7424  	// match: (SRLI <t> [x] (MOVHUreg y))
  7425  	// cond: x >= 16
  7426  	// result: (MOVDconst <t> [0])
  7427  	for {
  7428  		t := v.Type
  7429  		x := auxIntToInt64(v.AuxInt)
  7430  		if v_0.Op != OpRISCV64MOVHUreg {
  7431  			break
  7432  		}
  7433  		if !(x >= 16) {
  7434  			break
  7435  		}
  7436  		v.reset(OpRISCV64MOVDconst)
  7437  		v.Type = t
  7438  		v.AuxInt = int64ToAuxInt(0)
  7439  		return true
  7440  	}
  7441  	// match: (SRLI <t> [x] (MOVWUreg y))
  7442  	// cond: x >= 32
  7443  	// result: (MOVDconst <t> [0])
  7444  	for {
  7445  		t := v.Type
  7446  		x := auxIntToInt64(v.AuxInt)
  7447  		if v_0.Op != OpRISCV64MOVWUreg {
  7448  			break
  7449  		}
  7450  		if !(x >= 32) {
  7451  			break
  7452  		}
  7453  		v.reset(OpRISCV64MOVDconst)
  7454  		v.Type = t
  7455  		v.AuxInt = int64ToAuxInt(0)
  7456  		return true
  7457  	}
  7458  	// match: (SRLI [x] (MOVDconst [y]))
  7459  	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
  7460  	for {
  7461  		x := auxIntToInt64(v.AuxInt)
  7462  		if v_0.Op != OpRISCV64MOVDconst {
  7463  			break
  7464  		}
  7465  		y := auxIntToInt64(v_0.AuxInt)
  7466  		v.reset(OpRISCV64MOVDconst)
  7467  		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
  7468  		return true
  7469  	}
  7470  	return false
  7471  }
  7472  func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
  7473  	v_1 := v.Args[1]
  7474  	v_0 := v.Args[0]
  7475  	// match: (SRLW x (MOVDconst [val]))
  7476  	// result: (SRLIW [int64(val&31)] x)
  7477  	for {
  7478  		x := v_0
  7479  		if v_1.Op != OpRISCV64MOVDconst {
  7480  			break
  7481  		}
  7482  		val := auxIntToInt64(v_1.AuxInt)
  7483  		v.reset(OpRISCV64SRLIW)
  7484  		v.AuxInt = int64ToAuxInt(int64(val & 31))
  7485  		v.AddArg(x)
  7486  		return true
  7487  	}
  7488  	return false
  7489  }
  7490  func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
  7491  	v_1 := v.Args[1]
  7492  	v_0 := v.Args[0]
  7493  	b := v.Block
  7494  	// match: (SUB x (NEG y))
  7495  	// result: (ADD x y)
  7496  	for {
  7497  		x := v_0
  7498  		if v_1.Op != OpRISCV64NEG {
  7499  			break
  7500  		}
  7501  		y := v_1.Args[0]
  7502  		v.reset(OpRISCV64ADD)
  7503  		v.AddArg2(x, y)
  7504  		return true
  7505  	}
  7506  	// match: (SUB x x)
  7507  	// result: (MOVDconst [0])
  7508  	for {
  7509  		x := v_0
  7510  		if x != v_1 {
  7511  			break
  7512  		}
  7513  		v.reset(OpRISCV64MOVDconst)
  7514  		v.AuxInt = int64ToAuxInt(0)
  7515  		return true
  7516  	}
  7517  	// match: (SUB x (MOVDconst [val]))
  7518  	// cond: is32Bit(-val)
  7519  	// result: (ADDI [-val] x)
  7520  	for {
  7521  		x := v_0
  7522  		if v_1.Op != OpRISCV64MOVDconst {
  7523  			break
  7524  		}
  7525  		val := auxIntToInt64(v_1.AuxInt)
  7526  		if !(is32Bit(-val)) {
  7527  			break
  7528  		}
  7529  		v.reset(OpRISCV64ADDI)
  7530  		v.AuxInt = int64ToAuxInt(-val)
  7531  		v.AddArg(x)
  7532  		return true
  7533  	}
  7534  	// match: (SUB <t> (MOVDconst [val]) y)
  7535  	// cond: is32Bit(-val)
  7536  	// result: (NEG (ADDI <t> [-val] y))
  7537  	for {
  7538  		t := v.Type
  7539  		if v_0.Op != OpRISCV64MOVDconst {
  7540  			break
  7541  		}
  7542  		val := auxIntToInt64(v_0.AuxInt)
  7543  		y := v_1
  7544  		if !(is32Bit(-val)) {
  7545  			break
  7546  		}
  7547  		v.reset(OpRISCV64NEG)
  7548  		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
  7549  		v0.AuxInt = int64ToAuxInt(-val)
  7550  		v0.AddArg(y)
  7551  		v.AddArg(v0)
  7552  		return true
  7553  	}
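        	// The rule above handles constant minus register: with no
        	// reverse-subtract immediate available, val-y is computed as
        	// -(y-val), that is NEG (ADDI [-val] y).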
  7554  	// match: (SUB x (MOVDconst [0]))
  7555  	// result: x
  7556  	for {
  7557  		x := v_0
  7558  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  7559  			break
  7560  		}
  7561  		v.copyOf(x)
  7562  		return true
  7563  	}
  7564  	// match: (SUB (MOVDconst [0]) x)
  7565  	// result: (NEG x)
  7566  	for {
  7567  		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
  7568  			break
  7569  		}
  7570  		x := v_1
  7571  		v.reset(OpRISCV64NEG)
  7572  		v.AddArg(x)
  7573  		return true
  7574  	}
  7575  	return false
  7576  }
  7577  func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
  7578  	v_1 := v.Args[1]
  7579  	v_0 := v.Args[0]
  7580  	// match: (SUBW x (MOVDconst [0]))
  7581  	// result: (ADDIW [0] x)
  7582  	for {
  7583  		x := v_0
  7584  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  7585  			break
  7586  		}
  7587  		v.reset(OpRISCV64ADDIW)
  7588  		v.AuxInt = int64ToAuxInt(0)
  7589  		v.AddArg(x)
  7590  		return true
  7591  	}
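        	// ADDIW with a zero immediate is the canonical 32-to-64 bit sign
        	// extension (sext.w), so subtracting zero still yields the
        	// sign-extended word result that SUBW would have produced.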
  7592  	// match: (SUBW (MOVDconst [0]) x)
  7593  	// result: (NEGW x)
  7594  	for {
  7595  		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
  7596  			break
  7597  		}
  7598  		x := v_1
  7599  		v.reset(OpRISCV64NEGW)
  7600  		v.AddArg(x)
  7601  		return true
  7602  	}
  7603  	return false
  7604  }
  7605  func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
  7606  	v_1 := v.Args[1]
  7607  	v_0 := v.Args[0]
  7608  	// match: (XOR (MOVDconst [val]) x)
  7609  	// cond: is32Bit(val)
  7610  	// result: (XORI [val] x)
  7611  	for {
  7612  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7613  			if v_0.Op != OpRISCV64MOVDconst {
  7614  				continue
  7615  			}
  7616  			val := auxIntToInt64(v_0.AuxInt)
  7617  			x := v_1
  7618  			if !(is32Bit(val)) {
  7619  				continue
  7620  			}
  7621  			v.reset(OpRISCV64XORI)
  7622  			v.AuxInt = int64ToAuxInt(val)
  7623  			v.AddArg(x)
  7624  			return true
  7625  		}
  7626  		break
  7627  	}
  7628  	// match: (XOR x x)
  7629  	// result: (MOVDconst [0])
  7630  	for {
  7631  		x := v_0
  7632  		if x != v_1 {
  7633  			break
  7634  		}
  7635  		v.reset(OpRISCV64MOVDconst)
  7636  		v.AuxInt = int64ToAuxInt(0)
  7637  		return true
  7638  	}
  7639  	return false
  7640  }
  7641  func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
  7642  	v_1 := v.Args[1]
  7643  	v_0 := v.Args[0]
  7644  	b := v.Block
  7645  	typ := &b.Func.Config.Types
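        	// An n-bit rotate left is open coded as
        	// (x << (y & (n-1))) | (zext_n(x) >> (-y & (n-1))).
        	// The ANDI masks keep both shift counts within the rotate width and
        	// the zero extension makes the right shift fill with zeros.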
  7646  	// match: (RotateLeft16 <t> x y)
  7647  	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
  7648  	for {
  7649  		t := v.Type
  7650  		x := v_0
  7651  		y := v_1
  7652  		v.reset(OpRISCV64OR)
  7653  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  7654  		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7655  		v1.AuxInt = int64ToAuxInt(15)
  7656  		v1.AddArg(y)
  7657  		v0.AddArg2(x, v1)
  7658  		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7659  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7660  		v3.AddArg(x)
  7661  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7662  		v4.AuxInt = int64ToAuxInt(15)
  7663  		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
  7664  		v5.AddArg(y)
  7665  		v4.AddArg(v5)
  7666  		v2.AddArg2(v3, v4)
  7667  		v.AddArg2(v0, v2)
  7668  		return true
  7669  	}
  7670  }
  7671  func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
  7672  	v_1 := v.Args[1]
  7673  	v_0 := v.Args[0]
  7674  	b := v.Block
  7675  	typ := &b.Func.Config.Types
  7676  	// match: (RotateLeft8 <t> x y)
  7677  	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
  7678  	for {
  7679  		t := v.Type
  7680  		x := v_0
  7681  		y := v_1
  7682  		v.reset(OpRISCV64OR)
  7683  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  7684  		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7685  		v1.AuxInt = int64ToAuxInt(7)
  7686  		v1.AddArg(y)
  7687  		v0.AddArg2(x, v1)
  7688  		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7689  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7690  		v3.AddArg(x)
  7691  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
  7692  		v4.AuxInt = int64ToAuxInt(7)
  7693  		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
  7694  		v5.AddArg(y)
  7695  		v4.AddArg(v5)
  7696  		v2.AddArg2(v3, v4)
  7697  		v.AddArg2(v0, v2)
  7698  		return true
  7699  	}
  7700  }
  7701  func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
  7702  	v_1 := v.Args[1]
  7703  	v_0 := v.Args[0]
  7704  	b := v.Block
  7705  	typ := &b.Func.Config.Types
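        	// Go requires an oversized unsigned shift to produce 0. Because x is
        	// zero-extended to 64 bits, counts of 16..63 already yield 0 from the
        	// SRL; the (Neg16 (SLTIU [64] ...)) mask is all ones while the count
        	// is below 64 and zero otherwise, covering counts of 64 and up, where
        	// the hardware would use only the low six bits. When shiftIsBounded
        	// reports the count is in range, the mask is dropped.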
  7706  	// match: (Rsh16Ux16 <t> x y)
  7707  	// cond: !shiftIsBounded(v)
  7708  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  7709  	for {
  7710  		t := v.Type
  7711  		x := v_0
  7712  		y := v_1
  7713  		if !(!shiftIsBounded(v)) {
  7714  			break
  7715  		}
  7716  		v.reset(OpRISCV64AND)
  7717  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7718  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7719  		v1.AddArg(x)
  7720  		v0.AddArg2(v1, y)
  7721  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7722  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7723  		v3.AuxInt = int64ToAuxInt(64)
  7724  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7725  		v4.AddArg(y)
  7726  		v3.AddArg(v4)
  7727  		v2.AddArg(v3)
  7728  		v.AddArg2(v0, v2)
  7729  		return true
  7730  	}
  7731  	// match: (Rsh16Ux16 x y)
  7732  	// cond: shiftIsBounded(v)
  7733  	// result: (SRL (ZeroExt16to64 x) y)
  7734  	for {
  7735  		x := v_0
  7736  		y := v_1
  7737  		if !(shiftIsBounded(v)) {
  7738  			break
  7739  		}
  7740  		v.reset(OpRISCV64SRL)
  7741  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7742  		v0.AddArg(x)
  7743  		v.AddArg2(v0, y)
  7744  		return true
  7745  	}
  7746  	return false
  7747  }
  7748  func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
  7749  	v_1 := v.Args[1]
  7750  	v_0 := v.Args[0]
  7751  	b := v.Block
  7752  	typ := &b.Func.Config.Types
  7753  	// match: (Rsh16Ux32 <t> x y)
  7754  	// cond: !shiftIsBounded(v)
  7755  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  7756  	for {
  7757  		t := v.Type
  7758  		x := v_0
  7759  		y := v_1
  7760  		if !(!shiftIsBounded(v)) {
  7761  			break
  7762  		}
  7763  		v.reset(OpRISCV64AND)
  7764  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7765  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7766  		v1.AddArg(x)
  7767  		v0.AddArg2(v1, y)
  7768  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7769  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7770  		v3.AuxInt = int64ToAuxInt(64)
  7771  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7772  		v4.AddArg(y)
  7773  		v3.AddArg(v4)
  7774  		v2.AddArg(v3)
  7775  		v.AddArg2(v0, v2)
  7776  		return true
  7777  	}
  7778  	// match: (Rsh16Ux32 x y)
  7779  	// cond: shiftIsBounded(v)
  7780  	// result: (SRL (ZeroExt16to64 x) y)
  7781  	for {
  7782  		x := v_0
  7783  		y := v_1
  7784  		if !(shiftIsBounded(v)) {
  7785  			break
  7786  		}
  7787  		v.reset(OpRISCV64SRL)
  7788  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7789  		v0.AddArg(x)
  7790  		v.AddArg2(v0, y)
  7791  		return true
  7792  	}
  7793  	return false
  7794  }
  7795  func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
  7796  	v_1 := v.Args[1]
  7797  	v_0 := v.Args[0]
  7798  	b := v.Block
  7799  	typ := &b.Func.Config.Types
  7800  	// match: (Rsh16Ux64 <t> x y)
  7801  	// cond: !shiftIsBounded(v)
  7802  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
  7803  	for {
  7804  		t := v.Type
  7805  		x := v_0
  7806  		y := v_1
  7807  		if !(!shiftIsBounded(v)) {
  7808  			break
  7809  		}
  7810  		v.reset(OpRISCV64AND)
  7811  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7812  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7813  		v1.AddArg(x)
  7814  		v0.AddArg2(v1, y)
  7815  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7816  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7817  		v3.AuxInt = int64ToAuxInt(64)
  7818  		v3.AddArg(y)
  7819  		v2.AddArg(v3)
  7820  		v.AddArg2(v0, v2)
  7821  		return true
  7822  	}
  7823  	// match: (Rsh16Ux64 x y)
  7824  	// cond: shiftIsBounded(v)
  7825  	// result: (SRL (ZeroExt16to64 x) y)
  7826  	for {
  7827  		x := v_0
  7828  		y := v_1
  7829  		if !(shiftIsBounded(v)) {
  7830  			break
  7831  		}
  7832  		v.reset(OpRISCV64SRL)
  7833  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7834  		v0.AddArg(x)
  7835  		v.AddArg2(v0, y)
  7836  		return true
  7837  	}
  7838  	return false
  7839  }
  7840  func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
  7841  	v_1 := v.Args[1]
  7842  	v_0 := v.Args[0]
  7843  	b := v.Block
  7844  	typ := &b.Func.Config.Types
  7845  	// match: (Rsh16Ux8 <t> x y)
  7846  	// cond: !shiftIsBounded(v)
  7847  	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  7848  	for {
  7849  		t := v.Type
  7850  		x := v_0
  7851  		y := v_1
  7852  		if !(!shiftIsBounded(v)) {
  7853  			break
  7854  		}
  7855  		v.reset(OpRISCV64AND)
  7856  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  7857  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7858  		v1.AddArg(x)
  7859  		v0.AddArg2(v1, y)
  7860  		v2 := b.NewValue0(v.Pos, OpNeg16, t)
  7861  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  7862  		v3.AuxInt = int64ToAuxInt(64)
  7863  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7864  		v4.AddArg(y)
  7865  		v3.AddArg(v4)
  7866  		v2.AddArg(v3)
  7867  		v.AddArg2(v0, v2)
  7868  		return true
  7869  	}
  7870  	// match: (Rsh16Ux8 x y)
  7871  	// cond: shiftIsBounded(v)
  7872  	// result: (SRL (ZeroExt16to64 x) y)
  7873  	for {
  7874  		x := v_0
  7875  		y := v_1
  7876  		if !(shiftIsBounded(v)) {
  7877  			break
  7878  		}
  7879  		v.reset(OpRISCV64SRL)
  7880  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7881  		v0.AddArg(x)
  7882  		v.AddArg2(v0, y)
  7883  		return true
  7884  	}
  7885  	return false
  7886  }
  7887  func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
  7888  	v_1 := v.Args[1]
  7889  	v_0 := v.Args[0]
  7890  	b := v.Block
  7891  	typ := &b.Func.Config.Types
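        	// Go requires an oversized signed shift to fill with the sign bit,
        	// i.e. to behave like a shift by 63. SLTIU [64] is 1 while the count
        	// is below 64; ADDI [-1] turns that into 0 (in range) or -1 (out of
        	// range), and OR-ing with y either leaves the count alone or forces
        	// it to all ones, which SRA treats as 63.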
  7892  	// match: (Rsh16x16 <t> x y)
  7893  	// cond: !shiftIsBounded(v)
  7894  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
  7895  	for {
  7896  		t := v.Type
  7897  		x := v_0
  7898  		y := v_1
  7899  		if !(!shiftIsBounded(v)) {
  7900  			break
  7901  		}
  7902  		v.reset(OpRISCV64SRA)
  7903  		v.Type = t
  7904  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7905  		v0.AddArg(x)
  7906  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7907  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7908  		v2.AuxInt = int64ToAuxInt(-1)
  7909  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7910  		v3.AuxInt = int64ToAuxInt(64)
  7911  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7912  		v4.AddArg(y)
  7913  		v3.AddArg(v4)
  7914  		v2.AddArg(v3)
  7915  		v1.AddArg2(y, v2)
  7916  		v.AddArg2(v0, v1)
  7917  		return true
  7918  	}
  7919  	// match: (Rsh16x16 x y)
  7920  	// cond: shiftIsBounded(v)
  7921  	// result: (SRA (SignExt16to64 x) y)
  7922  	for {
  7923  		x := v_0
  7924  		y := v_1
  7925  		if !(shiftIsBounded(v)) {
  7926  			break
  7927  		}
  7928  		v.reset(OpRISCV64SRA)
  7929  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7930  		v0.AddArg(x)
  7931  		v.AddArg2(v0, y)
  7932  		return true
  7933  	}
  7934  	return false
  7935  }
  7936  func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
  7937  	v_1 := v.Args[1]
  7938  	v_0 := v.Args[0]
  7939  	b := v.Block
  7940  	typ := &b.Func.Config.Types
  7941  	// match: (Rsh16x32 <t> x y)
  7942  	// cond: !shiftIsBounded(v)
  7943  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
  7944  	for {
  7945  		t := v.Type
  7946  		x := v_0
  7947  		y := v_1
  7948  		if !(!shiftIsBounded(v)) {
  7949  			break
  7950  		}
  7951  		v.reset(OpRISCV64SRA)
  7952  		v.Type = t
  7953  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7954  		v0.AddArg(x)
  7955  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  7956  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  7957  		v2.AuxInt = int64ToAuxInt(-1)
  7958  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  7959  		v3.AuxInt = int64ToAuxInt(64)
  7960  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7961  		v4.AddArg(y)
  7962  		v3.AddArg(v4)
  7963  		v2.AddArg(v3)
  7964  		v1.AddArg2(y, v2)
  7965  		v.AddArg2(v0, v1)
  7966  		return true
  7967  	}
  7968  	// match: (Rsh16x32 x y)
  7969  	// cond: shiftIsBounded(v)
  7970  	// result: (SRA (SignExt16to64 x) y)
  7971  	for {
  7972  		x := v_0
  7973  		y := v_1
  7974  		if !(shiftIsBounded(v)) {
  7975  			break
  7976  		}
  7977  		v.reset(OpRISCV64SRA)
  7978  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7979  		v0.AddArg(x)
  7980  		v.AddArg2(v0, y)
  7981  		return true
  7982  	}
  7983  	return false
  7984  }
  7985  func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
  7986  	v_1 := v.Args[1]
  7987  	v_0 := v.Args[0]
  7988  	b := v.Block
  7989  	typ := &b.Func.Config.Types
  7990  	// match: (Rsh16x64 <t> x y)
  7991  	// cond: !shiftIsBounded(v)
  7992  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
  7993  	for {
  7994  		t := v.Type
  7995  		x := v_0
  7996  		y := v_1
  7997  		if !(!shiftIsBounded(v)) {
  7998  			break
  7999  		}
  8000  		v.reset(OpRISCV64SRA)
  8001  		v.Type = t
  8002  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8003  		v0.AddArg(x)
  8004  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8005  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8006  		v2.AuxInt = int64ToAuxInt(-1)
  8007  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8008  		v3.AuxInt = int64ToAuxInt(64)
  8009  		v3.AddArg(y)
  8010  		v2.AddArg(v3)
  8011  		v1.AddArg2(y, v2)
  8012  		v.AddArg2(v0, v1)
  8013  		return true
  8014  	}
  8015  	// match: (Rsh16x64 x y)
  8016  	// cond: shiftIsBounded(v)
  8017  	// result: (SRA (SignExt16to64 x) y)
  8018  	for {
  8019  		x := v_0
  8020  		y := v_1
  8021  		if !(shiftIsBounded(v)) {
  8022  			break
  8023  		}
  8024  		v.reset(OpRISCV64SRA)
  8025  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8026  		v0.AddArg(x)
  8027  		v.AddArg2(v0, y)
  8028  		return true
  8029  	}
  8030  	return false
  8031  }
  8032  func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
  8033  	v_1 := v.Args[1]
  8034  	v_0 := v.Args[0]
  8035  	b := v.Block
  8036  	typ := &b.Func.Config.Types
  8037  	// match: (Rsh16x8 <t> x y)
  8038  	// cond: !shiftIsBounded(v)
  8039  	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
  8040  	for {
  8041  		t := v.Type
  8042  		x := v_0
  8043  		y := v_1
  8044  		if !(!shiftIsBounded(v)) {
  8045  			break
  8046  		}
  8047  		v.reset(OpRISCV64SRA)
  8048  		v.Type = t
  8049  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8050  		v0.AddArg(x)
  8051  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8052  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8053  		v2.AuxInt = int64ToAuxInt(-1)
  8054  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8055  		v3.AuxInt = int64ToAuxInt(64)
  8056  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8057  		v4.AddArg(y)
  8058  		v3.AddArg(v4)
  8059  		v2.AddArg(v3)
  8060  		v1.AddArg2(y, v2)
  8061  		v.AddArg2(v0, v1)
  8062  		return true
  8063  	}
  8064  	// match: (Rsh16x8 x y)
  8065  	// cond: shiftIsBounded(v)
  8066  	// result: (SRA (SignExt16to64 x) y)
  8067  	for {
  8068  		x := v_0
  8069  		y := v_1
  8070  		if !(shiftIsBounded(v)) {
  8071  			break
  8072  		}
  8073  		v.reset(OpRISCV64SRA)
  8074  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8075  		v0.AddArg(x)
  8076  		v.AddArg2(v0, y)
  8077  		return true
  8078  	}
  8079  	return false
  8080  }
  8081  func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
  8082  	v_1 := v.Args[1]
  8083  	v_0 := v.Args[0]
  8084  	b := v.Block
  8085  	typ := &b.Func.Config.Types
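        	// The 32-bit shifts lower to SRLW/SRAW, which read only the low 32
        	// bits of x and the low five bits of the count, so no extension of x
        	// is needed and the out-of-range guard compares the count against 32
        	// rather than 64.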
  8086  	// match: (Rsh32Ux16 <t> x y)
  8087  	// cond: !shiftIsBounded(v)
  8088  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
  8089  	for {
  8090  		t := v.Type
  8091  		x := v_0
  8092  		y := v_1
  8093  		if !(!shiftIsBounded(v)) {
  8094  			break
  8095  		}
  8096  		v.reset(OpRISCV64AND)
  8097  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  8098  		v0.AddArg2(x, y)
  8099  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  8100  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8101  		v2.AuxInt = int64ToAuxInt(32)
  8102  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8103  		v3.AddArg(y)
  8104  		v2.AddArg(v3)
  8105  		v1.AddArg(v2)
  8106  		v.AddArg2(v0, v1)
  8107  		return true
  8108  	}
  8109  	// match: (Rsh32Ux16 x y)
  8110  	// cond: shiftIsBounded(v)
  8111  	// result: (SRLW x y)
  8112  	for {
  8113  		x := v_0
  8114  		y := v_1
  8115  		if !(shiftIsBounded(v)) {
  8116  			break
  8117  		}
  8118  		v.reset(OpRISCV64SRLW)
  8119  		v.AddArg2(x, y)
  8120  		return true
  8121  	}
  8122  	return false
  8123  }
  8124  func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
  8125  	v_1 := v.Args[1]
  8126  	v_0 := v.Args[0]
  8127  	b := v.Block
  8128  	typ := &b.Func.Config.Types
  8129  	// match: (Rsh32Ux32 <t> x y)
  8130  	// cond: !shiftIsBounded(v)
  8131  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
  8132  	for {
  8133  		t := v.Type
  8134  		x := v_0
  8135  		y := v_1
  8136  		if !(!shiftIsBounded(v)) {
  8137  			break
  8138  		}
  8139  		v.reset(OpRISCV64AND)
  8140  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  8141  		v0.AddArg2(x, y)
  8142  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  8143  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8144  		v2.AuxInt = int64ToAuxInt(32)
  8145  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8146  		v3.AddArg(y)
  8147  		v2.AddArg(v3)
  8148  		v1.AddArg(v2)
  8149  		v.AddArg2(v0, v1)
  8150  		return true
  8151  	}
  8152  	// match: (Rsh32Ux32 x y)
  8153  	// cond: shiftIsBounded(v)
  8154  	// result: (SRLW x y)
  8155  	for {
  8156  		x := v_0
  8157  		y := v_1
  8158  		if !(shiftIsBounded(v)) {
  8159  			break
  8160  		}
  8161  		v.reset(OpRISCV64SRLW)
  8162  		v.AddArg2(x, y)
  8163  		return true
  8164  	}
  8165  	return false
  8166  }
  8167  func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
  8168  	v_1 := v.Args[1]
  8169  	v_0 := v.Args[0]
  8170  	b := v.Block
  8171  	// match: (Rsh32Ux64 <t> x y)
  8172  	// cond: !shiftIsBounded(v)
  8173  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
  8174  	for {
  8175  		t := v.Type
  8176  		x := v_0
  8177  		y := v_1
  8178  		if !(!shiftIsBounded(v)) {
  8179  			break
  8180  		}
  8181  		v.reset(OpRISCV64AND)
  8182  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  8183  		v0.AddArg2(x, y)
  8184  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  8185  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8186  		v2.AuxInt = int64ToAuxInt(32)
  8187  		v2.AddArg(y)
  8188  		v1.AddArg(v2)
  8189  		v.AddArg2(v0, v1)
  8190  		return true
  8191  	}
  8192  	// match: (Rsh32Ux64 x y)
  8193  	// cond: shiftIsBounded(v)
  8194  	// result: (SRLW x y)
  8195  	for {
  8196  		x := v_0
  8197  		y := v_1
  8198  		if !(shiftIsBounded(v)) {
  8199  			break
  8200  		}
  8201  		v.reset(OpRISCV64SRLW)
  8202  		v.AddArg2(x, y)
  8203  		return true
  8204  	}
  8205  	return false
  8206  }
  8207  func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
  8208  	v_1 := v.Args[1]
  8209  	v_0 := v.Args[0]
  8210  	b := v.Block
  8211  	typ := &b.Func.Config.Types
  8212  	// match: (Rsh32Ux8 <t> x y)
  8213  	// cond: !shiftIsBounded(v)
  8214  	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
  8215  	for {
  8216  		t := v.Type
  8217  		x := v_0
  8218  		y := v_1
  8219  		if !(!shiftIsBounded(v)) {
  8220  			break
  8221  		}
  8222  		v.reset(OpRISCV64AND)
  8223  		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
  8224  		v0.AddArg2(x, y)
  8225  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  8226  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8227  		v2.AuxInt = int64ToAuxInt(32)
  8228  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8229  		v3.AddArg(y)
  8230  		v2.AddArg(v3)
  8231  		v1.AddArg(v2)
  8232  		v.AddArg2(v0, v1)
  8233  		return true
  8234  	}
  8235  	// match: (Rsh32Ux8 x y)
  8236  	// cond: shiftIsBounded(v)
  8237  	// result: (SRLW x y)
  8238  	for {
  8239  		x := v_0
  8240  		y := v_1
  8241  		if !(shiftIsBounded(v)) {
  8242  			break
  8243  		}
  8244  		v.reset(OpRISCV64SRLW)
  8245  		v.AddArg2(x, y)
  8246  		return true
  8247  	}
  8248  	return false
  8249  }
  8250  func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
  8251  	v_1 := v.Args[1]
  8252  	v_0 := v.Args[0]
  8253  	b := v.Block
  8254  	typ := &b.Func.Config.Types
  8255  	// match: (Rsh32x16 <t> x y)
  8256  	// cond: !shiftIsBounded(v)
  8257  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
  8258  	for {
  8259  		t := v.Type
  8260  		x := v_0
  8261  		y := v_1
  8262  		if !(!shiftIsBounded(v)) {
  8263  			break
  8264  		}
  8265  		v.reset(OpRISCV64SRAW)
  8266  		v.Type = t
  8267  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8268  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8269  		v1.AuxInt = int64ToAuxInt(-1)
  8270  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8271  		v2.AuxInt = int64ToAuxInt(32)
  8272  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8273  		v3.AddArg(y)
  8274  		v2.AddArg(v3)
  8275  		v1.AddArg(v2)
  8276  		v0.AddArg2(y, v1)
  8277  		v.AddArg2(x, v0)
  8278  		return true
  8279  	}
  8280  	// match: (Rsh32x16 x y)
  8281  	// cond: shiftIsBounded(v)
  8282  	// result: (SRAW x y)
  8283  	for {
  8284  		x := v_0
  8285  		y := v_1
  8286  		if !(shiftIsBounded(v)) {
  8287  			break
  8288  		}
  8289  		v.reset(OpRISCV64SRAW)
  8290  		v.AddArg2(x, y)
  8291  		return true
  8292  	}
  8293  	return false
  8294  }
  8295  func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
  8296  	v_1 := v.Args[1]
  8297  	v_0 := v.Args[0]
  8298  	b := v.Block
  8299  	typ := &b.Func.Config.Types
  8300  	// match: (Rsh32x32 <t> x y)
  8301  	// cond: !shiftIsBounded(v)
  8302  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
  8303  	for {
  8304  		t := v.Type
  8305  		x := v_0
  8306  		y := v_1
  8307  		if !(!shiftIsBounded(v)) {
  8308  			break
  8309  		}
  8310  		v.reset(OpRISCV64SRAW)
  8311  		v.Type = t
  8312  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8313  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8314  		v1.AuxInt = int64ToAuxInt(-1)
  8315  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8316  		v2.AuxInt = int64ToAuxInt(32)
  8317  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8318  		v3.AddArg(y)
  8319  		v2.AddArg(v3)
  8320  		v1.AddArg(v2)
  8321  		v0.AddArg2(y, v1)
  8322  		v.AddArg2(x, v0)
  8323  		return true
  8324  	}
  8325  	// match: (Rsh32x32 x y)
  8326  	// cond: shiftIsBounded(v)
  8327  	// result: (SRAW x y)
  8328  	for {
  8329  		x := v_0
  8330  		y := v_1
  8331  		if !(shiftIsBounded(v)) {
  8332  			break
  8333  		}
  8334  		v.reset(OpRISCV64SRAW)
  8335  		v.AddArg2(x, y)
  8336  		return true
  8337  	}
  8338  	return false
  8339  }
  8340  func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
  8341  	v_1 := v.Args[1]
  8342  	v_0 := v.Args[0]
  8343  	b := v.Block
  8344  	// match: (Rsh32x64 <t> x y)
  8345  	// cond: !shiftIsBounded(v)
  8346  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
  8347  	for {
  8348  		t := v.Type
  8349  		x := v_0
  8350  		y := v_1
  8351  		if !(!shiftIsBounded(v)) {
  8352  			break
  8353  		}
  8354  		v.reset(OpRISCV64SRAW)
  8355  		v.Type = t
  8356  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8357  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8358  		v1.AuxInt = int64ToAuxInt(-1)
  8359  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8360  		v2.AuxInt = int64ToAuxInt(32)
  8361  		v2.AddArg(y)
  8362  		v1.AddArg(v2)
  8363  		v0.AddArg2(y, v1)
  8364  		v.AddArg2(x, v0)
  8365  		return true
  8366  	}
  8367  	// match: (Rsh32x64 x y)
  8368  	// cond: shiftIsBounded(v)
  8369  	// result: (SRAW x y)
  8370  	for {
  8371  		x := v_0
  8372  		y := v_1
  8373  		if !(shiftIsBounded(v)) {
  8374  			break
  8375  		}
  8376  		v.reset(OpRISCV64SRAW)
  8377  		v.AddArg2(x, y)
  8378  		return true
  8379  	}
  8380  	return false
  8381  }
  8382  func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
  8383  	v_1 := v.Args[1]
  8384  	v_0 := v.Args[0]
  8385  	b := v.Block
  8386  	typ := &b.Func.Config.Types
  8387  	// match: (Rsh32x8 <t> x y)
  8388  	// cond: !shiftIsBounded(v)
  8389  	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
  8390  	for {
  8391  		t := v.Type
  8392  		x := v_0
  8393  		y := v_1
  8394  		if !(!shiftIsBounded(v)) {
  8395  			break
  8396  		}
  8397  		v.reset(OpRISCV64SRAW)
  8398  		v.Type = t
  8399  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8400  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8401  		v1.AuxInt = int64ToAuxInt(-1)
  8402  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8403  		v2.AuxInt = int64ToAuxInt(32)
  8404  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8405  		v3.AddArg(y)
  8406  		v2.AddArg(v3)
  8407  		v1.AddArg(v2)
  8408  		v0.AddArg2(y, v1)
  8409  		v.AddArg2(x, v0)
  8410  		return true
  8411  	}
  8412  	// match: (Rsh32x8 x y)
  8413  	// cond: shiftIsBounded(v)
  8414  	// result: (SRAW x y)
  8415  	for {
  8416  		x := v_0
  8417  		y := v_1
  8418  		if !(shiftIsBounded(v)) {
  8419  			break
  8420  		}
  8421  		v.reset(OpRISCV64SRAW)
  8422  		v.AddArg2(x, y)
  8423  		return true
  8424  	}
  8425  	return false
  8426  }
  8427  func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
  8428  	v_1 := v.Args[1]
  8429  	v_0 := v.Args[0]
  8430  	b := v.Block
  8431  	typ := &b.Func.Config.Types
  8432  	// match: (Rsh64Ux16 <t> x y)
  8433  	// cond: !shiftIsBounded(v)
  8434  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  8435  	for {
  8436  		t := v.Type
  8437  		x := v_0
  8438  		y := v_1
  8439  		if !(!shiftIsBounded(v)) {
  8440  			break
  8441  		}
  8442  		v.reset(OpRISCV64AND)
  8443  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8444  		v0.AddArg2(x, y)
  8445  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8446  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8447  		v2.AuxInt = int64ToAuxInt(64)
  8448  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8449  		v3.AddArg(y)
  8450  		v2.AddArg(v3)
  8451  		v1.AddArg(v2)
  8452  		v.AddArg2(v0, v1)
  8453  		return true
  8454  	}
  8455  	// match: (Rsh64Ux16 x y)
  8456  	// cond: shiftIsBounded(v)
  8457  	// result: (SRL x y)
  8458  	for {
  8459  		x := v_0
  8460  		y := v_1
  8461  		if !(shiftIsBounded(v)) {
  8462  			break
  8463  		}
  8464  		v.reset(OpRISCV64SRL)
  8465  		v.AddArg2(x, y)
  8466  		return true
  8467  	}
  8468  	return false
  8469  }
  8470  func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
  8471  	v_1 := v.Args[1]
  8472  	v_0 := v.Args[0]
  8473  	b := v.Block
  8474  	typ := &b.Func.Config.Types
  8475  	// match: (Rsh64Ux32 <t> x y)
  8476  	// cond: !shiftIsBounded(v)
  8477  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  8478  	for {
  8479  		t := v.Type
  8480  		x := v_0
  8481  		y := v_1
  8482  		if !(!shiftIsBounded(v)) {
  8483  			break
  8484  		}
  8485  		v.reset(OpRISCV64AND)
  8486  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8487  		v0.AddArg2(x, y)
  8488  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8489  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8490  		v2.AuxInt = int64ToAuxInt(64)
  8491  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8492  		v3.AddArg(y)
  8493  		v2.AddArg(v3)
  8494  		v1.AddArg(v2)
  8495  		v.AddArg2(v0, v1)
  8496  		return true
  8497  	}
  8498  	// match: (Rsh64Ux32 x y)
  8499  	// cond: shiftIsBounded(v)
  8500  	// result: (SRL x y)
  8501  	for {
  8502  		x := v_0
  8503  		y := v_1
  8504  		if !(shiftIsBounded(v)) {
  8505  			break
  8506  		}
  8507  		v.reset(OpRISCV64SRL)
  8508  		v.AddArg2(x, y)
  8509  		return true
  8510  	}
  8511  	return false
  8512  }
  8513  func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
  8514  	v_1 := v.Args[1]
  8515  	v_0 := v.Args[0]
  8516  	b := v.Block
  8517  	// match: (Rsh64Ux64 <t> x y)
  8518  	// cond: !shiftIsBounded(v)
  8519  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
  8520  	for {
  8521  		t := v.Type
  8522  		x := v_0
  8523  		y := v_1
  8524  		if !(!shiftIsBounded(v)) {
  8525  			break
  8526  		}
  8527  		v.reset(OpRISCV64AND)
  8528  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8529  		v0.AddArg2(x, y)
  8530  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8531  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8532  		v2.AuxInt = int64ToAuxInt(64)
  8533  		v2.AddArg(y)
  8534  		v1.AddArg(v2)
  8535  		v.AddArg2(v0, v1)
  8536  		return true
  8537  	}
  8538  	// match: (Rsh64Ux64 x y)
  8539  	// cond: shiftIsBounded(v)
  8540  	// result: (SRL x y)
  8541  	for {
  8542  		x := v_0
  8543  		y := v_1
  8544  		if !(shiftIsBounded(v)) {
  8545  			break
  8546  		}
  8547  		v.reset(OpRISCV64SRL)
  8548  		v.AddArg2(x, y)
  8549  		return true
  8550  	}
  8551  	return false
  8552  }
  8553  func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
  8554  	v_1 := v.Args[1]
  8555  	v_0 := v.Args[0]
  8556  	b := v.Block
  8557  	typ := &b.Func.Config.Types
  8558  	// match: (Rsh64Ux8 <t> x y)
  8559  	// cond: !shiftIsBounded(v)
  8560  	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  8561  	for {
  8562  		t := v.Type
  8563  		x := v_0
  8564  		y := v_1
  8565  		if !(!shiftIsBounded(v)) {
  8566  			break
  8567  		}
  8568  		v.reset(OpRISCV64AND)
  8569  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8570  		v0.AddArg2(x, y)
  8571  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  8572  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8573  		v2.AuxInt = int64ToAuxInt(64)
  8574  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8575  		v3.AddArg(y)
  8576  		v2.AddArg(v3)
  8577  		v1.AddArg(v2)
  8578  		v.AddArg2(v0, v1)
  8579  		return true
  8580  	}
  8581  	// match: (Rsh64Ux8 x y)
  8582  	// cond: shiftIsBounded(v)
  8583  	// result: (SRL x y)
  8584  	for {
  8585  		x := v_0
  8586  		y := v_1
  8587  		if !(shiftIsBounded(v)) {
  8588  			break
  8589  		}
  8590  		v.reset(OpRISCV64SRL)
  8591  		v.AddArg2(x, y)
  8592  		return true
  8593  	}
  8594  	return false
  8595  }
  8596  func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
  8597  	v_1 := v.Args[1]
  8598  	v_0 := v.Args[0]
  8599  	b := v.Block
  8600  	typ := &b.Func.Config.Types
  8601  	// match: (Rsh64x16 <t> x y)
  8602  	// cond: !shiftIsBounded(v)
  8603  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
  8604  	for {
  8605  		t := v.Type
  8606  		x := v_0
  8607  		y := v_1
  8608  		if !(!shiftIsBounded(v)) {
  8609  			break
  8610  		}
  8611  		v.reset(OpRISCV64SRA)
  8612  		v.Type = t
  8613  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8614  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8615  		v1.AuxInt = int64ToAuxInt(-1)
  8616  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8617  		v2.AuxInt = int64ToAuxInt(64)
  8618  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8619  		v3.AddArg(y)
  8620  		v2.AddArg(v3)
  8621  		v1.AddArg(v2)
  8622  		v0.AddArg2(y, v1)
  8623  		v.AddArg2(x, v0)
  8624  		return true
  8625  	}
  8626  	// match: (Rsh64x16 x y)
  8627  	// cond: shiftIsBounded(v)
  8628  	// result: (SRA x y)
  8629  	for {
  8630  		x := v_0
  8631  		y := v_1
  8632  		if !(shiftIsBounded(v)) {
  8633  			break
  8634  		}
  8635  		v.reset(OpRISCV64SRA)
  8636  		v.AddArg2(x, y)
  8637  		return true
  8638  	}
  8639  	return false
  8640  }
  8641  func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
  8642  	v_1 := v.Args[1]
  8643  	v_0 := v.Args[0]
  8644  	b := v.Block
  8645  	typ := &b.Func.Config.Types
  8646  	// match: (Rsh64x32 <t> x y)
  8647  	// cond: !shiftIsBounded(v)
  8648  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
  8649  	for {
  8650  		t := v.Type
  8651  		x := v_0
  8652  		y := v_1
  8653  		if !(!shiftIsBounded(v)) {
  8654  			break
  8655  		}
  8656  		v.reset(OpRISCV64SRA)
  8657  		v.Type = t
  8658  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8659  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8660  		v1.AuxInt = int64ToAuxInt(-1)
  8661  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8662  		v2.AuxInt = int64ToAuxInt(64)
  8663  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8664  		v3.AddArg(y)
  8665  		v2.AddArg(v3)
  8666  		v1.AddArg(v2)
  8667  		v0.AddArg2(y, v1)
  8668  		v.AddArg2(x, v0)
  8669  		return true
  8670  	}
  8671  	// match: (Rsh64x32 x y)
  8672  	// cond: shiftIsBounded(v)
  8673  	// result: (SRA x y)
  8674  	for {
  8675  		x := v_0
  8676  		y := v_1
  8677  		if !(shiftIsBounded(v)) {
  8678  			break
  8679  		}
  8680  		v.reset(OpRISCV64SRA)
  8681  		v.AddArg2(x, y)
  8682  		return true
  8683  	}
  8684  	return false
  8685  }
  8686  func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
  8687  	v_1 := v.Args[1]
  8688  	v_0 := v.Args[0]
  8689  	b := v.Block
  8690  	// match: (Rsh64x64 <t> x y)
  8691  	// cond: !shiftIsBounded(v)
  8692  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
  8693  	for {
  8694  		t := v.Type
  8695  		x := v_0
  8696  		y := v_1
  8697  		if !(!shiftIsBounded(v)) {
  8698  			break
  8699  		}
  8700  		v.reset(OpRISCV64SRA)
  8701  		v.Type = t
  8702  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8703  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8704  		v1.AuxInt = int64ToAuxInt(-1)
  8705  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8706  		v2.AuxInt = int64ToAuxInt(64)
  8707  		v2.AddArg(y)
  8708  		v1.AddArg(v2)
  8709  		v0.AddArg2(y, v1)
  8710  		v.AddArg2(x, v0)
  8711  		return true
  8712  	}
  8713  	// match: (Rsh64x64 x y)
  8714  	// cond: shiftIsBounded(v)
  8715  	// result: (SRA x y)
  8716  	for {
  8717  		x := v_0
  8718  		y := v_1
  8719  		if !(shiftIsBounded(v)) {
  8720  			break
  8721  		}
  8722  		v.reset(OpRISCV64SRA)
  8723  		v.AddArg2(x, y)
  8724  		return true
  8725  	}
  8726  	return false
  8727  }
  8728  func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
  8729  	v_1 := v.Args[1]
  8730  	v_0 := v.Args[0]
  8731  	b := v.Block
  8732  	typ := &b.Func.Config.Types
  8733  	// match: (Rsh64x8 <t> x y)
  8734  	// cond: !shiftIsBounded(v)
  8735  	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
  8736  	for {
  8737  		t := v.Type
  8738  		x := v_0
  8739  		y := v_1
  8740  		if !(!shiftIsBounded(v)) {
  8741  			break
  8742  		}
  8743  		v.reset(OpRISCV64SRA)
  8744  		v.Type = t
  8745  		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8746  		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8747  		v1.AuxInt = int64ToAuxInt(-1)
  8748  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8749  		v2.AuxInt = int64ToAuxInt(64)
  8750  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8751  		v3.AddArg(y)
  8752  		v2.AddArg(v3)
  8753  		v1.AddArg(v2)
  8754  		v0.AddArg2(y, v1)
  8755  		v.AddArg2(x, v0)
  8756  		return true
  8757  	}
  8758  	// match: (Rsh64x8 x y)
  8759  	// cond: shiftIsBounded(v)
  8760  	// result: (SRA x y)
  8761  	for {
  8762  		x := v_0
  8763  		y := v_1
  8764  		if !(shiftIsBounded(v)) {
  8765  			break
  8766  		}
  8767  		v.reset(OpRISCV64SRA)
  8768  		v.AddArg2(x, y)
  8769  		return true
  8770  	}
  8771  	return false
  8772  }
  8773  func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
  8774  	v_1 := v.Args[1]
  8775  	v_0 := v.Args[0]
  8776  	b := v.Block
  8777  	typ := &b.Func.Config.Types
  8778  	// match: (Rsh8Ux16 <t> x y)
  8779  	// cond: !shiftIsBounded(v)
  8780  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  8781  	for {
  8782  		t := v.Type
  8783  		x := v_0
  8784  		y := v_1
  8785  		if !(!shiftIsBounded(v)) {
  8786  			break
  8787  		}
  8788  		v.reset(OpRISCV64AND)
  8789  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8790  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8791  		v1.AddArg(x)
  8792  		v0.AddArg2(v1, y)
  8793  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8794  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8795  		v3.AuxInt = int64ToAuxInt(64)
  8796  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8797  		v4.AddArg(y)
  8798  		v3.AddArg(v4)
  8799  		v2.AddArg(v3)
  8800  		v.AddArg2(v0, v2)
  8801  		return true
  8802  	}
  8803  	// match: (Rsh8Ux16 x y)
  8804  	// cond: shiftIsBounded(v)
  8805  	// result: (SRL (ZeroExt8to64 x) y)
  8806  	for {
  8807  		x := v_0
  8808  		y := v_1
  8809  		if !(shiftIsBounded(v)) {
  8810  			break
  8811  		}
  8812  		v.reset(OpRISCV64SRL)
  8813  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8814  		v0.AddArg(x)
  8815  		v.AddArg2(v0, y)
  8816  		return true
  8817  	}
  8818  	return false
  8819  }
  8820  func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
  8821  	v_1 := v.Args[1]
  8822  	v_0 := v.Args[0]
  8823  	b := v.Block
  8824  	typ := &b.Func.Config.Types
  8825  	// match: (Rsh8Ux32 <t> x y)
  8826  	// cond: !shiftIsBounded(v)
  8827  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  8828  	for {
  8829  		t := v.Type
  8830  		x := v_0
  8831  		y := v_1
  8832  		if !(!shiftIsBounded(v)) {
  8833  			break
  8834  		}
  8835  		v.reset(OpRISCV64AND)
  8836  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8837  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8838  		v1.AddArg(x)
  8839  		v0.AddArg2(v1, y)
  8840  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8841  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8842  		v3.AuxInt = int64ToAuxInt(64)
  8843  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8844  		v4.AddArg(y)
  8845  		v3.AddArg(v4)
  8846  		v2.AddArg(v3)
  8847  		v.AddArg2(v0, v2)
  8848  		return true
  8849  	}
  8850  	// match: (Rsh8Ux32 x y)
  8851  	// cond: shiftIsBounded(v)
  8852  	// result: (SRL (ZeroExt8to64 x) y)
  8853  	for {
  8854  		x := v_0
  8855  		y := v_1
  8856  		if !(shiftIsBounded(v)) {
  8857  			break
  8858  		}
  8859  		v.reset(OpRISCV64SRL)
  8860  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8861  		v0.AddArg(x)
  8862  		v.AddArg2(v0, y)
  8863  		return true
  8864  	}
  8865  	return false
  8866  }
  8867  func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
  8868  	v_1 := v.Args[1]
  8869  	v_0 := v.Args[0]
  8870  	b := v.Block
  8871  	typ := &b.Func.Config.Types
  8872  	// match: (Rsh8Ux64 <t> x y)
  8873  	// cond: !shiftIsBounded(v)
  8874  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
  8875  	for {
  8876  		t := v.Type
  8877  		x := v_0
  8878  		y := v_1
  8879  		if !(!shiftIsBounded(v)) {
  8880  			break
  8881  		}
  8882  		v.reset(OpRISCV64AND)
  8883  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8884  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8885  		v1.AddArg(x)
  8886  		v0.AddArg2(v1, y)
  8887  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8888  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8889  		v3.AuxInt = int64ToAuxInt(64)
  8890  		v3.AddArg(y)
  8891  		v2.AddArg(v3)
  8892  		v.AddArg2(v0, v2)
  8893  		return true
  8894  	}
  8895  	// match: (Rsh8Ux64 x y)
  8896  	// cond: shiftIsBounded(v)
  8897  	// result: (SRL (ZeroExt8to64 x) y)
  8898  	for {
  8899  		x := v_0
  8900  		y := v_1
  8901  		if !(shiftIsBounded(v)) {
  8902  			break
  8903  		}
  8904  		v.reset(OpRISCV64SRL)
  8905  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8906  		v0.AddArg(x)
  8907  		v.AddArg2(v0, y)
  8908  		return true
  8909  	}
  8910  	return false
  8911  }
  8912  func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
  8913  	v_1 := v.Args[1]
  8914  	v_0 := v.Args[0]
  8915  	b := v.Block
  8916  	typ := &b.Func.Config.Types
  8917  	// match: (Rsh8Ux8 <t> x y)
  8918  	// cond: !shiftIsBounded(v)
  8919  	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
  8920  	for {
  8921  		t := v.Type
  8922  		x := v_0
  8923  		y := v_1
  8924  		if !(!shiftIsBounded(v)) {
  8925  			break
  8926  		}
  8927  		v.reset(OpRISCV64AND)
  8928  		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
  8929  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8930  		v1.AddArg(x)
  8931  		v0.AddArg2(v1, y)
  8932  		v2 := b.NewValue0(v.Pos, OpNeg8, t)
  8933  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  8934  		v3.AuxInt = int64ToAuxInt(64)
  8935  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8936  		v4.AddArg(y)
  8937  		v3.AddArg(v4)
  8938  		v2.AddArg(v3)
  8939  		v.AddArg2(v0, v2)
  8940  		return true
  8941  	}
  8942  	// match: (Rsh8Ux8 x y)
  8943  	// cond: shiftIsBounded(v)
  8944  	// result: (SRL (ZeroExt8to64 x) y)
  8945  	for {
  8946  		x := v_0
  8947  		y := v_1
  8948  		if !(shiftIsBounded(v)) {
  8949  			break
  8950  		}
  8951  		v.reset(OpRISCV64SRL)
  8952  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8953  		v0.AddArg(x)
  8954  		v.AddArg2(v0, y)
  8955  		return true
  8956  	}
  8957  	return false
  8958  }
  8959  func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
  8960  	v_1 := v.Args[1]
  8961  	v_0 := v.Args[0]
  8962  	b := v.Block
  8963  	typ := &b.Func.Config.Types
  8964  	// match: (Rsh8x16 <t> x y)
  8965  	// cond: !shiftIsBounded(v)
  8966  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
  8967  	for {
  8968  		t := v.Type
  8969  		x := v_0
  8970  		y := v_1
  8971  		if !(!shiftIsBounded(v)) {
  8972  			break
  8973  		}
  8974  		v.reset(OpRISCV64SRA)
  8975  		v.Type = t
  8976  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8977  		v0.AddArg(x)
  8978  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  8979  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  8980  		v2.AuxInt = int64ToAuxInt(-1)
  8981  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  8982  		v3.AuxInt = int64ToAuxInt(64)
  8983  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8984  		v4.AddArg(y)
  8985  		v3.AddArg(v4)
  8986  		v2.AddArg(v3)
  8987  		v1.AddArg2(y, v2)
  8988  		v.AddArg2(v0, v1)
  8989  		return true
  8990  	}
  8991  	// match: (Rsh8x16 x y)
  8992  	// cond: shiftIsBounded(v)
  8993  	// result: (SRA (SignExt8to64 x) y)
  8994  	for {
  8995  		x := v_0
  8996  		y := v_1
  8997  		if !(shiftIsBounded(v)) {
  8998  			break
  8999  		}
  9000  		v.reset(OpRISCV64SRA)
  9001  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9002  		v0.AddArg(x)
  9003  		v.AddArg2(v0, y)
  9004  		return true
  9005  	}
  9006  	return false
  9007  }
  9008  func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
  9009  	v_1 := v.Args[1]
  9010  	v_0 := v.Args[0]
  9011  	b := v.Block
  9012  	typ := &b.Func.Config.Types
  9013  	// match: (Rsh8x32 <t> x y)
  9014  	// cond: !shiftIsBounded(v)
  9015  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
  9016  	for {
  9017  		t := v.Type
  9018  		x := v_0
  9019  		y := v_1
  9020  		if !(!shiftIsBounded(v)) {
  9021  			break
  9022  		}
  9023  		v.reset(OpRISCV64SRA)
  9024  		v.Type = t
  9025  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9026  		v0.AddArg(x)
  9027  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  9028  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  9029  		v2.AuxInt = int64ToAuxInt(-1)
  9030  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  9031  		v3.AuxInt = int64ToAuxInt(64)
  9032  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9033  		v4.AddArg(y)
  9034  		v3.AddArg(v4)
  9035  		v2.AddArg(v3)
  9036  		v1.AddArg2(y, v2)
  9037  		v.AddArg2(v0, v1)
  9038  		return true
  9039  	}
  9040  	// match: (Rsh8x32 x y)
  9041  	// cond: shiftIsBounded(v)
  9042  	// result: (SRA (SignExt8to64 x) y)
  9043  	for {
  9044  		x := v_0
  9045  		y := v_1
  9046  		if !(shiftIsBounded(v)) {
  9047  			break
  9048  		}
  9049  		v.reset(OpRISCV64SRA)
  9050  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9051  		v0.AddArg(x)
  9052  		v.AddArg2(v0, y)
  9053  		return true
  9054  	}
  9055  	return false
  9056  }
  9057  func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
  9058  	v_1 := v.Args[1]
  9059  	v_0 := v.Args[0]
  9060  	b := v.Block
  9061  	typ := &b.Func.Config.Types
  9062  	// match: (Rsh8x64 <t> x y)
  9063  	// cond: !shiftIsBounded(v)
  9064  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
  9065  	for {
  9066  		t := v.Type
  9067  		x := v_0
  9068  		y := v_1
  9069  		if !(!shiftIsBounded(v)) {
  9070  			break
  9071  		}
  9072  		v.reset(OpRISCV64SRA)
  9073  		v.Type = t
  9074  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9075  		v0.AddArg(x)
  9076  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  9077  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  9078  		v2.AuxInt = int64ToAuxInt(-1)
  9079  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  9080  		v3.AuxInt = int64ToAuxInt(64)
  9081  		v3.AddArg(y)
  9082  		v2.AddArg(v3)
  9083  		v1.AddArg2(y, v2)
  9084  		v.AddArg2(v0, v1)
  9085  		return true
  9086  	}
  9087  	// match: (Rsh8x64 x y)
  9088  	// cond: shiftIsBounded(v)
  9089  	// result: (SRA (SignExt8to64 x) y)
  9090  	for {
  9091  		x := v_0
  9092  		y := v_1
  9093  		if !(shiftIsBounded(v)) {
  9094  			break
  9095  		}
  9096  		v.reset(OpRISCV64SRA)
  9097  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9098  		v0.AddArg(x)
  9099  		v.AddArg2(v0, y)
  9100  		return true
  9101  	}
  9102  	return false
  9103  }
  9104  func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
  9105  	v_1 := v.Args[1]
  9106  	v_0 := v.Args[0]
  9107  	b := v.Block
  9108  	typ := &b.Func.Config.Types
  9109  	// match: (Rsh8x8 <t> x y)
  9110  	// cond: !shiftIsBounded(v)
  9111  	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
  9112  	for {
  9113  		t := v.Type
  9114  		x := v_0
  9115  		y := v_1
  9116  		if !(!shiftIsBounded(v)) {
  9117  			break
  9118  		}
  9119  		v.reset(OpRISCV64SRA)
  9120  		v.Type = t
  9121  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9122  		v0.AddArg(x)
  9123  		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
  9124  		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
  9125  		v2.AuxInt = int64ToAuxInt(-1)
  9126  		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
  9127  		v3.AuxInt = int64ToAuxInt(64)
  9128  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9129  		v4.AddArg(y)
  9130  		v3.AddArg(v4)
  9131  		v2.AddArg(v3)
  9132  		v1.AddArg2(y, v2)
  9133  		v.AddArg2(v0, v1)
  9134  		return true
  9135  	}
  9136  	// match: (Rsh8x8 x y)
  9137  	// cond: shiftIsBounded(v)
  9138  	// result: (SRA (SignExt8to64 x) y)
  9139  	for {
  9140  		x := v_0
  9141  		y := v_1
  9142  		if !(shiftIsBounded(v)) {
  9143  			break
  9144  		}
  9145  		v.reset(OpRISCV64SRA)
  9146  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  9147  		v0.AddArg(x)
  9148  		v.AddArg2(v0, y)
  9149  		return true
  9150  	}
  9151  	return false
  9152  }
  9153  func rewriteValueRISCV64_OpSelect0(v *Value) bool {
  9154  	v_0 := v.Args[0]
  9155  	b := v.Block
  9156  	typ := &b.Func.Config.Types
  9157  	// match: (Select0 (Add64carry x y c))
  9158  	// result: (ADD (ADD <typ.UInt64> x y) c)
  9159  	for {
  9160  		if v_0.Op != OpAdd64carry {
  9161  			break
  9162  		}
  9163  		c := v_0.Args[2]
  9164  		x := v_0.Args[0]
  9165  		y := v_0.Args[1]
  9166  		v.reset(OpRISCV64ADD)
  9167  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  9168  		v0.AddArg2(x, y)
  9169  		v.AddArg2(v0, c)
  9170  		return true
  9171  	}
  9172  	// match: (Select0 (Sub64borrow x y c))
  9173  	// result: (SUB (SUB <typ.UInt64> x y) c)
  9174  	for {
  9175  		if v_0.Op != OpSub64borrow {
  9176  			break
  9177  		}
  9178  		c := v_0.Args[2]
  9179  		x := v_0.Args[0]
  9180  		y := v_0.Args[1]
  9181  		v.reset(OpRISCV64SUB)
  9182  		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  9183  		v0.AddArg2(x, y)
  9184  		v.AddArg2(v0, c)
  9185  		return true
  9186  	}
  9187  	// match: (Select0 m:(LoweredMuluhilo x y))
  9188  	// cond: m.Uses == 1
  9189  	// result: (MULHU x y)
  9190  	for {
  9191  		m := v_0
  9192  		if m.Op != OpRISCV64LoweredMuluhilo {
  9193  			break
  9194  		}
  9195  		y := m.Args[1]
  9196  		x := m.Args[0]
  9197  		if !(m.Uses == 1) {
  9198  			break
  9199  		}
  9200  		v.reset(OpRISCV64MULHU)
  9201  		v.AddArg2(x, y)
  9202  		return true
  9203  	}
  9204  	return false
  9205  }
  9206  func rewriteValueRISCV64_OpSelect1(v *Value) bool {
  9207  	v_0 := v.Args[0]
  9208  	b := v.Block
  9209  	typ := &b.Func.Config.Types
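        	// The carry out of x+y+c is computed branch-free: with s = x+y, the
        	// first SLTU is 1 exactly when x+y wrapped (s < x) and the second is
        	// 1 exactly when adding the carry-in wrapped (s+c < s). At most one
        	// of the two can be set, so OR yields the carry.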
  9210  	// match: (Select1 (Add64carry x y c))
  9211  	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
  9212  	for {
  9213  		if v_0.Op != OpAdd64carry {
  9214  			break
  9215  		}
  9216  		c := v_0.Args[2]
  9217  		x := v_0.Args[0]
  9218  		y := v_0.Args[1]
  9219  		v.reset(OpRISCV64OR)
  9220  		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  9221  		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  9222  		s.AddArg2(x, y)
  9223  		v0.AddArg2(s, x)
  9224  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  9225  		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  9226  		v3.AddArg2(s, c)
  9227  		v2.AddArg2(v3, s)
  9228  		v.AddArg2(v0, v2)
  9229  		return true
  9230  	}
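        	// The borrow is computed the same way: with s = x-y, a borrow
        	// occurred if x < s (the subtraction wrapped) or if s < s-c
        	// (subtracting the borrow-in wrapped).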
  9231  	// match: (Select1 (Sub64borrow x y c))
  9232  	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
  9233  	for {
  9234  		if v_0.Op != OpSub64borrow {
  9235  			break
  9236  		}
  9237  		c := v_0.Args[2]
  9238  		x := v_0.Args[0]
  9239  		y := v_0.Args[1]
  9240  		v.reset(OpRISCV64OR)
  9241  		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  9242  		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  9243  		s.AddArg2(x, y)
  9244  		v0.AddArg2(x, s)
  9245  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  9246  		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  9247  		v3.AddArg2(s, c)
  9248  		v2.AddArg2(s, v3)
  9249  		v.AddArg2(v0, v2)
  9250  		return true
  9251  	}
  9252  	// match: (Select1 m:(LoweredMuluhilo x y))
  9253  	// cond: m.Uses == 1
  9254  	// result: (MUL x y)
  9255  	for {
  9256  		m := v_0
  9257  		if m.Op != OpRISCV64LoweredMuluhilo {
  9258  			break
  9259  		}
  9260  		y := m.Args[1]
  9261  		x := m.Args[0]
  9262  		if !(m.Uses == 1) {
  9263  			break
  9264  		}
  9265  		v.reset(OpRISCV64MUL)
  9266  		v.AddArg2(x, y)
  9267  		return true
  9268  	}
  9269  	return false
  9270  }
  9271  func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
  9272  	v_0 := v.Args[0]
  9273  	b := v.Block
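        	// Slicemask yields all ones for a non-zero length and zero otherwise.
        	// For the non-negative inputs it sees, NEG x has its sign bit set
        	// exactly when x > 0, and SRAI [63] broadcasts that bit.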
  9274  	// match: (Slicemask <t> x)
  9275  	// result: (SRAI [63] (NEG <t> x))
  9276  	for {
  9277  		t := v.Type
  9278  		x := v_0
  9279  		v.reset(OpRISCV64SRAI)
  9280  		v.AuxInt = int64ToAuxInt(63)
  9281  		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
  9282  		v0.AddArg(x)
  9283  		v.AddArg(v0)
  9284  		return true
  9285  	}
  9286  }
  9287  func rewriteValueRISCV64_OpStore(v *Value) bool {
  9288  	v_2 := v.Args[2]
  9289  	v_1 := v.Args[1]
  9290  	v_0 := v.Args[0]
  9291  	// match: (Store {t} ptr val mem)
  9292  	// cond: t.Size() == 1
  9293  	// result: (MOVBstore ptr val mem)
  9294  	for {
  9295  		t := auxToType(v.Aux)
  9296  		ptr := v_0
  9297  		val := v_1
  9298  		mem := v_2
  9299  		if !(t.Size() == 1) {
  9300  			break
  9301  		}
  9302  		v.reset(OpRISCV64MOVBstore)
  9303  		v.AddArg3(ptr, val, mem)
  9304  		return true
  9305  	}
  9306  	// match: (Store {t} ptr val mem)
  9307  	// cond: t.Size() == 2
  9308  	// result: (MOVHstore ptr val mem)
  9309  	for {
  9310  		t := auxToType(v.Aux)
  9311  		ptr := v_0
  9312  		val := v_1
  9313  		mem := v_2
  9314  		if !(t.Size() == 2) {
  9315  			break
  9316  		}
  9317  		v.reset(OpRISCV64MOVHstore)
  9318  		v.AddArg3(ptr, val, mem)
  9319  		return true
  9320  	}
  9321  	// match: (Store {t} ptr val mem)
  9322  	// cond: t.Size() == 4 && !t.IsFloat()
  9323  	// result: (MOVWstore ptr val mem)
  9324  	for {
  9325  		t := auxToType(v.Aux)
  9326  		ptr := v_0
  9327  		val := v_1
  9328  		mem := v_2
  9329  		if !(t.Size() == 4 && !t.IsFloat()) {
  9330  			break
  9331  		}
  9332  		v.reset(OpRISCV64MOVWstore)
  9333  		v.AddArg3(ptr, val, mem)
  9334  		return true
  9335  	}
  9336  	// match: (Store {t} ptr val mem)
  9337  	// cond: t.Size() == 8 && !t.IsFloat()
  9338  	// result: (MOVDstore ptr val mem)
  9339  	for {
  9340  		t := auxToType(v.Aux)
  9341  		ptr := v_0
  9342  		val := v_1
  9343  		mem := v_2
  9344  		if !(t.Size() == 8 && !t.IsFloat()) {
  9345  			break
  9346  		}
  9347  		v.reset(OpRISCV64MOVDstore)
  9348  		v.AddArg3(ptr, val, mem)
  9349  		return true
  9350  	}
  9351  	// match: (Store {t} ptr val mem)
  9352  	// cond: t.Size() == 4 && t.IsFloat()
  9353  	// result: (FMOVWstore ptr val mem)
  9354  	for {
  9355  		t := auxToType(v.Aux)
  9356  		ptr := v_0
  9357  		val := v_1
  9358  		mem := v_2
  9359  		if !(t.Size() == 4 && t.IsFloat()) {
  9360  			break
  9361  		}
  9362  		v.reset(OpRISCV64FMOVWstore)
  9363  		v.AddArg3(ptr, val, mem)
  9364  		return true
  9365  	}
  9366  	// match: (Store {t} ptr val mem)
  9367  	// cond: t.Size() == 8 && t.IsFloat()
  9368  	// result: (FMOVDstore ptr val mem)
  9369  	for {
  9370  		t := auxToType(v.Aux)
  9371  		ptr := v_0
  9372  		val := v_1
  9373  		mem := v_2
  9374  		if !(t.Size() == 8 && t.IsFloat()) {
  9375  			break
  9376  		}
  9377  		v.reset(OpRISCV64FMOVDstore)
  9378  		v.AddArg3(ptr, val, mem)
  9379  		return true
  9380  	}
  9381  	return false
  9382  }
  9383  func rewriteValueRISCV64_OpZero(v *Value) bool {
  9384  	v_1 := v.Args[1]
  9385  	v_0 := v.Args[0]
  9386  	b := v.Block
  9387  	config := b.Func.Config
  9388  	typ := &b.Func.Config.Types
  9389  	// match: (Zero [0] _ mem)
  9390  	// result: mem
  9391  	for {
  9392  		if auxIntToInt64(v.AuxInt) != 0 {
  9393  			break
  9394  		}
  9395  		mem := v_1
  9396  		v.copyOf(mem)
  9397  		return true
  9398  	}
  9399  	// match: (Zero [1] ptr mem)
  9400  	// result: (MOVBstore ptr (MOVDconst [0]) mem)
  9401  	for {
  9402  		if auxIntToInt64(v.AuxInt) != 1 {
  9403  			break
  9404  		}
  9405  		ptr := v_0
  9406  		mem := v_1
  9407  		v.reset(OpRISCV64MOVBstore)
  9408  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9409  		v0.AuxInt = int64ToAuxInt(0)
  9410  		v.AddArg3(ptr, v0, mem)
  9411  		return true
  9412  	}
  9413  	// match: (Zero [2] {t} ptr mem)
  9414  	// cond: t.Alignment()%2 == 0
  9415  	// result: (MOVHstore ptr (MOVDconst [0]) mem)
  9416  	for {
  9417  		if auxIntToInt64(v.AuxInt) != 2 {
  9418  			break
  9419  		}
  9420  		t := auxToType(v.Aux)
  9421  		ptr := v_0
  9422  		mem := v_1
  9423  		if !(t.Alignment()%2 == 0) {
  9424  			break
  9425  		}
  9426  		v.reset(OpRISCV64MOVHstore)
  9427  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9428  		v0.AuxInt = int64ToAuxInt(0)
  9429  		v.AddArg3(ptr, v0, mem)
  9430  		return true
  9431  	}
  9432  	// match: (Zero [2] ptr mem)
  9433  	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
  9434  	for {
  9435  		if auxIntToInt64(v.AuxInt) != 2 {
  9436  			break
  9437  		}
  9438  		ptr := v_0
  9439  		mem := v_1
  9440  		v.reset(OpRISCV64MOVBstore)
  9441  		v.AuxInt = int32ToAuxInt(1)
  9442  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9443  		v0.AuxInt = int64ToAuxInt(0)
  9444  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9445  		v1.AddArg3(ptr, v0, mem)
  9446  		v.AddArg3(ptr, v0, v1)
  9447  		return true
  9448  	}
  9449  	// match: (Zero [4] {t} ptr mem)
  9450  	// cond: t.Alignment()%4 == 0
  9451  	// result: (MOVWstore ptr (MOVDconst [0]) mem)
  9452  	for {
  9453  		if auxIntToInt64(v.AuxInt) != 4 {
  9454  			break
  9455  		}
  9456  		t := auxToType(v.Aux)
  9457  		ptr := v_0
  9458  		mem := v_1
  9459  		if !(t.Alignment()%4 == 0) {
  9460  			break
  9461  		}
  9462  		v.reset(OpRISCV64MOVWstore)
  9463  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9464  		v0.AuxInt = int64ToAuxInt(0)
  9465  		v.AddArg3(ptr, v0, mem)
  9466  		return true
  9467  	}
  9468  	// match: (Zero [4] {t} ptr mem)
  9469  	// cond: t.Alignment()%2 == 0
  9470  	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
  9471  	for {
  9472  		if auxIntToInt64(v.AuxInt) != 4 {
  9473  			break
  9474  		}
  9475  		t := auxToType(v.Aux)
  9476  		ptr := v_0
  9477  		mem := v_1
  9478  		if !(t.Alignment()%2 == 0) {
  9479  			break
  9480  		}
  9481  		v.reset(OpRISCV64MOVHstore)
  9482  		v.AuxInt = int32ToAuxInt(2)
  9483  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9484  		v0.AuxInt = int64ToAuxInt(0)
  9485  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9486  		v1.AddArg3(ptr, v0, mem)
  9487  		v.AddArg3(ptr, v0, v1)
  9488  		return true
  9489  	}
  9490  	// match: (Zero [4] ptr mem)
  9491  	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
  9492  	for {
  9493  		if auxIntToInt64(v.AuxInt) != 4 {
  9494  			break
  9495  		}
  9496  		ptr := v_0
  9497  		mem := v_1
  9498  		v.reset(OpRISCV64MOVBstore)
  9499  		v.AuxInt = int32ToAuxInt(3)
  9500  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9501  		v0.AuxInt = int64ToAuxInt(0)
  9502  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9503  		v1.AuxInt = int32ToAuxInt(2)
  9504  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9505  		v2.AuxInt = int32ToAuxInt(1)
  9506  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9507  		v3.AddArg3(ptr, v0, mem)
  9508  		v2.AddArg3(ptr, v0, v3)
  9509  		v1.AddArg3(ptr, v0, v2)
  9510  		v.AddArg3(ptr, v0, v1)
  9511  		return true
  9512  	}
  9513  	// match: (Zero [8] {t} ptr mem)
  9514  	// cond: t.Alignment()%8 == 0
  9515  	// result: (MOVDstore ptr (MOVDconst [0]) mem)
  9516  	for {
  9517  		if auxIntToInt64(v.AuxInt) != 8 {
  9518  			break
  9519  		}
  9520  		t := auxToType(v.Aux)
  9521  		ptr := v_0
  9522  		mem := v_1
  9523  		if !(t.Alignment()%8 == 0) {
  9524  			break
  9525  		}
  9526  		v.reset(OpRISCV64MOVDstore)
  9527  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9528  		v0.AuxInt = int64ToAuxInt(0)
  9529  		v.AddArg3(ptr, v0, mem)
  9530  		return true
  9531  	}
  9532  	// match: (Zero [8] {t} ptr mem)
  9533  	// cond: t.Alignment()%4 == 0
  9534  	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
  9535  	for {
  9536  		if auxIntToInt64(v.AuxInt) != 8 {
  9537  			break
  9538  		}
  9539  		t := auxToType(v.Aux)
  9540  		ptr := v_0
  9541  		mem := v_1
  9542  		if !(t.Alignment()%4 == 0) {
  9543  			break
  9544  		}
  9545  		v.reset(OpRISCV64MOVWstore)
  9546  		v.AuxInt = int32ToAuxInt(4)
  9547  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9548  		v0.AuxInt = int64ToAuxInt(0)
  9549  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  9550  		v1.AddArg3(ptr, v0, mem)
  9551  		v.AddArg3(ptr, v0, v1)
  9552  		return true
  9553  	}
  9554  	// match: (Zero [8] {t} ptr mem)
  9555  	// cond: t.Alignment()%2 == 0
  9556  	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
  9557  	for {
  9558  		if auxIntToInt64(v.AuxInt) != 8 {
  9559  			break
  9560  		}
  9561  		t := auxToType(v.Aux)
  9562  		ptr := v_0
  9563  		mem := v_1
  9564  		if !(t.Alignment()%2 == 0) {
  9565  			break
  9566  		}
  9567  		v.reset(OpRISCV64MOVHstore)
  9568  		v.AuxInt = int32ToAuxInt(6)
  9569  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9570  		v0.AuxInt = int64ToAuxInt(0)
  9571  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9572  		v1.AuxInt = int32ToAuxInt(4)
  9573  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9574  		v2.AuxInt = int32ToAuxInt(2)
  9575  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9576  		v3.AddArg3(ptr, v0, mem)
  9577  		v2.AddArg3(ptr, v0, v3)
  9578  		v1.AddArg3(ptr, v0, v2)
  9579  		v.AddArg3(ptr, v0, v1)
  9580  		return true
  9581  	}
  9582  	// match: (Zero [3] ptr mem)
  9583  	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
  9584  	for {
  9585  		if auxIntToInt64(v.AuxInt) != 3 {
  9586  			break
  9587  		}
  9588  		ptr := v_0
  9589  		mem := v_1
  9590  		v.reset(OpRISCV64MOVBstore)
  9591  		v.AuxInt = int32ToAuxInt(2)
  9592  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9593  		v0.AuxInt = int64ToAuxInt(0)
  9594  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9595  		v1.AuxInt = int32ToAuxInt(1)
  9596  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  9597  		v2.AddArg3(ptr, v0, mem)
  9598  		v1.AddArg3(ptr, v0, v2)
  9599  		v.AddArg3(ptr, v0, v1)
  9600  		return true
  9601  	}
  9602  	// match: (Zero [6] {t} ptr mem)
  9603  	// cond: t.Alignment()%2 == 0
  9604  	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
  9605  	for {
  9606  		if auxIntToInt64(v.AuxInt) != 6 {
  9607  			break
  9608  		}
  9609  		t := auxToType(v.Aux)
  9610  		ptr := v_0
  9611  		mem := v_1
  9612  		if !(t.Alignment()%2 == 0) {
  9613  			break
  9614  		}
  9615  		v.reset(OpRISCV64MOVHstore)
  9616  		v.AuxInt = int32ToAuxInt(4)
  9617  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9618  		v0.AuxInt = int64ToAuxInt(0)
  9619  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9620  		v1.AuxInt = int32ToAuxInt(2)
  9621  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  9622  		v2.AddArg3(ptr, v0, mem)
  9623  		v1.AddArg3(ptr, v0, v2)
  9624  		v.AddArg3(ptr, v0, v1)
  9625  		return true
  9626  	}
  9627  	// match: (Zero [12] {t} ptr mem)
  9628  	// cond: t.Alignment()%4 == 0
  9629  	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
  9630  	for {
  9631  		if auxIntToInt64(v.AuxInt) != 12 {
  9632  			break
  9633  		}
  9634  		t := auxToType(v.Aux)
  9635  		ptr := v_0
  9636  		mem := v_1
  9637  		if !(t.Alignment()%4 == 0) {
  9638  			break
  9639  		}
  9640  		v.reset(OpRISCV64MOVWstore)
  9641  		v.AuxInt = int32ToAuxInt(8)
  9642  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9643  		v0.AuxInt = int64ToAuxInt(0)
  9644  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  9645  		v1.AuxInt = int32ToAuxInt(4)
  9646  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  9647  		v2.AddArg3(ptr, v0, mem)
  9648  		v1.AddArg3(ptr, v0, v2)
  9649  		v.AddArg3(ptr, v0, v1)
  9650  		return true
  9651  	}
  9652  	// match: (Zero [16] {t} ptr mem)
  9653  	// cond: t.Alignment()%8 == 0
  9654  	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
  9655  	for {
  9656  		if auxIntToInt64(v.AuxInt) != 16 {
  9657  			break
  9658  		}
  9659  		t := auxToType(v.Aux)
  9660  		ptr := v_0
  9661  		mem := v_1
  9662  		if !(t.Alignment()%8 == 0) {
  9663  			break
  9664  		}
  9665  		v.reset(OpRISCV64MOVDstore)
  9666  		v.AuxInt = int32ToAuxInt(8)
  9667  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9668  		v0.AuxInt = int64ToAuxInt(0)
  9669  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  9670  		v1.AddArg3(ptr, v0, mem)
  9671  		v.AddArg3(ptr, v0, v1)
  9672  		return true
  9673  	}
  9674  	// match: (Zero [24] {t} ptr mem)
  9675  	// cond: t.Alignment()%8 == 0
  9676  	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
  9677  	for {
  9678  		if auxIntToInt64(v.AuxInt) != 24 {
  9679  			break
  9680  		}
  9681  		t := auxToType(v.Aux)
  9682  		ptr := v_0
  9683  		mem := v_1
  9684  		if !(t.Alignment()%8 == 0) {
  9685  			break
  9686  		}
  9687  		v.reset(OpRISCV64MOVDstore)
  9688  		v.AuxInt = int32ToAuxInt(16)
  9689  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9690  		v0.AuxInt = int64ToAuxInt(0)
  9691  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  9692  		v1.AuxInt = int32ToAuxInt(8)
  9693  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  9694  		v2.AddArg3(ptr, v0, mem)
  9695  		v1.AddArg3(ptr, v0, v2)
  9696  		v.AddArg3(ptr, v0, v1)
  9697  		return true
  9698  	}
  9699  	// match: (Zero [32] {t} ptr mem)
  9700  	// cond: t.Alignment()%8 == 0
  9701  	// result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
  9702  	for {
  9703  		if auxIntToInt64(v.AuxInt) != 32 {
  9704  			break
  9705  		}
  9706  		t := auxToType(v.Aux)
  9707  		ptr := v_0
  9708  		mem := v_1
  9709  		if !(t.Alignment()%8 == 0) {
  9710  			break
  9711  		}
  9712  		v.reset(OpRISCV64MOVDstore)
  9713  		v.AuxInt = int32ToAuxInt(24)
  9714  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9715  		v0.AuxInt = int64ToAuxInt(0)
  9716  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  9717  		v1.AuxInt = int32ToAuxInt(16)
  9718  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  9719  		v2.AuxInt = int32ToAuxInt(8)
  9720  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  9721  		v3.AddArg3(ptr, v0, mem)
  9722  		v2.AddArg3(ptr, v0, v3)
  9723  		v1.AddArg3(ptr, v0, v2)
  9724  		v.AddArg3(ptr, v0, v1)
  9725  		return true
  9726  	}
  9727  	// match: (Zero [s] {t} ptr mem)
  9728  	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0
  9729  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
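	// The AuxInt is the byte offset at which to enter the duffzero routine:
	// assuming each of its 128 zeroing blocks occupies 8 bytes of code,
	// starting 8*(128 - s/8) bytes in leaves exactly s/8 stores to execute.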
  9730  	for {
  9731  		s := auxIntToInt64(v.AuxInt)
  9732  		t := auxToType(v.Aux)
  9733  		ptr := v_0
  9734  		mem := v_1
  9735  		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0) {
  9736  			break
  9737  		}
  9738  		v.reset(OpRISCV64DUFFZERO)
  9739  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
  9740  		v.AddArg2(ptr, mem)
  9741  		return true
  9742  	}
  9743  	// match: (Zero [s] {t} ptr mem)
  9744  	// result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
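	// Generic fallback: LoweredZero expands to a store loop. Its second
	// argument, ptr + (s - moveSize(alignment)), is presumably the address
	// of the final store, used as the loop's end pointer.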
  9745  	for {
  9746  		s := auxIntToInt64(v.AuxInt)
  9747  		t := auxToType(v.Aux)
  9748  		ptr := v_0
  9749  		mem := v_1
  9750  		v.reset(OpRISCV64LoweredZero)
  9751  		v.AuxInt = int64ToAuxInt(t.Alignment())
  9752  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
  9753  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9754  		v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  9755  		v0.AddArg2(ptr, v1)
  9756  		v.AddArg3(ptr, v0, mem)
  9757  		return true
  9758  	}
  9759  }
  9760  func rewriteBlockRISCV64(b *Block) bool {
  9761  	typ := &b.Func.Config.Types
  9762  	switch b.Kind {
  9763  	case BlockRISCV64BEQ:
  9764  		// match: (BEQ (MOVDconst [0]) cond yes no)
  9765  		// result: (BEQZ cond yes no)
  9766  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9767  			v_0 := b.Controls[0]
  9768  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9769  				break
  9770  			}
  9771  			cond := b.Controls[1]
  9772  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9773  			return true
  9774  		}
  9775  		// match: (BEQ cond (MOVDconst [0]) yes no)
  9776  		// result: (BEQZ cond yes no)
  9777  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9778  			cond := b.Controls[0]
  9779  			v_1 := b.Controls[1]
  9780  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9781  				break
  9782  			}
  9783  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9784  			return true
  9785  		}
  9786  	case BlockRISCV64BEQZ:
  9787  		// match: (BEQZ (SEQZ x) yes no)
  9788  		// result: (BNEZ x yes no)
  9789  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9790  			v_0 := b.Controls[0]
  9791  			x := v_0.Args[0]
  9792  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9793  			return true
  9794  		}
  9795  		// match: (BEQZ (SNEZ x) yes no)
  9796  		// result: (BEQZ x yes no)
  9797  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9798  			v_0 := b.Controls[0]
  9799  			x := v_0.Args[0]
  9800  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9801  			return true
  9802  		}
  9803  		// match: (BEQZ (NEG x) yes no)
  9804  		// result: (BEQZ x yes no)
  9805  		for b.Controls[0].Op == OpRISCV64NEG {
  9806  			v_0 := b.Controls[0]
  9807  			x := v_0.Args[0]
  9808  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9809  			return true
  9810  		}
  9811  		// match: (BEQZ (FNES <t> x y) yes no)
  9812  		// result: (BNEZ (FEQS <t> x y) yes no)
  9813  		for b.Controls[0].Op == OpRISCV64FNES {
  9814  			v_0 := b.Controls[0]
  9815  			t := v_0.Type
  9816  			_ = v_0.Args[1]
  9817  			v_0_0 := v_0.Args[0]
  9818  			v_0_1 := v_0.Args[1]
  9819  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9820  				x := v_0_0
  9821  				y := v_0_1
  9822  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  9823  				v0.AddArg2(x, y)
  9824  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  9825  				return true
  9826  			}
  9827  		}
  9828  		// match: (BEQZ (FNED <t> x y) yes no)
  9829  		// result: (BNEZ (FEQD <t> x y) yes no)
  9830  		for b.Controls[0].Op == OpRISCV64FNED {
  9831  			v_0 := b.Controls[0]
  9832  			t := v_0.Type
  9833  			_ = v_0.Args[1]
  9834  			v_0_0 := v_0.Args[0]
  9835  			v_0_1 := v_0.Args[1]
  9836  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9837  				x := v_0_0
  9838  				y := v_0_1
  9839  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  9840  				v0.AddArg2(x, y)
  9841  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  9842  				return true
  9843  			}
  9844  		}
  9845  		// match: (BEQZ (SUB x y) yes no)
  9846  		// result: (BEQ x y yes no)
  9847  		for b.Controls[0].Op == OpRISCV64SUB {
  9848  			v_0 := b.Controls[0]
  9849  			y := v_0.Args[1]
  9850  			x := v_0.Args[0]
  9851  			b.resetWithControl2(BlockRISCV64BEQ, x, y)
  9852  			return true
  9853  		}
  9854  		// match: (BEQZ (SLT x y) yes no)
  9855  		// result: (BGE x y yes no)
  9856  		for b.Controls[0].Op == OpRISCV64SLT {
  9857  			v_0 := b.Controls[0]
  9858  			y := v_0.Args[1]
  9859  			x := v_0.Args[0]
  9860  			b.resetWithControl2(BlockRISCV64BGE, x, y)
  9861  			return true
  9862  		}
  9863  		// match: (BEQZ (SLTU x y) yes no)
  9864  		// result: (BGEU x y yes no)
  9865  		for b.Controls[0].Op == OpRISCV64SLTU {
  9866  			v_0 := b.Controls[0]
  9867  			y := v_0.Args[1]
  9868  			x := v_0.Args[0]
  9869  			b.resetWithControl2(BlockRISCV64BGEU, x, y)
  9870  			return true
  9871  		}
  9872  		// match: (BEQZ (SLTI [x] y) yes no)
  9873  		// result: (BGE y (MOVDconst [x]) yes no)
  9874  		for b.Controls[0].Op == OpRISCV64SLTI {
  9875  			v_0 := b.Controls[0]
  9876  			x := auxIntToInt64(v_0.AuxInt)
  9877  			y := v_0.Args[0]
  9878  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9879  			v0.AuxInt = int64ToAuxInt(x)
  9880  			b.resetWithControl2(BlockRISCV64BGE, y, v0)
  9881  			return true
  9882  		}
  9883  		// match: (BEQZ (SLTIU [x] y) yes no)
  9884  		// result: (BGEU y (MOVDconst [x]) yes no)
  9885  		for b.Controls[0].Op == OpRISCV64SLTIU {
  9886  			v_0 := b.Controls[0]
  9887  			x := auxIntToInt64(v_0.AuxInt)
  9888  			y := v_0.Args[0]
  9889  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9890  			v0.AuxInt = int64ToAuxInt(x)
  9891  			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
  9892  			return true
  9893  		}
  9894  	case BlockRISCV64BGE:
  9895  		// match: (BGE (MOVDconst [0]) cond yes no)
  9896  		// result: (BLEZ cond yes no)
  9897  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9898  			v_0 := b.Controls[0]
  9899  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9900  				break
  9901  			}
  9902  			cond := b.Controls[1]
  9903  			b.resetWithControl(BlockRISCV64BLEZ, cond)
  9904  			return true
  9905  		}
  9906  		// match: (BGE cond (MOVDconst [0]) yes no)
  9907  		// result: (BGEZ cond yes no)
  9908  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9909  			cond := b.Controls[0]
  9910  			v_1 := b.Controls[1]
  9911  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9912  				break
  9913  			}
  9914  			b.resetWithControl(BlockRISCV64BGEZ, cond)
  9915  			return true
  9916  		}
  9917  	case BlockRISCV64BGEU:
  9918  		// match: (BGEU (MOVDconst [0]) cond yes no)
  9919  		// result: (BEQZ cond yes no)
  9920  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9921  			v_0 := b.Controls[0]
  9922  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9923  				break
  9924  			}
  9925  			cond := b.Controls[1]
  9926  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9927  			return true
  9928  		}
  9929  	case BlockRISCV64BLT:
  9930  		// match: (BLT (MOVDconst [0]) cond yes no)
  9931  		// result: (BGTZ cond yes no)
  9932  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9933  			v_0 := b.Controls[0]
  9934  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9935  				break
  9936  			}
  9937  			cond := b.Controls[1]
  9938  			b.resetWithControl(BlockRISCV64BGTZ, cond)
  9939  			return true
  9940  		}
  9941  		// match: (BLT cond (MOVDconst [0]) yes no)
  9942  		// result: (BLTZ cond yes no)
  9943  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9944  			cond := b.Controls[0]
  9945  			v_1 := b.Controls[1]
  9946  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9947  				break
  9948  			}
  9949  			b.resetWithControl(BlockRISCV64BLTZ, cond)
  9950  			return true
  9951  		}
  9952  	case BlockRISCV64BLTU:
  9953  		// match: (BLTU (MOVDconst [0]) cond yes no)
  9954  		// result: (BNEZ cond yes no)
  9955  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9956  			v_0 := b.Controls[0]
  9957  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9958  				break
  9959  			}
  9960  			cond := b.Controls[1]
  9961  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9962  			return true
  9963  		}
  9964  	case BlockRISCV64BNE:
  9965  		// match: (BNE (MOVDconst [0]) cond yes no)
  9966  		// result: (BNEZ cond yes no)
  9967  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9968  			v_0 := b.Controls[0]
  9969  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9970  				break
  9971  			}
  9972  			cond := b.Controls[1]
  9973  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9974  			return true
  9975  		}
  9976  		// match: (BNE cond (MOVDconst [0]) yes no)
  9977  		// result: (BNEZ cond yes no)
  9978  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9979  			cond := b.Controls[0]
  9980  			v_1 := b.Controls[1]
  9981  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9982  				break
  9983  			}
  9984  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9985  			return true
  9986  		}
  9987  	case BlockRISCV64BNEZ:
  9988  		// match: (BNEZ (SEQZ x) yes no)
  9989  		// result: (BEQZ x yes no)
  9990  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9991  			v_0 := b.Controls[0]
  9992  			x := v_0.Args[0]
  9993  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9994  			return true
  9995  		}
  9996  		// match: (BNEZ (SNEZ x) yes no)
  9997  		// result: (BNEZ x yes no)
  9998  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9999  			v_0 := b.Controls[0]
 10000  			x := v_0.Args[0]
 10001  			b.resetWithControl(BlockRISCV64BNEZ, x)
 10002  			return true
 10003  		}
 10004  		// match: (BNEZ (NEG x) yes no)
 10005  		// result: (BNEZ x yes no)
 10006  		for b.Controls[0].Op == OpRISCV64NEG {
 10007  			v_0 := b.Controls[0]
 10008  			x := v_0.Args[0]
 10009  			b.resetWithControl(BlockRISCV64BNEZ, x)
 10010  			return true
 10011  		}
 10012  		// match: (BNEZ (FNES <t> x y) yes no)
 10013  		// result: (BEQZ (FEQS <t> x y) yes no)
 10014  		for b.Controls[0].Op == OpRISCV64FNES {
 10015  			v_0 := b.Controls[0]
 10016  			t := v_0.Type
 10017  			_ = v_0.Args[1]
 10018  			v_0_0 := v_0.Args[0]
 10019  			v_0_1 := v_0.Args[1]
 10020  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10021  				x := v_0_0
 10022  				y := v_0_1
 10023  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
 10024  				v0.AddArg2(x, y)
 10025  				b.resetWithControl(BlockRISCV64BEQZ, v0)
 10026  				return true
 10027  			}
 10028  		}
 10029  		// match: (BNEZ (FNED <t> x y) yes no)
 10030  		// result: (BEQZ (FEQD <t> x y) yes no)
 10031  		for b.Controls[0].Op == OpRISCV64FNED {
 10032  			v_0 := b.Controls[0]
 10033  			t := v_0.Type
 10034  			_ = v_0.Args[1]
 10035  			v_0_0 := v_0.Args[0]
 10036  			v_0_1 := v_0.Args[1]
 10037  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10038  				x := v_0_0
 10039  				y := v_0_1
 10040  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
 10041  				v0.AddArg2(x, y)
 10042  				b.resetWithControl(BlockRISCV64BEQZ, v0)
 10043  				return true
 10044  			}
 10045  		}
 10046  		// match: (BNEZ (SUB x y) yes no)
 10047  		// result: (BNE x y yes no)
 10048  		for b.Controls[0].Op == OpRISCV64SUB {
 10049  			v_0 := b.Controls[0]
 10050  			y := v_0.Args[1]
 10051  			x := v_0.Args[0]
 10052  			b.resetWithControl2(BlockRISCV64BNE, x, y)
 10053  			return true
 10054  		}
 10055  		// match: (BNEZ (SLT x y) yes no)
 10056  		// result: (BLT x y yes no)
 10057  		for b.Controls[0].Op == OpRISCV64SLT {
 10058  			v_0 := b.Controls[0]
 10059  			y := v_0.Args[1]
 10060  			x := v_0.Args[0]
 10061  			b.resetWithControl2(BlockRISCV64BLT, x, y)
 10062  			return true
 10063  		}
 10064  		// match: (BNEZ (SLTU x y) yes no)
 10065  		// result: (BLTU x y yes no)
 10066  		for b.Controls[0].Op == OpRISCV64SLTU {
 10067  			v_0 := b.Controls[0]
 10068  			y := v_0.Args[1]
 10069  			x := v_0.Args[0]
 10070  			b.resetWithControl2(BlockRISCV64BLTU, x, y)
 10071  			return true
 10072  		}
 10073  		// match: (BNEZ (SLTI [x] y) yes no)
 10074  		// result: (BLT y (MOVDconst [x]) yes no)
 10075  		for b.Controls[0].Op == OpRISCV64SLTI {
 10076  			v_0 := b.Controls[0]
 10077  			x := auxIntToInt64(v_0.AuxInt)
 10078  			y := v_0.Args[0]
 10079  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
 10080  			v0.AuxInt = int64ToAuxInt(x)
 10081  			b.resetWithControl2(BlockRISCV64BLT, y, v0)
 10082  			return true
 10083  		}
 10084  		// match: (BNEZ (SLTIU [x] y) yes no)
 10085  		// result: (BLTU y (MOVDconst [x]) yes no)
 10086  		for b.Controls[0].Op == OpRISCV64SLTIU {
 10087  			v_0 := b.Controls[0]
 10088  			x := auxIntToInt64(v_0.AuxInt)
 10089  			y := v_0.Args[0]
 10090  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
 10091  			v0.AuxInt = int64ToAuxInt(x)
 10092  			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
 10093  			return true
 10094  		}
 10095  	case BlockIf:
 10096  		// match: (If cond yes no)
 10097  		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
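		// A generic boolean condition is only guaranteed in the low byte of
		// its register, so it is zero-extended with MOVBUreg before branching
		// on non-zero.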
 10098  		for {
 10099  			cond := b.Controls[0]
 10100  			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
 10101  			v0.AddArg(cond)
 10102  			b.resetWithControl(BlockRISCV64BNEZ, v0)
 10103  			return true
 10104  		}
 10105  	}
 10106  	return false
 10107  }
 10108  
