Source file src/cmd/compile/internal/ssa/rewriteLOONG64.go

     1  // Code generated from _gen/LOONG64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
     7  func rewriteValueLOONG64(v *Value) bool {
     8  	switch v.Op {
     9  	case OpAbs:
    10  		v.Op = OpLOONG64ABSD
    11  		return true
    12  	case OpAdd16:
    13  		v.Op = OpLOONG64ADDV
    14  		return true
    15  	case OpAdd32:
    16  		v.Op = OpLOONG64ADDV
    17  		return true
    18  	case OpAdd32F:
    19  		v.Op = OpLOONG64ADDF
    20  		return true
    21  	case OpAdd64:
    22  		v.Op = OpLOONG64ADDV
    23  		return true
    24  	case OpAdd64F:
    25  		v.Op = OpLOONG64ADDD
    26  		return true
    27  	case OpAdd8:
    28  		v.Op = OpLOONG64ADDV
    29  		return true
    30  	case OpAddPtr:
    31  		v.Op = OpLOONG64ADDV
    32  		return true
    33  	case OpAddr:
    34  		return rewriteValueLOONG64_OpAddr(v)
    35  	case OpAnd16:
    36  		v.Op = OpLOONG64AND
    37  		return true
    38  	case OpAnd32:
    39  		v.Op = OpLOONG64AND
    40  		return true
    41  	case OpAnd64:
    42  		v.Op = OpLOONG64AND
    43  		return true
    44  	case OpAnd8:
    45  		v.Op = OpLOONG64AND
    46  		return true
    47  	case OpAndB:
    48  		v.Op = OpLOONG64AND
    49  		return true
    50  	case OpAtomicAdd32:
    51  		v.Op = OpLOONG64LoweredAtomicAdd32
    52  		return true
    53  	case OpAtomicAdd64:
    54  		v.Op = OpLOONG64LoweredAtomicAdd64
    55  		return true
    56  	case OpAtomicAnd32:
    57  		v.Op = OpLOONG64LoweredAtomicAnd32
    58  		return true
    59  	case OpAtomicAnd32value:
    60  		v.Op = OpLOONG64LoweredAtomicAnd32value
    61  		return true
    62  	case OpAtomicAnd64value:
    63  		v.Op = OpLOONG64LoweredAtomicAnd64value
    64  		return true
    65  	case OpAtomicAnd8:
    66  		return rewriteValueLOONG64_OpAtomicAnd8(v)
    67  	case OpAtomicCompareAndSwap32:
    68  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v)
    69  	case OpAtomicCompareAndSwap32Variant:
    70  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v)
    71  	case OpAtomicCompareAndSwap64:
    72  		v.Op = OpLOONG64LoweredAtomicCas64
    73  		return true
    74  	case OpAtomicCompareAndSwap64Variant:
    75  		v.Op = OpLOONG64LoweredAtomicCas64Variant
    76  		return true
    77  	case OpAtomicExchange32:
    78  		v.Op = OpLOONG64LoweredAtomicExchange32
    79  		return true
    80  	case OpAtomicExchange64:
    81  		v.Op = OpLOONG64LoweredAtomicExchange64
    82  		return true
    83  	case OpAtomicExchange8Variant:
    84  		v.Op = OpLOONG64LoweredAtomicExchange8Variant
    85  		return true
    86  	case OpAtomicLoad32:
    87  		v.Op = OpLOONG64LoweredAtomicLoad32
    88  		return true
    89  	case OpAtomicLoad64:
    90  		v.Op = OpLOONG64LoweredAtomicLoad64
    91  		return true
    92  	case OpAtomicLoad8:
    93  		v.Op = OpLOONG64LoweredAtomicLoad8
    94  		return true
    95  	case OpAtomicLoadPtr:
    96  		v.Op = OpLOONG64LoweredAtomicLoad64
    97  		return true
    98  	case OpAtomicOr32:
    99  		v.Op = OpLOONG64LoweredAtomicOr32
   100  		return true
   101  	case OpAtomicOr32value:
   102  		v.Op = OpLOONG64LoweredAtomicOr32value
   103  		return true
   104  	case OpAtomicOr64value:
   105  		v.Op = OpLOONG64LoweredAtomicOr64value
   106  		return true
   107  	case OpAtomicOr8:
   108  		return rewriteValueLOONG64_OpAtomicOr8(v)
   109  	case OpAtomicStore32:
   110  		v.Op = OpLOONG64LoweredAtomicStore32
   111  		return true
   112  	case OpAtomicStore32Variant:
   113  		v.Op = OpLOONG64LoweredAtomicStore32Variant
   114  		return true
   115  	case OpAtomicStore64:
   116  		v.Op = OpLOONG64LoweredAtomicStore64
   117  		return true
   118  	case OpAtomicStore64Variant:
   119  		v.Op = OpLOONG64LoweredAtomicStore64Variant
   120  		return true
   121  	case OpAtomicStore8:
   122  		v.Op = OpLOONG64LoweredAtomicStore8
   123  		return true
   124  	case OpAtomicStore8Variant:
   125  		v.Op = OpLOONG64LoweredAtomicStore8Variant
   126  		return true
   127  	case OpAtomicStorePtrNoWB:
   128  		v.Op = OpLOONG64LoweredAtomicStore64
   129  		return true
   130  	case OpAvg64u:
   131  		return rewriteValueLOONG64_OpAvg64u(v)
   132  	case OpBitLen16:
   133  		return rewriteValueLOONG64_OpBitLen16(v)
   134  	case OpBitLen32:
   135  		return rewriteValueLOONG64_OpBitLen32(v)
   136  	case OpBitLen64:
   137  		return rewriteValueLOONG64_OpBitLen64(v)
   138  	case OpBitLen8:
   139  		return rewriteValueLOONG64_OpBitLen8(v)
   140  	case OpBitRev16:
   141  		return rewriteValueLOONG64_OpBitRev16(v)
   142  	case OpBitRev32:
   143  		v.Op = OpLOONG64BITREVW
   144  		return true
   145  	case OpBitRev64:
   146  		v.Op = OpLOONG64BITREVV
   147  		return true
   148  	case OpBitRev8:
   149  		v.Op = OpLOONG64BITREV4B
   150  		return true
   151  	case OpBswap16:
   152  		v.Op = OpLOONG64REVB2H
   153  		return true
   154  	case OpBswap32:
   155  		v.Op = OpLOONG64REVB2W
   156  		return true
   157  	case OpBswap64:
   158  		v.Op = OpLOONG64REVBV
   159  		return true
   160  	case OpClosureCall:
   161  		v.Op = OpLOONG64CALLclosure
   162  		return true
   163  	case OpCom16:
   164  		return rewriteValueLOONG64_OpCom16(v)
   165  	case OpCom32:
   166  		return rewriteValueLOONG64_OpCom32(v)
   167  	case OpCom64:
   168  		return rewriteValueLOONG64_OpCom64(v)
   169  	case OpCom8:
   170  		return rewriteValueLOONG64_OpCom8(v)
   171  	case OpCondSelect:
   172  		return rewriteValueLOONG64_OpCondSelect(v)
   173  	case OpConst16:
   174  		return rewriteValueLOONG64_OpConst16(v)
   175  	case OpConst32:
   176  		return rewriteValueLOONG64_OpConst32(v)
   177  	case OpConst32F:
   178  		return rewriteValueLOONG64_OpConst32F(v)
   179  	case OpConst64:
   180  		return rewriteValueLOONG64_OpConst64(v)
   181  	case OpConst64F:
   182  		return rewriteValueLOONG64_OpConst64F(v)
   183  	case OpConst8:
   184  		return rewriteValueLOONG64_OpConst8(v)
   185  	case OpConstBool:
   186  		return rewriteValueLOONG64_OpConstBool(v)
   187  	case OpConstNil:
   188  		return rewriteValueLOONG64_OpConstNil(v)
   189  	case OpCopysign:
   190  		v.Op = OpLOONG64FCOPYSGD
   191  		return true
   192  	case OpCtz16:
   193  		return rewriteValueLOONG64_OpCtz16(v)
   194  	case OpCtz16NonZero:
   195  		v.Op = OpCtz64
   196  		return true
   197  	case OpCtz32:
   198  		v.Op = OpLOONG64CTZW
   199  		return true
   200  	case OpCtz32NonZero:
   201  		v.Op = OpCtz64
   202  		return true
   203  	case OpCtz64:
   204  		v.Op = OpLOONG64CTZV
   205  		return true
   206  	case OpCtz64NonZero:
   207  		v.Op = OpCtz64
   208  		return true
   209  	case OpCtz8:
   210  		return rewriteValueLOONG64_OpCtz8(v)
   211  	case OpCtz8NonZero:
   212  		v.Op = OpCtz64
   213  		return true
   214  	case OpCvt32Fto32:
   215  		v.Op = OpLOONG64TRUNCFW
   216  		return true
   217  	case OpCvt32Fto64:
   218  		v.Op = OpLOONG64TRUNCFV
   219  		return true
   220  	case OpCvt32Fto64F:
   221  		v.Op = OpLOONG64MOVFD
   222  		return true
   223  	case OpCvt32to32F:
   224  		v.Op = OpLOONG64MOVWF
   225  		return true
   226  	case OpCvt32to64F:
   227  		v.Op = OpLOONG64MOVWD
   228  		return true
   229  	case OpCvt64Fto32:
   230  		v.Op = OpLOONG64TRUNCDW
   231  		return true
   232  	case OpCvt64Fto32F:
   233  		v.Op = OpLOONG64MOVDF
   234  		return true
   235  	case OpCvt64Fto64:
   236  		v.Op = OpLOONG64TRUNCDV
   237  		return true
   238  	case OpCvt64to32F:
   239  		v.Op = OpLOONG64MOVVF
   240  		return true
   241  	case OpCvt64to64F:
   242  		v.Op = OpLOONG64MOVVD
   243  		return true
   244  	case OpCvtBoolToUint8:
   245  		v.Op = OpCopy
   246  		return true
   247  	case OpDiv16:
   248  		return rewriteValueLOONG64_OpDiv16(v)
   249  	case OpDiv16u:
   250  		return rewriteValueLOONG64_OpDiv16u(v)
   251  	case OpDiv32:
   252  		return rewriteValueLOONG64_OpDiv32(v)
   253  	case OpDiv32F:
   254  		v.Op = OpLOONG64DIVF
   255  		return true
   256  	case OpDiv32u:
   257  		return rewriteValueLOONG64_OpDiv32u(v)
   258  	case OpDiv64:
   259  		return rewriteValueLOONG64_OpDiv64(v)
   260  	case OpDiv64F:
   261  		v.Op = OpLOONG64DIVD
   262  		return true
   263  	case OpDiv64u:
   264  		v.Op = OpLOONG64DIVVU
   265  		return true
   266  	case OpDiv8:
   267  		return rewriteValueLOONG64_OpDiv8(v)
   268  	case OpDiv8u:
   269  		return rewriteValueLOONG64_OpDiv8u(v)
   270  	case OpEq16:
   271  		return rewriteValueLOONG64_OpEq16(v)
   272  	case OpEq32:
   273  		return rewriteValueLOONG64_OpEq32(v)
   274  	case OpEq32F:
   275  		return rewriteValueLOONG64_OpEq32F(v)
   276  	case OpEq64:
   277  		return rewriteValueLOONG64_OpEq64(v)
   278  	case OpEq64F:
   279  		return rewriteValueLOONG64_OpEq64F(v)
   280  	case OpEq8:
   281  		return rewriteValueLOONG64_OpEq8(v)
   282  	case OpEqB:
   283  		return rewriteValueLOONG64_OpEqB(v)
   284  	case OpEqPtr:
   285  		return rewriteValueLOONG64_OpEqPtr(v)
   286  	case OpFMA:
   287  		v.Op = OpLOONG64FMADDD
   288  		return true
   289  	case OpGetCallerPC:
   290  		v.Op = OpLOONG64LoweredGetCallerPC
   291  		return true
   292  	case OpGetCallerSP:
   293  		v.Op = OpLOONG64LoweredGetCallerSP
   294  		return true
   295  	case OpGetClosurePtr:
   296  		v.Op = OpLOONG64LoweredGetClosurePtr
   297  		return true
   298  	case OpHmul32:
   299  		return rewriteValueLOONG64_OpHmul32(v)
   300  	case OpHmul32u:
   301  		return rewriteValueLOONG64_OpHmul32u(v)
   302  	case OpHmul64:
   303  		v.Op = OpLOONG64MULHV
   304  		return true
   305  	case OpHmul64u:
   306  		v.Op = OpLOONG64MULHVU
   307  		return true
   308  	case OpInterCall:
   309  		v.Op = OpLOONG64CALLinter
   310  		return true
   311  	case OpIsInBounds:
   312  		return rewriteValueLOONG64_OpIsInBounds(v)
   313  	case OpIsNonNil:
   314  		return rewriteValueLOONG64_OpIsNonNil(v)
   315  	case OpIsSliceInBounds:
   316  		return rewriteValueLOONG64_OpIsSliceInBounds(v)
   317  	case OpLOONG64ADDD:
   318  		return rewriteValueLOONG64_OpLOONG64ADDD(v)
   319  	case OpLOONG64ADDF:
   320  		return rewriteValueLOONG64_OpLOONG64ADDF(v)
   321  	case OpLOONG64ADDV:
   322  		return rewriteValueLOONG64_OpLOONG64ADDV(v)
   323  	case OpLOONG64ADDVconst:
   324  		return rewriteValueLOONG64_OpLOONG64ADDVconst(v)
   325  	case OpLOONG64AND:
   326  		return rewriteValueLOONG64_OpLOONG64AND(v)
   327  	case OpLOONG64ANDconst:
   328  		return rewriteValueLOONG64_OpLOONG64ANDconst(v)
   329  	case OpLOONG64DIVV:
   330  		return rewriteValueLOONG64_OpLOONG64DIVV(v)
   331  	case OpLOONG64DIVVU:
   332  		return rewriteValueLOONG64_OpLOONG64DIVVU(v)
   333  	case OpLOONG64LoweredPanicBoundsCR:
   334  		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR(v)
   335  	case OpLOONG64LoweredPanicBoundsRC:
   336  		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC(v)
   337  	case OpLOONG64LoweredPanicBoundsRR:
   338  		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR(v)
   339  	case OpLOONG64MASKEQZ:
   340  		return rewriteValueLOONG64_OpLOONG64MASKEQZ(v)
   341  	case OpLOONG64MASKNEZ:
   342  		return rewriteValueLOONG64_OpLOONG64MASKNEZ(v)
   343  	case OpLOONG64MOVBUload:
   344  		return rewriteValueLOONG64_OpLOONG64MOVBUload(v)
   345  	case OpLOONG64MOVBUloadidx:
   346  		return rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v)
   347  	case OpLOONG64MOVBUreg:
   348  		return rewriteValueLOONG64_OpLOONG64MOVBUreg(v)
   349  	case OpLOONG64MOVBload:
   350  		return rewriteValueLOONG64_OpLOONG64MOVBload(v)
   351  	case OpLOONG64MOVBloadidx:
   352  		return rewriteValueLOONG64_OpLOONG64MOVBloadidx(v)
   353  	case OpLOONG64MOVBreg:
   354  		return rewriteValueLOONG64_OpLOONG64MOVBreg(v)
   355  	case OpLOONG64MOVBstore:
   356  		return rewriteValueLOONG64_OpLOONG64MOVBstore(v)
   357  	case OpLOONG64MOVBstoreidx:
   358  		return rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v)
   359  	case OpLOONG64MOVBstorezero:
   360  		return rewriteValueLOONG64_OpLOONG64MOVBstorezero(v)
   361  	case OpLOONG64MOVBstorezeroidx:
   362  		return rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx(v)
   363  	case OpLOONG64MOVDload:
   364  		return rewriteValueLOONG64_OpLOONG64MOVDload(v)
   365  	case OpLOONG64MOVDloadidx:
   366  		return rewriteValueLOONG64_OpLOONG64MOVDloadidx(v)
   367  	case OpLOONG64MOVDstore:
   368  		return rewriteValueLOONG64_OpLOONG64MOVDstore(v)
   369  	case OpLOONG64MOVDstoreidx:
   370  		return rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v)
   371  	case OpLOONG64MOVFload:
   372  		return rewriteValueLOONG64_OpLOONG64MOVFload(v)
   373  	case OpLOONG64MOVFloadidx:
   374  		return rewriteValueLOONG64_OpLOONG64MOVFloadidx(v)
   375  	case OpLOONG64MOVFstore:
   376  		return rewriteValueLOONG64_OpLOONG64MOVFstore(v)
   377  	case OpLOONG64MOVFstoreidx:
   378  		return rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v)
   379  	case OpLOONG64MOVHUload:
   380  		return rewriteValueLOONG64_OpLOONG64MOVHUload(v)
   381  	case OpLOONG64MOVHUloadidx:
   382  		return rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v)
   383  	case OpLOONG64MOVHUreg:
   384  		return rewriteValueLOONG64_OpLOONG64MOVHUreg(v)
   385  	case OpLOONG64MOVHload:
   386  		return rewriteValueLOONG64_OpLOONG64MOVHload(v)
   387  	case OpLOONG64MOVHloadidx:
   388  		return rewriteValueLOONG64_OpLOONG64MOVHloadidx(v)
   389  	case OpLOONG64MOVHreg:
   390  		return rewriteValueLOONG64_OpLOONG64MOVHreg(v)
   391  	case OpLOONG64MOVHstore:
   392  		return rewriteValueLOONG64_OpLOONG64MOVHstore(v)
   393  	case OpLOONG64MOVHstoreidx:
   394  		return rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v)
   395  	case OpLOONG64MOVHstorezero:
   396  		return rewriteValueLOONG64_OpLOONG64MOVHstorezero(v)
   397  	case OpLOONG64MOVHstorezeroidx:
   398  		return rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx(v)
   399  	case OpLOONG64MOVVload:
   400  		return rewriteValueLOONG64_OpLOONG64MOVVload(v)
   401  	case OpLOONG64MOVVloadidx:
   402  		return rewriteValueLOONG64_OpLOONG64MOVVloadidx(v)
   403  	case OpLOONG64MOVVnop:
   404  		return rewriteValueLOONG64_OpLOONG64MOVVnop(v)
   405  	case OpLOONG64MOVVreg:
   406  		return rewriteValueLOONG64_OpLOONG64MOVVreg(v)
   407  	case OpLOONG64MOVVstore:
   408  		return rewriteValueLOONG64_OpLOONG64MOVVstore(v)
   409  	case OpLOONG64MOVVstoreidx:
   410  		return rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v)
   411  	case OpLOONG64MOVVstorezero:
   412  		return rewriteValueLOONG64_OpLOONG64MOVVstorezero(v)
   413  	case OpLOONG64MOVVstorezeroidx:
   414  		return rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx(v)
   415  	case OpLOONG64MOVWUload:
   416  		return rewriteValueLOONG64_OpLOONG64MOVWUload(v)
   417  	case OpLOONG64MOVWUloadidx:
   418  		return rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v)
   419  	case OpLOONG64MOVWUreg:
   420  		return rewriteValueLOONG64_OpLOONG64MOVWUreg(v)
   421  	case OpLOONG64MOVWload:
   422  		return rewriteValueLOONG64_OpLOONG64MOVWload(v)
   423  	case OpLOONG64MOVWloadidx:
   424  		return rewriteValueLOONG64_OpLOONG64MOVWloadidx(v)
   425  	case OpLOONG64MOVWreg:
   426  		return rewriteValueLOONG64_OpLOONG64MOVWreg(v)
   427  	case OpLOONG64MOVWstore:
   428  		return rewriteValueLOONG64_OpLOONG64MOVWstore(v)
   429  	case OpLOONG64MOVWstoreidx:
   430  		return rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v)
   431  	case OpLOONG64MOVWstorezero:
   432  		return rewriteValueLOONG64_OpLOONG64MOVWstorezero(v)
   433  	case OpLOONG64MOVWstorezeroidx:
   434  		return rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx(v)
   435  	case OpLOONG64MULV:
   436  		return rewriteValueLOONG64_OpLOONG64MULV(v)
   437  	case OpLOONG64NEGV:
   438  		return rewriteValueLOONG64_OpLOONG64NEGV(v)
   439  	case OpLOONG64NOR:
   440  		return rewriteValueLOONG64_OpLOONG64NOR(v)
   441  	case OpLOONG64NORconst:
   442  		return rewriteValueLOONG64_OpLOONG64NORconst(v)
   443  	case OpLOONG64OR:
   444  		return rewriteValueLOONG64_OpLOONG64OR(v)
   445  	case OpLOONG64ORN:
   446  		return rewriteValueLOONG64_OpLOONG64ORN(v)
   447  	case OpLOONG64ORconst:
   448  		return rewriteValueLOONG64_OpLOONG64ORconst(v)
   449  	case OpLOONG64REMV:
   450  		return rewriteValueLOONG64_OpLOONG64REMV(v)
   451  	case OpLOONG64REMVU:
   452  		return rewriteValueLOONG64_OpLOONG64REMVU(v)
   453  	case OpLOONG64ROTR:
   454  		return rewriteValueLOONG64_OpLOONG64ROTR(v)
   455  	case OpLOONG64ROTRV:
   456  		return rewriteValueLOONG64_OpLOONG64ROTRV(v)
   457  	case OpLOONG64SGT:
   458  		return rewriteValueLOONG64_OpLOONG64SGT(v)
   459  	case OpLOONG64SGTU:
   460  		return rewriteValueLOONG64_OpLOONG64SGTU(v)
   461  	case OpLOONG64SGTUconst:
   462  		return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
   463  	case OpLOONG64SGTconst:
   464  		return rewriteValueLOONG64_OpLOONG64SGTconst(v)
   465  	case OpLOONG64SLL:
   466  		return rewriteValueLOONG64_OpLOONG64SLL(v)
   467  	case OpLOONG64SLLV:
   468  		return rewriteValueLOONG64_OpLOONG64SLLV(v)
   469  	case OpLOONG64SLLVconst:
   470  		return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
   471  	case OpLOONG64SRA:
   472  		return rewriteValueLOONG64_OpLOONG64SRA(v)
   473  	case OpLOONG64SRAV:
   474  		return rewriteValueLOONG64_OpLOONG64SRAV(v)
   475  	case OpLOONG64SRAVconst:
   476  		return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
   477  	case OpLOONG64SRL:
   478  		return rewriteValueLOONG64_OpLOONG64SRL(v)
   479  	case OpLOONG64SRLV:
   480  		return rewriteValueLOONG64_OpLOONG64SRLV(v)
   481  	case OpLOONG64SRLVconst:
   482  		return rewriteValueLOONG64_OpLOONG64SRLVconst(v)
   483  	case OpLOONG64SUBD:
   484  		return rewriteValueLOONG64_OpLOONG64SUBD(v)
   485  	case OpLOONG64SUBF:
   486  		return rewriteValueLOONG64_OpLOONG64SUBF(v)
   487  	case OpLOONG64SUBV:
   488  		return rewriteValueLOONG64_OpLOONG64SUBV(v)
   489  	case OpLOONG64SUBVconst:
   490  		return rewriteValueLOONG64_OpLOONG64SUBVconst(v)
   491  	case OpLOONG64XOR:
   492  		return rewriteValueLOONG64_OpLOONG64XOR(v)
   493  	case OpLOONG64XORconst:
   494  		return rewriteValueLOONG64_OpLOONG64XORconst(v)
   495  	case OpLeq16:
   496  		return rewriteValueLOONG64_OpLeq16(v)
   497  	case OpLeq16U:
   498  		return rewriteValueLOONG64_OpLeq16U(v)
   499  	case OpLeq32:
   500  		return rewriteValueLOONG64_OpLeq32(v)
   501  	case OpLeq32F:
   502  		return rewriteValueLOONG64_OpLeq32F(v)
   503  	case OpLeq32U:
   504  		return rewriteValueLOONG64_OpLeq32U(v)
   505  	case OpLeq64:
   506  		return rewriteValueLOONG64_OpLeq64(v)
   507  	case OpLeq64F:
   508  		return rewriteValueLOONG64_OpLeq64F(v)
   509  	case OpLeq64U:
   510  		return rewriteValueLOONG64_OpLeq64U(v)
   511  	case OpLeq8:
   512  		return rewriteValueLOONG64_OpLeq8(v)
   513  	case OpLeq8U:
   514  		return rewriteValueLOONG64_OpLeq8U(v)
   515  	case OpLess16:
   516  		return rewriteValueLOONG64_OpLess16(v)
   517  	case OpLess16U:
   518  		return rewriteValueLOONG64_OpLess16U(v)
   519  	case OpLess32:
   520  		return rewriteValueLOONG64_OpLess32(v)
   521  	case OpLess32F:
   522  		return rewriteValueLOONG64_OpLess32F(v)
   523  	case OpLess32U:
   524  		return rewriteValueLOONG64_OpLess32U(v)
   525  	case OpLess64:
   526  		return rewriteValueLOONG64_OpLess64(v)
   527  	case OpLess64F:
   528  		return rewriteValueLOONG64_OpLess64F(v)
   529  	case OpLess64U:
   530  		return rewriteValueLOONG64_OpLess64U(v)
   531  	case OpLess8:
   532  		return rewriteValueLOONG64_OpLess8(v)
   533  	case OpLess8U:
   534  		return rewriteValueLOONG64_OpLess8U(v)
   535  	case OpLoad:
   536  		return rewriteValueLOONG64_OpLoad(v)
   537  	case OpLocalAddr:
   538  		return rewriteValueLOONG64_OpLocalAddr(v)
   539  	case OpLsh16x16:
   540  		return rewriteValueLOONG64_OpLsh16x16(v)
   541  	case OpLsh16x32:
   542  		return rewriteValueLOONG64_OpLsh16x32(v)
   543  	case OpLsh16x64:
   544  		return rewriteValueLOONG64_OpLsh16x64(v)
   545  	case OpLsh16x8:
   546  		return rewriteValueLOONG64_OpLsh16x8(v)
   547  	case OpLsh32x16:
   548  		return rewriteValueLOONG64_OpLsh32x16(v)
   549  	case OpLsh32x32:
   550  		return rewriteValueLOONG64_OpLsh32x32(v)
   551  	case OpLsh32x64:
   552  		return rewriteValueLOONG64_OpLsh32x64(v)
   553  	case OpLsh32x8:
   554  		return rewriteValueLOONG64_OpLsh32x8(v)
   555  	case OpLsh64x16:
   556  		return rewriteValueLOONG64_OpLsh64x16(v)
   557  	case OpLsh64x32:
   558  		return rewriteValueLOONG64_OpLsh64x32(v)
   559  	case OpLsh64x64:
   560  		return rewriteValueLOONG64_OpLsh64x64(v)
   561  	case OpLsh64x8:
   562  		return rewriteValueLOONG64_OpLsh64x8(v)
   563  	case OpLsh8x16:
   564  		return rewriteValueLOONG64_OpLsh8x16(v)
   565  	case OpLsh8x32:
   566  		return rewriteValueLOONG64_OpLsh8x32(v)
   567  	case OpLsh8x64:
   568  		return rewriteValueLOONG64_OpLsh8x64(v)
   569  	case OpLsh8x8:
   570  		return rewriteValueLOONG64_OpLsh8x8(v)
   571  	case OpMax32F:
   572  		v.Op = OpLOONG64FMAXF
   573  		return true
   574  	case OpMax64F:
   575  		v.Op = OpLOONG64FMAXD
   576  		return true
   577  	case OpMin32F:
   578  		v.Op = OpLOONG64FMINF
   579  		return true
   580  	case OpMin64F:
   581  		v.Op = OpLOONG64FMIND
   582  		return true
   583  	case OpMod16:
   584  		return rewriteValueLOONG64_OpMod16(v)
   585  	case OpMod16u:
   586  		return rewriteValueLOONG64_OpMod16u(v)
   587  	case OpMod32:
   588  		return rewriteValueLOONG64_OpMod32(v)
   589  	case OpMod32u:
   590  		return rewriteValueLOONG64_OpMod32u(v)
   591  	case OpMod64:
   592  		return rewriteValueLOONG64_OpMod64(v)
   593  	case OpMod64u:
   594  		v.Op = OpLOONG64REMVU
   595  		return true
   596  	case OpMod8:
   597  		return rewriteValueLOONG64_OpMod8(v)
   598  	case OpMod8u:
   599  		return rewriteValueLOONG64_OpMod8u(v)
   600  	case OpMove:
   601  		return rewriteValueLOONG64_OpMove(v)
   602  	case OpMul16:
   603  		v.Op = OpLOONG64MULV
   604  		return true
   605  	case OpMul32:
   606  		v.Op = OpLOONG64MULV
   607  		return true
   608  	case OpMul32F:
   609  		v.Op = OpLOONG64MULF
   610  		return true
   611  	case OpMul64:
   612  		v.Op = OpLOONG64MULV
   613  		return true
   614  	case OpMul64F:
   615  		v.Op = OpLOONG64MULD
   616  		return true
   617  	case OpMul8:
   618  		v.Op = OpLOONG64MULV
   619  		return true
   620  	case OpNeg16:
   621  		v.Op = OpLOONG64NEGV
   622  		return true
   623  	case OpNeg32:
   624  		v.Op = OpLOONG64NEGV
   625  		return true
   626  	case OpNeg32F:
   627  		v.Op = OpLOONG64NEGF
   628  		return true
   629  	case OpNeg64:
   630  		v.Op = OpLOONG64NEGV
   631  		return true
   632  	case OpNeg64F:
   633  		v.Op = OpLOONG64NEGD
   634  		return true
   635  	case OpNeg8:
   636  		v.Op = OpLOONG64NEGV
   637  		return true
   638  	case OpNeq16:
   639  		return rewriteValueLOONG64_OpNeq16(v)
   640  	case OpNeq32:
   641  		return rewriteValueLOONG64_OpNeq32(v)
   642  	case OpNeq32F:
   643  		return rewriteValueLOONG64_OpNeq32F(v)
   644  	case OpNeq64:
   645  		return rewriteValueLOONG64_OpNeq64(v)
   646  	case OpNeq64F:
   647  		return rewriteValueLOONG64_OpNeq64F(v)
   648  	case OpNeq8:
   649  		return rewriteValueLOONG64_OpNeq8(v)
   650  	case OpNeqB:
   651  		v.Op = OpLOONG64XOR
   652  		return true
   653  	case OpNeqPtr:
   654  		return rewriteValueLOONG64_OpNeqPtr(v)
   655  	case OpNilCheck:
   656  		v.Op = OpLOONG64LoweredNilCheck
   657  		return true
   658  	case OpNot:
   659  		return rewriteValueLOONG64_OpNot(v)
   660  	case OpOffPtr:
   661  		return rewriteValueLOONG64_OpOffPtr(v)
   662  	case OpOr16:
   663  		v.Op = OpLOONG64OR
   664  		return true
   665  	case OpOr32:
   666  		v.Op = OpLOONG64OR
   667  		return true
   668  	case OpOr64:
   669  		v.Op = OpLOONG64OR
   670  		return true
   671  	case OpOr8:
   672  		v.Op = OpLOONG64OR
   673  		return true
   674  	case OpOrB:
   675  		v.Op = OpLOONG64OR
   676  		return true
   677  	case OpPanicBounds:
   678  		v.Op = OpLOONG64LoweredPanicBoundsRR
   679  		return true
   680  	case OpPopCount16:
   681  		return rewriteValueLOONG64_OpPopCount16(v)
   682  	case OpPopCount32:
   683  		return rewriteValueLOONG64_OpPopCount32(v)
   684  	case OpPopCount64:
   685  		return rewriteValueLOONG64_OpPopCount64(v)
   686  	case OpPrefetchCache:
   687  		return rewriteValueLOONG64_OpPrefetchCache(v)
   688  	case OpPrefetchCacheStreamed:
   689  		return rewriteValueLOONG64_OpPrefetchCacheStreamed(v)
   690  	case OpPubBarrier:
   691  		v.Op = OpLOONG64LoweredPubBarrier
   692  		return true
   693  	case OpRotateLeft16:
   694  		return rewriteValueLOONG64_OpRotateLeft16(v)
   695  	case OpRotateLeft32:
   696  		return rewriteValueLOONG64_OpRotateLeft32(v)
   697  	case OpRotateLeft64:
   698  		return rewriteValueLOONG64_OpRotateLeft64(v)
   699  	case OpRotateLeft8:
   700  		return rewriteValueLOONG64_OpRotateLeft8(v)
   701  	case OpRound32F:
   702  		v.Op = OpLOONG64LoweredRound32F
   703  		return true
   704  	case OpRound64F:
   705  		v.Op = OpLOONG64LoweredRound64F
   706  		return true
   707  	case OpRsh16Ux16:
   708  		return rewriteValueLOONG64_OpRsh16Ux16(v)
   709  	case OpRsh16Ux32:
   710  		return rewriteValueLOONG64_OpRsh16Ux32(v)
   711  	case OpRsh16Ux64:
   712  		return rewriteValueLOONG64_OpRsh16Ux64(v)
   713  	case OpRsh16Ux8:
   714  		return rewriteValueLOONG64_OpRsh16Ux8(v)
   715  	case OpRsh16x16:
   716  		return rewriteValueLOONG64_OpRsh16x16(v)
   717  	case OpRsh16x32:
   718  		return rewriteValueLOONG64_OpRsh16x32(v)
   719  	case OpRsh16x64:
   720  		return rewriteValueLOONG64_OpRsh16x64(v)
   721  	case OpRsh16x8:
   722  		return rewriteValueLOONG64_OpRsh16x8(v)
   723  	case OpRsh32Ux16:
   724  		return rewriteValueLOONG64_OpRsh32Ux16(v)
   725  	case OpRsh32Ux32:
   726  		return rewriteValueLOONG64_OpRsh32Ux32(v)
   727  	case OpRsh32Ux64:
   728  		return rewriteValueLOONG64_OpRsh32Ux64(v)
   729  	case OpRsh32Ux8:
   730  		return rewriteValueLOONG64_OpRsh32Ux8(v)
   731  	case OpRsh32x16:
   732  		return rewriteValueLOONG64_OpRsh32x16(v)
   733  	case OpRsh32x32:
   734  		return rewriteValueLOONG64_OpRsh32x32(v)
   735  	case OpRsh32x64:
   736  		return rewriteValueLOONG64_OpRsh32x64(v)
   737  	case OpRsh32x8:
   738  		return rewriteValueLOONG64_OpRsh32x8(v)
   739  	case OpRsh64Ux16:
   740  		return rewriteValueLOONG64_OpRsh64Ux16(v)
   741  	case OpRsh64Ux32:
   742  		return rewriteValueLOONG64_OpRsh64Ux32(v)
   743  	case OpRsh64Ux64:
   744  		return rewriteValueLOONG64_OpRsh64Ux64(v)
   745  	case OpRsh64Ux8:
   746  		return rewriteValueLOONG64_OpRsh64Ux8(v)
   747  	case OpRsh64x16:
   748  		return rewriteValueLOONG64_OpRsh64x16(v)
   749  	case OpRsh64x32:
   750  		return rewriteValueLOONG64_OpRsh64x32(v)
   751  	case OpRsh64x64:
   752  		return rewriteValueLOONG64_OpRsh64x64(v)
   753  	case OpRsh64x8:
   754  		return rewriteValueLOONG64_OpRsh64x8(v)
   755  	case OpRsh8Ux16:
   756  		return rewriteValueLOONG64_OpRsh8Ux16(v)
   757  	case OpRsh8Ux32:
   758  		return rewriteValueLOONG64_OpRsh8Ux32(v)
   759  	case OpRsh8Ux64:
   760  		return rewriteValueLOONG64_OpRsh8Ux64(v)
   761  	case OpRsh8Ux8:
   762  		return rewriteValueLOONG64_OpRsh8Ux8(v)
   763  	case OpRsh8x16:
   764  		return rewriteValueLOONG64_OpRsh8x16(v)
   765  	case OpRsh8x32:
   766  		return rewriteValueLOONG64_OpRsh8x32(v)
   767  	case OpRsh8x64:
   768  		return rewriteValueLOONG64_OpRsh8x64(v)
   769  	case OpRsh8x8:
   770  		return rewriteValueLOONG64_OpRsh8x8(v)
   771  	case OpSelect0:
   772  		return rewriteValueLOONG64_OpSelect0(v)
   773  	case OpSelect1:
   774  		return rewriteValueLOONG64_OpSelect1(v)
   775  	case OpSelectN:
   776  		return rewriteValueLOONG64_OpSelectN(v)
   777  	case OpSignExt16to32:
   778  		v.Op = OpLOONG64MOVHreg
   779  		return true
   780  	case OpSignExt16to64:
   781  		v.Op = OpLOONG64MOVHreg
   782  		return true
   783  	case OpSignExt32to64:
   784  		v.Op = OpLOONG64MOVWreg
   785  		return true
   786  	case OpSignExt8to16:
   787  		v.Op = OpLOONG64MOVBreg
   788  		return true
   789  	case OpSignExt8to32:
   790  		v.Op = OpLOONG64MOVBreg
   791  		return true
   792  	case OpSignExt8to64:
   793  		v.Op = OpLOONG64MOVBreg
   794  		return true
   795  	case OpSlicemask:
   796  		return rewriteValueLOONG64_OpSlicemask(v)
   797  	case OpSqrt:
   798  		v.Op = OpLOONG64SQRTD
   799  		return true
   800  	case OpSqrt32:
   801  		v.Op = OpLOONG64SQRTF
   802  		return true
   803  	case OpStaticCall:
   804  		v.Op = OpLOONG64CALLstatic
   805  		return true
   806  	case OpStore:
   807  		return rewriteValueLOONG64_OpStore(v)
   808  	case OpSub16:
   809  		v.Op = OpLOONG64SUBV
   810  		return true
   811  	case OpSub32:
   812  		v.Op = OpLOONG64SUBV
   813  		return true
   814  	case OpSub32F:
   815  		v.Op = OpLOONG64SUBF
   816  		return true
   817  	case OpSub64:
   818  		v.Op = OpLOONG64SUBV
   819  		return true
   820  	case OpSub64F:
   821  		v.Op = OpLOONG64SUBD
   822  		return true
   823  	case OpSub8:
   824  		v.Op = OpLOONG64SUBV
   825  		return true
   826  	case OpSubPtr:
   827  		v.Op = OpLOONG64SUBV
   828  		return true
   829  	case OpTailCall:
   830  		v.Op = OpLOONG64CALLtail
   831  		return true
   832  	case OpTrunc16to8:
   833  		v.Op = OpCopy
   834  		return true
   835  	case OpTrunc32to16:
   836  		v.Op = OpCopy
   837  		return true
   838  	case OpTrunc32to8:
   839  		v.Op = OpCopy
   840  		return true
   841  	case OpTrunc64to16:
   842  		v.Op = OpCopy
   843  		return true
   844  	case OpTrunc64to32:
   845  		v.Op = OpCopy
   846  		return true
   847  	case OpTrunc64to8:
   848  		v.Op = OpCopy
   849  		return true
   850  	case OpWB:
   851  		v.Op = OpLOONG64LoweredWB
   852  		return true
   853  	case OpXor16:
   854  		v.Op = OpLOONG64XOR
   855  		return true
   856  	case OpXor32:
   857  		v.Op = OpLOONG64XOR
   858  		return true
   859  	case OpXor64:
   860  		v.Op = OpLOONG64XOR
   861  		return true
   862  	case OpXor8:
   863  		v.Op = OpLOONG64XOR
   864  		return true
   865  	case OpZero:
   866  		return rewriteValueLOONG64_OpZero(v)
   867  	case OpZeroExt16to32:
   868  		v.Op = OpLOONG64MOVHUreg
   869  		return true
   870  	case OpZeroExt16to64:
   871  		v.Op = OpLOONG64MOVHUreg
   872  		return true
   873  	case OpZeroExt32to64:
   874  		v.Op = OpLOONG64MOVWUreg
   875  		return true
   876  	case OpZeroExt8to16:
   877  		v.Op = OpLOONG64MOVBUreg
   878  		return true
   879  	case OpZeroExt8to32:
   880  		v.Op = OpLOONG64MOVBUreg
   881  		return true
   882  	case OpZeroExt8to64:
   883  		v.Op = OpLOONG64MOVBUreg
   884  		return true
   885  	}
   886  	return false
   887  }
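// rewriteValueLOONG64 is the per-Value dispatcher that rulegen emits from
// _gen/LOONG64.rules: ops that lower one-to-one onto a LOONG64 op are handled
// inline by overwriting v.Op, while ops whose rules need to inspect arguments,
// aux values, or conditions dispatch to a rewriteValueLOONG64_OpXxx helper.
// Returning false tells the rewrite driver that no rule fired for this value
// on this pass. In the rules file a one-to-one lowering reads roughly:
//
//	(Add64 ...) => (ADDV ...)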
   888  func rewriteValueLOONG64_OpAddr(v *Value) bool {
   889  	v_0 := v.Args[0]
   890  	// match: (Addr {sym} base)
   891  	// result: (MOVVaddr {sym} base)
   892  	for {
   893  		sym := auxToSym(v.Aux)
   894  		base := v_0
   895  		v.reset(OpLOONG64MOVVaddr)
   896  		v.Aux = symToAux(sym)
   897  		v.AddArg(base)
   898  		return true
   899  	}
   900  }
   901  func rewriteValueLOONG64_OpAtomicAnd8(v *Value) bool {
   902  	v_2 := v.Args[2]
   903  	v_1 := v.Args[1]
   904  	v_0 := v.Args[0]
   905  	b := v.Block
   906  	typ := &b.Func.Config.Types
   907  	// match: (AtomicAnd8 ptr val mem)
   908  	// result: (LoweredAtomicAnd32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (NORconst [0] <typ.UInt32> (SLLV <typ.UInt32> (XORconst <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr)))) mem)
   909  	for {
   910  		ptr := v_0
   911  		val := v_1
   912  		mem := v_2
   913  		v.reset(OpLOONG64LoweredAtomicAnd32)
   914  		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
   915  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   916  		v1.AuxInt = int64ToAuxInt(^3)
   917  		v0.AddArg2(v1, ptr)
   918  		v2 := b.NewValue0(v.Pos, OpLOONG64NORconst, typ.UInt32)
   919  		v2.AuxInt = int64ToAuxInt(0)
   920  		v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
   921  		v4 := b.NewValue0(v.Pos, OpLOONG64XORconst, typ.UInt32)
   922  		v4.AuxInt = int64ToAuxInt(0xff)
   923  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   924  		v5.AddArg(val)
   925  		v4.AddArg(v5)
   926  		v6 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
   927  		v6.AuxInt = int64ToAuxInt(3)
   928  		v7 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
   929  		v7.AuxInt = int64ToAuxInt(3)
   930  		v7.AddArg(ptr)
   931  		v6.AddArg(v7)
   932  		v3.AddArg2(v4, v6)
   933  		v2.AddArg(v3)
   934  		v.AddArg3(v0, v2, mem)
   935  		return true
   936  	}
   937  }
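// AtomicAnd8 has no byte-sized hardware primitive, so the rule above widens it
// to a 32-bit atomic AND on the containing 4-byte-aligned word: AND with ^3
// aligns the pointer, (ptr&3)*8 is the bit offset of the byte within that word
// (loong64 is little-endian), and the NORconst [0] of the shifted complement
// builds an operand that holds val in the target byte and all-one bits
// elsewhere, so the wide AND leaves the neighbouring bytes untouched.
//
// atomicAnd8MaskSketch is a hypothetical, illustrative helper (not part of the
// generated rules) showing the same mask arithmetic in plain Go.
func atomicAnd8MaskSketch(ptr uintptr, val uint8) (alignedPtr uintptr, operand uint32) {
	shift := (ptr & 3) * 8             // bit offset of the byte inside its aligned word
	operand = ^(uint32(^val) << shift) // target byte holds val, every other bit is 1
	return ptr &^ 3, operand
}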
   938  func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool {
   939  	v_3 := v.Args[3]
   940  	v_2 := v.Args[2]
   941  	v_1 := v.Args[1]
   942  	v_0 := v.Args[0]
   943  	b := v.Block
   944  	typ := &b.Func.Config.Types
   945  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   946  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   947  	for {
   948  		ptr := v_0
   949  		old := v_1
   950  		new := v_2
   951  		mem := v_3
   952  		v.reset(OpLOONG64LoweredAtomicCas32)
   953  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   954  		v0.AddArg(old)
   955  		v.AddArg4(ptr, v0, new, mem)
   956  		return true
   957  	}
   958  }
   959  func rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v *Value) bool {
   960  	v_3 := v.Args[3]
   961  	v_2 := v.Args[2]
   962  	v_1 := v.Args[1]
   963  	v_0 := v.Args[0]
   964  	b := v.Block
   965  	typ := &b.Func.Config.Types
   966  	// match: (AtomicCompareAndSwap32Variant ptr old new mem)
   967  	// result: (LoweredAtomicCas32Variant ptr (SignExt32to64 old) new mem)
   968  	for {
   969  		ptr := v_0
   970  		old := v_1
   971  		new := v_2
   972  		mem := v_3
   973  		v.reset(OpLOONG64LoweredAtomicCas32Variant)
   974  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   975  		v0.AddArg(old)
   976  		v.AddArg4(ptr, v0, new, mem)
   977  		return true
   978  	}
   979  }
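// Both 32-bit compare-and-swap forms sign-extend the expected value `old`,
// presumably so that it matches the sign-extended word loaded inside the cas
// loop (32-bit quantities are kept sign-extended in loong64 registers). The
// 64-bit variants in the switch above need no such adjustment.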
   980  func rewriteValueLOONG64_OpAtomicOr8(v *Value) bool {
   981  	v_2 := v.Args[2]
   982  	v_1 := v.Args[1]
   983  	v_0 := v.Args[0]
   984  	b := v.Block
   985  	typ := &b.Func.Config.Types
   986  	// match: (AtomicOr8 ptr val mem)
   987  	// result: (LoweredAtomicOr32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
   988  	for {
   989  		ptr := v_0
   990  		val := v_1
   991  		mem := v_2
   992  		v.reset(OpLOONG64LoweredAtomicOr32)
   993  		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
   994  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   995  		v1.AuxInt = int64ToAuxInt(^3)
   996  		v0.AddArg2(v1, ptr)
   997  		v2 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
   998  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   999  		v3.AddArg(val)
  1000  		v4 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
  1001  		v4.AuxInt = int64ToAuxInt(3)
  1002  		v5 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
  1003  		v5.AuxInt = int64ToAuxInt(3)
  1004  		v5.AddArg(ptr)
  1005  		v4.AddArg(v5)
  1006  		v2.AddArg2(v3, v4)
  1007  		v.AddArg3(v0, v2, mem)
  1008  		return true
  1009  	}
  1010  }
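// AtomicOr8 uses the same word-widening trick as AtomicAnd8 above, but it is
// simpler: for OR, zero bits leave the neighbouring bytes unchanged, so the
// operand is just the zero-extended byte shifted into position and no
// complement/NOR step is needed.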
  1011  func rewriteValueLOONG64_OpAvg64u(v *Value) bool {
  1012  	v_1 := v.Args[1]
  1013  	v_0 := v.Args[0]
  1014  	b := v.Block
  1015  	// match: (Avg64u <t> x y)
  1016  	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
  1017  	for {
  1018  		t := v.Type
  1019  		x := v_0
  1020  		y := v_1
  1021  		v.reset(OpLOONG64ADDV)
  1022  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t)
  1023  		v0.AuxInt = int64ToAuxInt(1)
  1024  		v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
  1025  		v1.AddArg2(x, y)
  1026  		v0.AddArg(v1)
  1027  		v.AddArg2(v0, y)
  1028  		return true
  1029  	}
  1030  }
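// Avg64u is lowered to y + (x-y)/2, which equals (x+y)/2 without ever forming
// the 65-bit sum x+y. The identity needs x >= y (otherwise the subtraction
// wraps), which appears to be the invariant under which the generic Avg64u op
// is produced by the unsigned-division lowering.
//
// avg64uSketch is a hypothetical, illustrative helper (not generated) that
// states the same identity in plain Go.
func avg64uSketch(x, y uint64) uint64 {
	return (x-y)>>1 + y // == (x+y)/2 when x >= y, with no 65-bit intermediate
}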
  1031  func rewriteValueLOONG64_OpBitLen16(v *Value) bool {
  1032  	v_0 := v.Args[0]
  1033  	b := v.Block
  1034  	typ := &b.Func.Config.Types
  1035  	// match: (BitLen16 x)
  1036  	// result: (BitLen64 (ZeroExt16to64 x))
  1037  	for {
  1038  		x := v_0
  1039  		v.reset(OpBitLen64)
  1040  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1041  		v0.AddArg(x)
  1042  		v.AddArg(v0)
  1043  		return true
  1044  	}
  1045  }
  1046  func rewriteValueLOONG64_OpBitLen32(v *Value) bool {
  1047  	v_0 := v.Args[0]
  1048  	b := v.Block
  1049  	// match: (BitLen32 <t> x)
  1050  	// result: (NEGV <t> (SUBVconst <t> [32] (CLZW <t> x)))
  1051  	for {
  1052  		t := v.Type
  1053  		x := v_0
  1054  		v.reset(OpLOONG64NEGV)
  1055  		v.Type = t
  1056  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
  1057  		v0.AuxInt = int64ToAuxInt(32)
  1058  		v1 := b.NewValue0(v.Pos, OpLOONG64CLZW, t)
  1059  		v1.AddArg(x)
  1060  		v0.AddArg(v1)
  1061  		v.AddArg(v0)
  1062  		return true
  1063  	}
  1064  }
  1065  func rewriteValueLOONG64_OpBitLen64(v *Value) bool {
  1066  	v_0 := v.Args[0]
  1067  	b := v.Block
  1068  	// match: (BitLen64 <t> x)
  1069  	// result: (NEGV <t> (SUBVconst <t> [64] (CLZV <t> x)))
  1070  	for {
  1071  		t := v.Type
  1072  		x := v_0
  1073  		v.reset(OpLOONG64NEGV)
  1074  		v.Type = t
  1075  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
  1076  		v0.AuxInt = int64ToAuxInt(64)
  1077  		v1 := b.NewValue0(v.Pos, OpLOONG64CLZV, t)
  1078  		v1.AddArg(x)
  1079  		v0.AddArg(v1)
  1080  		v.AddArg(v0)
  1081  		return true
  1082  	}
  1083  }
  1084  func rewriteValueLOONG64_OpBitLen8(v *Value) bool {
  1085  	v_0 := v.Args[0]
  1086  	b := v.Block
  1087  	typ := &b.Func.Config.Types
  1088  	// match: (BitLen8 x)
  1089  	// result: (BitLen64 (ZeroExt8to64 x))
  1090  	for {
  1091  		x := v_0
  1092  		v.reset(OpBitLen64)
  1093  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1094  		v0.AddArg(x)
  1095  		v.AddArg(v0)
  1096  		return true
  1097  	}
  1098  }
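// The BitLen rules derive bit length from count-leading-zeros: CLZW/CLZV give
// clz(x), SUBVconst [n] computes clz(x)-n, and the outer NEGV flips that to
// n-clz(x), i.e. the number of significant bits. BitLen8 and BitLen16 simply
// zero-extend and reuse the 64-bit rule.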
  1099  func rewriteValueLOONG64_OpBitRev16(v *Value) bool {
  1100  	v_0 := v.Args[0]
  1101  	b := v.Block
  1102  	// match: (BitRev16 <t> x)
  1103  	// result: (REVB2H (BITREV4B <t> x))
  1104  	for {
  1105  		t := v.Type
  1106  		x := v_0
  1107  		v.reset(OpLOONG64REVB2H)
  1108  		v0 := b.NewValue0(v.Pos, OpLOONG64BITREV4B, t)
  1109  		v0.AddArg(x)
  1110  		v.AddArg(v0)
  1111  		return true
  1112  	}
  1113  }
  1114  func rewriteValueLOONG64_OpCom16(v *Value) bool {
  1115  	v_0 := v.Args[0]
  1116  	b := v.Block
  1117  	typ := &b.Func.Config.Types
  1118  	// match: (Com16 x)
  1119  	// result: (NOR (MOVVconst [0]) x)
  1120  	for {
  1121  		x := v_0
  1122  		v.reset(OpLOONG64NOR)
  1123  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1124  		v0.AuxInt = int64ToAuxInt(0)
  1125  		v.AddArg2(v0, x)
  1126  		return true
  1127  	}
  1128  }
  1129  func rewriteValueLOONG64_OpCom32(v *Value) bool {
  1130  	v_0 := v.Args[0]
  1131  	b := v.Block
  1132  	typ := &b.Func.Config.Types
  1133  	// match: (Com32 x)
  1134  	// result: (NOR (MOVVconst [0]) x)
  1135  	for {
  1136  		x := v_0
  1137  		v.reset(OpLOONG64NOR)
  1138  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1139  		v0.AuxInt = int64ToAuxInt(0)
  1140  		v.AddArg2(v0, x)
  1141  		return true
  1142  	}
  1143  }
  1144  func rewriteValueLOONG64_OpCom64(v *Value) bool {
  1145  	v_0 := v.Args[0]
  1146  	b := v.Block
  1147  	typ := &b.Func.Config.Types
  1148  	// match: (Com64 x)
  1149  	// result: (NOR (MOVVconst [0]) x)
  1150  	for {
  1151  		x := v_0
  1152  		v.reset(OpLOONG64NOR)
  1153  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1154  		v0.AuxInt = int64ToAuxInt(0)
  1155  		v.AddArg2(v0, x)
  1156  		return true
  1157  	}
  1158  }
  1159  func rewriteValueLOONG64_OpCom8(v *Value) bool {
  1160  	v_0 := v.Args[0]
  1161  	b := v.Block
  1162  	typ := &b.Func.Config.Types
  1163  	// match: (Com8 x)
  1164  	// result: (NOR (MOVVconst [0]) x)
  1165  	for {
  1166  		x := v_0
  1167  		v.reset(OpLOONG64NOR)
  1168  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1169  		v0.AuxInt = int64ToAuxInt(0)
  1170  		v.AddArg2(v0, x)
  1171  		return true
  1172  	}
  1173  }
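// Bitwise complement (Com8 through Com64) is expressed as NOR with a zero
// constant rather than a dedicated NOT, since ^(0|x) == ^x.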
  1174  func rewriteValueLOONG64_OpCondSelect(v *Value) bool {
  1175  	v_2 := v.Args[2]
  1176  	v_1 := v.Args[1]
  1177  	v_0 := v.Args[0]
  1178  	b := v.Block
  1179  	// match: (CondSelect <t> x y cond)
  1180  	// result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond))
  1181  	for {
  1182  		t := v.Type
  1183  		x := v_0
  1184  		y := v_1
  1185  		cond := v_2
  1186  		v.reset(OpLOONG64OR)
  1187  		v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t)
  1188  		v0.AddArg2(x, cond)
  1189  		v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t)
  1190  		v1.AddArg2(y, cond)
  1191  		v.AddArg2(v0, v1)
  1192  		return true
  1193  	}
  1194  }
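// CondSelect becomes a branch-free select: MASKEQZ x cond yields x when cond
// is non-zero and 0 otherwise, MASKNEZ y cond yields y when cond is zero and
// 0 otherwise, so OR-ing the two produces x if cond else y.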
  1195  func rewriteValueLOONG64_OpConst16(v *Value) bool {
  1196  	// match: (Const16 [val])
  1197  	// result: (MOVVconst [int64(val)])
  1198  	for {
  1199  		val := auxIntToInt16(v.AuxInt)
  1200  		v.reset(OpLOONG64MOVVconst)
  1201  		v.AuxInt = int64ToAuxInt(int64(val))
  1202  		return true
  1203  	}
  1204  }
  1205  func rewriteValueLOONG64_OpConst32(v *Value) bool {
  1206  	// match: (Const32 [val])
  1207  	// result: (MOVVconst [int64(val)])
  1208  	for {
  1209  		val := auxIntToInt32(v.AuxInt)
  1210  		v.reset(OpLOONG64MOVVconst)
  1211  		v.AuxInt = int64ToAuxInt(int64(val))
  1212  		return true
  1213  	}
  1214  }
  1215  func rewriteValueLOONG64_OpConst32F(v *Value) bool {
  1216  	// match: (Const32F [val])
  1217  	// result: (MOVFconst [float64(val)])
  1218  	for {
  1219  		val := auxIntToFloat32(v.AuxInt)
  1220  		v.reset(OpLOONG64MOVFconst)
  1221  		v.AuxInt = float64ToAuxInt(float64(val))
  1222  		return true
  1223  	}
  1224  }
  1225  func rewriteValueLOONG64_OpConst64(v *Value) bool {
  1226  	// match: (Const64 [val])
  1227  	// result: (MOVVconst [int64(val)])
  1228  	for {
  1229  		val := auxIntToInt64(v.AuxInt)
  1230  		v.reset(OpLOONG64MOVVconst)
  1231  		v.AuxInt = int64ToAuxInt(int64(val))
  1232  		return true
  1233  	}
  1234  }
  1235  func rewriteValueLOONG64_OpConst64F(v *Value) bool {
  1236  	// match: (Const64F [val])
  1237  	// result: (MOVDconst [float64(val)])
  1238  	for {
  1239  		val := auxIntToFloat64(v.AuxInt)
  1240  		v.reset(OpLOONG64MOVDconst)
  1241  		v.AuxInt = float64ToAuxInt(float64(val))
  1242  		return true
  1243  	}
  1244  }
  1245  func rewriteValueLOONG64_OpConst8(v *Value) bool {
  1246  	// match: (Const8 [val])
  1247  	// result: (MOVVconst [int64(val)])
  1248  	for {
  1249  		val := auxIntToInt8(v.AuxInt)
  1250  		v.reset(OpLOONG64MOVVconst)
  1251  		v.AuxInt = int64ToAuxInt(int64(val))
  1252  		return true
  1253  	}
  1254  }
  1255  func rewriteValueLOONG64_OpConstBool(v *Value) bool {
  1256  	// match: (ConstBool [t])
  1257  	// result: (MOVVconst [int64(b2i(t))])
  1258  	for {
  1259  		t := auxIntToBool(v.AuxInt)
  1260  		v.reset(OpLOONG64MOVVconst)
  1261  		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
  1262  		return true
  1263  	}
  1264  }
  1265  func rewriteValueLOONG64_OpConstNil(v *Value) bool {
  1266  	// match: (ConstNil)
  1267  	// result: (MOVVconst [0])
  1268  	for {
  1269  		v.reset(OpLOONG64MOVVconst)
  1270  		v.AuxInt = int64ToAuxInt(0)
  1271  		return true
  1272  	}
  1273  }
  1274  func rewriteValueLOONG64_OpCtz16(v *Value) bool {
  1275  	v_0 := v.Args[0]
  1276  	b := v.Block
  1277  	typ := &b.Func.Config.Types
  1278  	// match: (Ctz16 x)
  1279  	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<16])))
  1280  	for {
  1281  		x := v_0
  1282  		v.reset(OpLOONG64CTZV)
  1283  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
  1284  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1285  		v1.AuxInt = int64ToAuxInt(1 << 16)
  1286  		v0.AddArg2(x, v1)
  1287  		v.AddArg(v0)
  1288  		return true
  1289  	}
  1290  }
  1291  func rewriteValueLOONG64_OpCtz8(v *Value) bool {
  1292  	v_0 := v.Args[0]
  1293  	b := v.Block
  1294  	typ := &b.Func.Config.Types
  1295  	// match: (Ctz8 x)
  1296  	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<8])))
  1297  	for {
  1298  		x := v_0
  1299  		v.reset(OpLOONG64CTZV)
  1300  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
  1301  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1302  		v1.AuxInt = int64ToAuxInt(1 << 8)
  1303  		v0.AddArg2(x, v1)
  1304  		v.AddArg(v0)
  1305  		return true
  1306  	}
  1307  }
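// For sub-word Ctz, a sentinel bit one past the operand's width is OR-ed in
// (1<<16 for Ctz16, 1<<8 for Ctz8) so that an all-zero input yields 16 or 8
// rather than CTZV's 64. The CtzNNNonZero ops in the switch above may assume
// a non-zero input and therefore lower straight to Ctz64.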
  1308  func rewriteValueLOONG64_OpDiv16(v *Value) bool {
  1309  	v_1 := v.Args[1]
  1310  	v_0 := v.Args[0]
  1311  	b := v.Block
  1312  	typ := &b.Func.Config.Types
  1313  	// match: (Div16 x y)
  1314  	// result: (DIVV (SignExt16to64 x) (SignExt16to64 y))
  1315  	for {
  1316  		x := v_0
  1317  		y := v_1
  1318  		v.reset(OpLOONG64DIVV)
  1319  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1320  		v0.AddArg(x)
  1321  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1322  		v1.AddArg(y)
  1323  		v.AddArg2(v0, v1)
  1324  		return true
  1325  	}
  1326  }
  1327  func rewriteValueLOONG64_OpDiv16u(v *Value) bool {
  1328  	v_1 := v.Args[1]
  1329  	v_0 := v.Args[0]
  1330  	b := v.Block
  1331  	typ := &b.Func.Config.Types
  1332  	// match: (Div16u x y)
  1333  	// result: (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))
  1334  	for {
  1335  		x := v_0
  1336  		y := v_1
  1337  		v.reset(OpLOONG64DIVVU)
  1338  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1339  		v0.AddArg(x)
  1340  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1341  		v1.AddArg(y)
  1342  		v.AddArg2(v0, v1)
  1343  		return true
  1344  	}
  1345  }
  1346  func rewriteValueLOONG64_OpDiv32(v *Value) bool {
  1347  	v_1 := v.Args[1]
  1348  	v_0 := v.Args[0]
  1349  	b := v.Block
  1350  	typ := &b.Func.Config.Types
  1351  	// match: (Div32 x y)
  1352  	// result: (DIVV (SignExt32to64 x) (SignExt32to64 y))
  1353  	for {
  1354  		x := v_0
  1355  		y := v_1
  1356  		v.reset(OpLOONG64DIVV)
  1357  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1358  		v0.AddArg(x)
  1359  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1360  		v1.AddArg(y)
  1361  		v.AddArg2(v0, v1)
  1362  		return true
  1363  	}
  1364  }
  1365  func rewriteValueLOONG64_OpDiv32u(v *Value) bool {
  1366  	v_1 := v.Args[1]
  1367  	v_0 := v.Args[0]
  1368  	b := v.Block
  1369  	typ := &b.Func.Config.Types
  1370  	// match: (Div32u x y)
  1371  	// result: (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))
  1372  	for {
  1373  		x := v_0
  1374  		y := v_1
  1375  		v.reset(OpLOONG64DIVVU)
  1376  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1377  		v0.AddArg(x)
  1378  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1379  		v1.AddArg(y)
  1380  		v.AddArg2(v0, v1)
  1381  		return true
  1382  	}
  1383  }
  1384  func rewriteValueLOONG64_OpDiv64(v *Value) bool {
  1385  	v_1 := v.Args[1]
  1386  	v_0 := v.Args[0]
  1387  	// match: (Div64 x y)
  1388  	// result: (DIVV x y)
  1389  	for {
  1390  		x := v_0
  1391  		y := v_1
  1392  		v.reset(OpLOONG64DIVV)
  1393  		v.AddArg2(x, y)
  1394  		return true
  1395  	}
  1396  }
  1397  func rewriteValueLOONG64_OpDiv8(v *Value) bool {
  1398  	v_1 := v.Args[1]
  1399  	v_0 := v.Args[0]
  1400  	b := v.Block
  1401  	typ := &b.Func.Config.Types
  1402  	// match: (Div8 x y)
  1403  	// result: (DIVV (SignExt8to64 x) (SignExt8to64 y))
  1404  	for {
  1405  		x := v_0
  1406  		y := v_1
  1407  		v.reset(OpLOONG64DIVV)
  1408  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1409  		v0.AddArg(x)
  1410  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1411  		v1.AddArg(y)
  1412  		v.AddArg2(v0, v1)
  1413  		return true
  1414  	}
  1415  }
  1416  func rewriteValueLOONG64_OpDiv8u(v *Value) bool {
  1417  	v_1 := v.Args[1]
  1418  	v_0 := v.Args[0]
  1419  	b := v.Block
  1420  	typ := &b.Func.Config.Types
  1421  	// match: (Div8u x y)
  1422  	// result: (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))
  1423  	for {
  1424  		x := v_0
  1425  		y := v_1
  1426  		v.reset(OpLOONG64DIVVU)
  1427  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1428  		v0.AddArg(x)
  1429  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1430  		v1.AddArg(y)
  1431  		v.AddArg2(v0, v1)
  1432  		return true
  1433  	}
  1434  }
  1435  func rewriteValueLOONG64_OpEq16(v *Value) bool {
  1436  	v_1 := v.Args[1]
  1437  	v_0 := v.Args[0]
  1438  	b := v.Block
  1439  	typ := &b.Func.Config.Types
  1440  	// match: (Eq16 x y)
  1441  	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
  1442  	for {
  1443  		x := v_0
  1444  		y := v_1
  1445  		v.reset(OpLOONG64SGTU)
  1446  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1447  		v0.AuxInt = int64ToAuxInt(1)
  1448  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1449  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1450  		v2.AddArg(x)
  1451  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1452  		v3.AddArg(y)
  1453  		v1.AddArg2(v2, v3)
  1454  		v.AddArg2(v0, v1)
  1455  		return true
  1456  	}
  1457  }
  1458  func rewriteValueLOONG64_OpEq32(v *Value) bool {
  1459  	v_1 := v.Args[1]
  1460  	v_0 := v.Args[0]
  1461  	b := v.Block
  1462  	typ := &b.Func.Config.Types
  1463  	// match: (Eq32 x y)
  1464  	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
  1465  	for {
  1466  		x := v_0
  1467  		y := v_1
  1468  		v.reset(OpLOONG64SGTU)
  1469  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1470  		v0.AuxInt = int64ToAuxInt(1)
  1471  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1472  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1473  		v2.AddArg(x)
  1474  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1475  		v3.AddArg(y)
  1476  		v1.AddArg2(v2, v3)
  1477  		v.AddArg2(v0, v1)
  1478  		return true
  1479  	}
  1480  }
  1481  func rewriteValueLOONG64_OpEq32F(v *Value) bool {
  1482  	v_1 := v.Args[1]
  1483  	v_0 := v.Args[0]
  1484  	b := v.Block
  1485  	// match: (Eq32F x y)
  1486  	// result: (FPFlagTrue (CMPEQF x y))
  1487  	for {
  1488  		x := v_0
  1489  		y := v_1
  1490  		v.reset(OpLOONG64FPFlagTrue)
  1491  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
  1492  		v0.AddArg2(x, y)
  1493  		v.AddArg(v0)
  1494  		return true
  1495  	}
  1496  }
  1497  func rewriteValueLOONG64_OpEq64(v *Value) bool {
  1498  	v_1 := v.Args[1]
  1499  	v_0 := v.Args[0]
  1500  	b := v.Block
  1501  	typ := &b.Func.Config.Types
  1502  	// match: (Eq64 x y)
  1503  	// result: (SGTU (MOVVconst [1]) (XOR x y))
  1504  	for {
  1505  		x := v_0
  1506  		y := v_1
  1507  		v.reset(OpLOONG64SGTU)
  1508  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1509  		v0.AuxInt = int64ToAuxInt(1)
  1510  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1511  		v1.AddArg2(x, y)
  1512  		v.AddArg2(v0, v1)
  1513  		return true
  1514  	}
  1515  }
  1516  func rewriteValueLOONG64_OpEq64F(v *Value) bool {
  1517  	v_1 := v.Args[1]
  1518  	v_0 := v.Args[0]
  1519  	b := v.Block
  1520  	// match: (Eq64F x y)
  1521  	// result: (FPFlagTrue (CMPEQD x y))
  1522  	for {
  1523  		x := v_0
  1524  		y := v_1
  1525  		v.reset(OpLOONG64FPFlagTrue)
  1526  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
  1527  		v0.AddArg2(x, y)
  1528  		v.AddArg(v0)
  1529  		return true
  1530  	}
  1531  }
  1532  func rewriteValueLOONG64_OpEq8(v *Value) bool {
  1533  	v_1 := v.Args[1]
  1534  	v_0 := v.Args[0]
  1535  	b := v.Block
  1536  	typ := &b.Func.Config.Types
  1537  	// match: (Eq8 x y)
  1538  	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
  1539  	for {
  1540  		x := v_0
  1541  		y := v_1
  1542  		v.reset(OpLOONG64SGTU)
  1543  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1544  		v0.AuxInt = int64ToAuxInt(1)
  1545  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1546  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1547  		v2.AddArg(x)
  1548  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1549  		v3.AddArg(y)
  1550  		v1.AddArg2(v2, v3)
  1551  		v.AddArg2(v0, v1)
  1552  		return true
  1553  	}
  1554  }
  1555  func rewriteValueLOONG64_OpEqB(v *Value) bool {
  1556  	v_1 := v.Args[1]
  1557  	v_0 := v.Args[0]
  1558  	b := v.Block
  1559  	typ := &b.Func.Config.Types
  1560  	// match: (EqB x y)
  1561  	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
  1562  	for {
  1563  		x := v_0
  1564  		y := v_1
  1565  		v.reset(OpLOONG64XOR)
  1566  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1567  		v0.AuxInt = int64ToAuxInt(1)
  1568  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool)
  1569  		v1.AddArg2(x, y)
  1570  		v.AddArg2(v0, v1)
  1571  		return true
  1572  	}
  1573  }
  1574  func rewriteValueLOONG64_OpEqPtr(v *Value) bool {
  1575  	v_1 := v.Args[1]
  1576  	v_0 := v.Args[0]
  1577  	b := v.Block
  1578  	typ := &b.Func.Config.Types
  1579  	// match: (EqPtr x y)
  1580  	// result: (SGTU (MOVVconst [1]) (XOR x y))
  1581  	for {
  1582  		x := v_0
  1583  		y := v_1
  1584  		v.reset(OpLOONG64SGTU)
  1585  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1586  		v0.AuxInt = int64ToAuxInt(1)
  1587  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1588  		v1.AddArg2(x, y)
  1589  		v.AddArg2(v0, v1)
  1590  		return true
  1591  	}
  1592  }
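// Integer equality has no dedicated instruction, so x == y is computed as
// SGTU (MOVVconst [1]) (XOR x y): the XOR is zero exactly when the operands
// are equal, and 1 >u 0 yields 1 while 1 >u nonzero yields 0. Sub-word
// operands are zero-extended first so stale high bits cannot affect the XOR.
// EqB, whose operands are already 0 or 1, uses the cheaper 1 XOR (x XOR y),
// and the float comparisons go through CMPEQF/CMPEQD plus FPFlagTrue.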
  1593  func rewriteValueLOONG64_OpHmul32(v *Value) bool {
  1594  	v_1 := v.Args[1]
  1595  	v_0 := v.Args[0]
  1596  	b := v.Block
  1597  	typ := &b.Func.Config.Types
  1598  	// match: (Hmul32 x y)
  1599  	// result: (SRAVconst (MULV (SignExt32to64 x) (SignExt32to64 y)) [32])
  1600  	for {
  1601  		x := v_0
  1602  		y := v_1
  1603  		v.reset(OpLOONG64SRAVconst)
  1604  		v.AuxInt = int64ToAuxInt(32)
  1605  		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
  1606  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1607  		v1.AddArg(x)
  1608  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1609  		v2.AddArg(y)
  1610  		v0.AddArg2(v1, v2)
  1611  		v.AddArg(v0)
  1612  		return true
  1613  	}
  1614  }
  1615  func rewriteValueLOONG64_OpHmul32u(v *Value) bool {
  1616  	v_1 := v.Args[1]
  1617  	v_0 := v.Args[0]
  1618  	b := v.Block
  1619  	typ := &b.Func.Config.Types
  1620  	// match: (Hmul32u x y)
  1621  	// result: (SRLVconst (MULV (ZeroExt32to64 x) (ZeroExt32to64 y)) [32])
  1622  	for {
  1623  		x := v_0
  1624  		y := v_1
  1625  		v.reset(OpLOONG64SRLVconst)
  1626  		v.AuxInt = int64ToAuxInt(32)
  1627  		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
  1628  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1629  		v1.AddArg(x)
  1630  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1631  		v2.AddArg(y)
  1632  		v0.AddArg2(v1, v2)
  1633  		v.AddArg(v0)
  1634  		return true
  1635  	}
  1636  }
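// There is no 32-bit high-multiply: Hmul32 and Hmul32u extend both operands
// to 64 bits, multiply with MULV, and take the upper half with an arithmetic
// (signed) or logical (unsigned) right shift by 32.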
  1637  func rewriteValueLOONG64_OpIsInBounds(v *Value) bool {
  1638  	v_1 := v.Args[1]
  1639  	v_0 := v.Args[0]
  1640  	// match: (IsInBounds idx len)
  1641  	// result: (SGTU len idx)
  1642  	for {
  1643  		idx := v_0
  1644  		len := v_1
  1645  		v.reset(OpLOONG64SGTU)
  1646  		v.AddArg2(len, idx)
  1647  		return true
  1648  	}
  1649  }
  1650  func rewriteValueLOONG64_OpIsNonNil(v *Value) bool {
  1651  	v_0 := v.Args[0]
  1652  	b := v.Block
  1653  	typ := &b.Func.Config.Types
  1654  	// match: (IsNonNil ptr)
  1655  	// result: (SGTU ptr (MOVVconst [0]))
  1656  	for {
  1657  		ptr := v_0
  1658  		v.reset(OpLOONG64SGTU)
  1659  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1660  		v0.AuxInt = int64ToAuxInt(0)
  1661  		v.AddArg2(ptr, v0)
  1662  		return true
  1663  	}
  1664  }
  1665  func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool {
  1666  	v_1 := v.Args[1]
  1667  	v_0 := v.Args[0]
  1668  	b := v.Block
  1669  	typ := &b.Func.Config.Types
  1670  	// match: (IsSliceInBounds idx len)
  1671  	// result: (XOR (MOVVconst [1]) (SGTU idx len))
  1672  	for {
  1673  		idx := v_0
  1674  		len := v_1
  1675  		v.reset(OpLOONG64XOR)
  1676  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1677  		v0.AuxInt = int64ToAuxInt(1)
  1678  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  1679  		v1.AddArg2(idx, len)
  1680  		v.AddArg2(v0, v1)
  1681  		return true
  1682  	}
  1683  }
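// Bounds checks are unsigned comparisons: IsInBounds (idx < len) is
// SGTU len idx, and IsSliceInBounds (idx <= len) is the negation of
// idx > len, i.e. XOR (MOVVconst [1]) (SGTU idx len). IsNonNil likewise
// reduces to SGTU ptr 0.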
  1684  func rewriteValueLOONG64_OpLOONG64ADDD(v *Value) bool {
  1685  	v_1 := v.Args[1]
  1686  	v_0 := v.Args[0]
  1687  	// match: (ADDD (MULD x y) z)
  1688  	// cond: z.Block.Func.useFMA(v)
  1689  	// result: (FMADDD x y z)
  1690  	for {
  1691  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1692  			if v_0.Op != OpLOONG64MULD {
  1693  				continue
  1694  			}
  1695  			y := v_0.Args[1]
  1696  			x := v_0.Args[0]
  1697  			z := v_1
  1698  			if !(z.Block.Func.useFMA(v)) {
  1699  				continue
  1700  			}
  1701  			v.reset(OpLOONG64FMADDD)
  1702  			v.AddArg3(x, y, z)
  1703  			return true
  1704  		}
  1705  		break
  1706  	}
  1707  	// match: (ADDD z (NEGD (MULD x y)))
  1708  	// cond: z.Block.Func.useFMA(v)
  1709  	// result: (FNMSUBD x y z)
  1710  	for {
  1711  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1712  			z := v_0
  1713  			if v_1.Op != OpLOONG64NEGD {
  1714  				continue
  1715  			}
  1716  			v_1_0 := v_1.Args[0]
  1717  			if v_1_0.Op != OpLOONG64MULD {
  1718  				continue
  1719  			}
  1720  			y := v_1_0.Args[1]
  1721  			x := v_1_0.Args[0]
  1722  			if !(z.Block.Func.useFMA(v)) {
  1723  				continue
  1724  			}
  1725  			v.reset(OpLOONG64FNMSUBD)
  1726  			v.AddArg3(x, y, z)
  1727  			return true
  1728  		}
  1729  		break
  1730  	}
  1731  	return false
  1732  }
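// Note (illustrative, not a generated rule): the two ADDD rules above fuse a
// multiply into a single FMA-family instruction when useFMA permits it. Read
// off the rewrites, FMADDD x y z computes x*y + z and FNMSUBD x y z computes
// z - x*y. The fused forms round once instead of twice, which is why the
// rewrite is gated on useFMA rather than applied unconditionally; the ADDF
// rules that follow are the float32 counterparts.
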
  1733  func rewriteValueLOONG64_OpLOONG64ADDF(v *Value) bool {
  1734  	v_1 := v.Args[1]
  1735  	v_0 := v.Args[0]
  1736  	// match: (ADDF (MULF x y) z)
  1737  	// cond: z.Block.Func.useFMA(v)
  1738  	// result: (FMADDF x y z)
  1739  	for {
  1740  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1741  			if v_0.Op != OpLOONG64MULF {
  1742  				continue
  1743  			}
  1744  			y := v_0.Args[1]
  1745  			x := v_0.Args[0]
  1746  			z := v_1
  1747  			if !(z.Block.Func.useFMA(v)) {
  1748  				continue
  1749  			}
  1750  			v.reset(OpLOONG64FMADDF)
  1751  			v.AddArg3(x, y, z)
  1752  			return true
  1753  		}
  1754  		break
  1755  	}
  1756  	// match: (ADDF z (NEGF (MULF x y)))
  1757  	// cond: z.Block.Func.useFMA(v)
  1758  	// result: (FNMSUBF x y z)
  1759  	for {
  1760  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1761  			z := v_0
  1762  			if v_1.Op != OpLOONG64NEGF {
  1763  				continue
  1764  			}
  1765  			v_1_0 := v_1.Args[0]
  1766  			if v_1_0.Op != OpLOONG64MULF {
  1767  				continue
  1768  			}
  1769  			y := v_1_0.Args[1]
  1770  			x := v_1_0.Args[0]
  1771  			if !(z.Block.Func.useFMA(v)) {
  1772  				continue
  1773  			}
  1774  			v.reset(OpLOONG64FNMSUBF)
  1775  			v.AddArg3(x, y, z)
  1776  			return true
  1777  		}
  1778  		break
  1779  	}
  1780  	return false
  1781  }
  1782  func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool {
  1783  	v_1 := v.Args[1]
  1784  	v_0 := v.Args[0]
  1785  	// match: (ADDV x (MOVVconst <t> [c]))
  1786  	// cond: is32Bit(c) && !t.IsPtr()
  1787  	// result: (ADDVconst [c] x)
  1788  	for {
  1789  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1790  			x := v_0
  1791  			if v_1.Op != OpLOONG64MOVVconst {
  1792  				continue
  1793  			}
  1794  			t := v_1.Type
  1795  			c := auxIntToInt64(v_1.AuxInt)
  1796  			if !(is32Bit(c) && !t.IsPtr()) {
  1797  				continue
  1798  			}
  1799  			v.reset(OpLOONG64ADDVconst)
  1800  			v.AuxInt = int64ToAuxInt(c)
  1801  			v.AddArg(x)
  1802  			return true
  1803  		}
  1804  		break
  1805  	}
  1806  	// match: (ADDV x (NEGV y))
  1807  	// result: (SUBV x y)
  1808  	for {
  1809  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1810  			x := v_0
  1811  			if v_1.Op != OpLOONG64NEGV {
  1812  				continue
  1813  			}
  1814  			y := v_1.Args[0]
  1815  			v.reset(OpLOONG64SUBV)
  1816  			v.AddArg2(x, y)
  1817  			return true
  1818  		}
  1819  		break
  1820  	}
  1821  	return false
  1822  }
  1823  func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool {
  1824  	v_0 := v.Args[0]
  1825  	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
  1826  	// cond: is32Bit(off1+int64(off2))
  1827  	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
  1828  	for {
  1829  		off1 := auxIntToInt64(v.AuxInt)
  1830  		if v_0.Op != OpLOONG64MOVVaddr {
  1831  			break
  1832  		}
  1833  		off2 := auxIntToInt32(v_0.AuxInt)
  1834  		sym := auxToSym(v_0.Aux)
  1835  		ptr := v_0.Args[0]
  1836  		if !(is32Bit(off1 + int64(off2))) {
  1837  			break
  1838  		}
  1839  		v.reset(OpLOONG64MOVVaddr)
  1840  		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
  1841  		v.Aux = symToAux(sym)
  1842  		v.AddArg(ptr)
  1843  		return true
  1844  	}
  1845  	// match: (ADDVconst [0] x)
  1846  	// result: x
  1847  	for {
  1848  		if auxIntToInt64(v.AuxInt) != 0 {
  1849  			break
  1850  		}
  1851  		x := v_0
  1852  		v.copyOf(x)
  1853  		return true
  1854  	}
  1855  	// match: (ADDVconst [c] (MOVVconst [d]))
  1856  	// result: (MOVVconst [c+d])
  1857  	for {
  1858  		c := auxIntToInt64(v.AuxInt)
  1859  		if v_0.Op != OpLOONG64MOVVconst {
  1860  			break
  1861  		}
  1862  		d := auxIntToInt64(v_0.AuxInt)
  1863  		v.reset(OpLOONG64MOVVconst)
  1864  		v.AuxInt = int64ToAuxInt(c + d)
  1865  		return true
  1866  	}
  1867  	// match: (ADDVconst [c] (ADDVconst [d] x))
  1868  	// cond: is32Bit(c+d)
  1869  	// result: (ADDVconst [c+d] x)
  1870  	for {
  1871  		c := auxIntToInt64(v.AuxInt)
  1872  		if v_0.Op != OpLOONG64ADDVconst {
  1873  			break
  1874  		}
  1875  		d := auxIntToInt64(v_0.AuxInt)
  1876  		x := v_0.Args[0]
  1877  		if !(is32Bit(c + d)) {
  1878  			break
  1879  		}
  1880  		v.reset(OpLOONG64ADDVconst)
  1881  		v.AuxInt = int64ToAuxInt(c + d)
  1882  		v.AddArg(x)
  1883  		return true
  1884  	}
  1885  	// match: (ADDVconst [c] (SUBVconst [d] x))
  1886  	// cond: is32Bit(c-d)
  1887  	// result: (ADDVconst [c-d] x)
  1888  	for {
  1889  		c := auxIntToInt64(v.AuxInt)
  1890  		if v_0.Op != OpLOONG64SUBVconst {
  1891  			break
  1892  		}
  1893  		d := auxIntToInt64(v_0.AuxInt)
  1894  		x := v_0.Args[0]
  1895  		if !(is32Bit(c - d)) {
  1896  			break
  1897  		}
  1898  		v.reset(OpLOONG64ADDVconst)
  1899  		v.AuxInt = int64ToAuxInt(c - d)
  1900  		v.AddArg(x)
  1901  		return true
  1902  	}
  1903  	return false
  1904  }
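// Note (illustrative, not a generated rule): the ADDVconst rules above are
// straightforward constant folding. An immediate added to a MOVVaddr is
// absorbed into its offset, ADDVconst [0] disappears, and chained immediates
// combine, e.g. ADDVconst [5] (ADDVconst [7] x) becomes ADDVconst [12] x and
// ADDVconst [5] (SUBVconst [7] x) becomes ADDVconst [-2] x, in each case only
// while the merged immediate still satisfies is32Bit.
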
  1905  func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool {
  1906  	v_1 := v.Args[1]
  1907  	v_0 := v.Args[0]
  1908  	// match: (AND x (MOVVconst [c]))
  1909  	// cond: is32Bit(c)
  1910  	// result: (ANDconst [c] x)
  1911  	for {
  1912  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1913  			x := v_0
  1914  			if v_1.Op != OpLOONG64MOVVconst {
  1915  				continue
  1916  			}
  1917  			c := auxIntToInt64(v_1.AuxInt)
  1918  			if !(is32Bit(c)) {
  1919  				continue
  1920  			}
  1921  			v.reset(OpLOONG64ANDconst)
  1922  			v.AuxInt = int64ToAuxInt(c)
  1923  			v.AddArg(x)
  1924  			return true
  1925  		}
  1926  		break
  1927  	}
  1928  	// match: (AND x x)
  1929  	// result: x
  1930  	for {
  1931  		x := v_0
  1932  		if x != v_1 {
  1933  			break
  1934  		}
  1935  		v.copyOf(x)
  1936  		return true
  1937  	}
  1938  	// match: (AND x (NORconst [0] y))
  1939  	// result: (ANDN x y)
  1940  	for {
  1941  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1942  			x := v_0
  1943  			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
  1944  				continue
  1945  			}
  1946  			y := v_1.Args[0]
  1947  			v.reset(OpLOONG64ANDN)
  1948  			v.AddArg2(x, y)
  1949  			return true
  1950  		}
  1951  		break
  1952  	}
  1953  	return false
  1954  }
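// Illustrative sketch, not part of the generated rules above: NORconst [0] y
// is simply ^y (bitwise NOT), so the last AND rule recognizes x AND NOT y and
// selects the single ANDN instruction. A reference model of the identity (the
// name is hypothetical):
func andnSemanticsSketch(x, y uint64) uint64 {
	return x & ^y // x AND (NOR 0 y), Go's x &^ y
}
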
  1955  func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool {
  1956  	v_0 := v.Args[0]
  1957  	// match: (ANDconst [0] _)
  1958  	// result: (MOVVconst [0])
  1959  	for {
  1960  		if auxIntToInt64(v.AuxInt) != 0 {
  1961  			break
  1962  		}
  1963  		v.reset(OpLOONG64MOVVconst)
  1964  		v.AuxInt = int64ToAuxInt(0)
  1965  		return true
  1966  	}
  1967  	// match: (ANDconst [-1] x)
  1968  	// result: x
  1969  	for {
  1970  		if auxIntToInt64(v.AuxInt) != -1 {
  1971  			break
  1972  		}
  1973  		x := v_0
  1974  		v.copyOf(x)
  1975  		return true
  1976  	}
  1977  	// match: (ANDconst [c] (MOVVconst [d]))
  1978  	// result: (MOVVconst [c&d])
  1979  	for {
  1980  		c := auxIntToInt64(v.AuxInt)
  1981  		if v_0.Op != OpLOONG64MOVVconst {
  1982  			break
  1983  		}
  1984  		d := auxIntToInt64(v_0.AuxInt)
  1985  		v.reset(OpLOONG64MOVVconst)
  1986  		v.AuxInt = int64ToAuxInt(c & d)
  1987  		return true
  1988  	}
  1989  	// match: (ANDconst [c] (ANDconst [d] x))
  1990  	// result: (ANDconst [c&d] x)
  1991  	for {
  1992  		c := auxIntToInt64(v.AuxInt)
  1993  		if v_0.Op != OpLOONG64ANDconst {
  1994  			break
  1995  		}
  1996  		d := auxIntToInt64(v_0.AuxInt)
  1997  		x := v_0.Args[0]
  1998  		v.reset(OpLOONG64ANDconst)
  1999  		v.AuxInt = int64ToAuxInt(c & d)
  2000  		v.AddArg(x)
  2001  		return true
  2002  	}
  2003  	return false
  2004  }
  2005  func rewriteValueLOONG64_OpLOONG64DIVV(v *Value) bool {
  2006  	v_1 := v.Args[1]
  2007  	v_0 := v.Args[0]
  2008  	// match: (DIVV (MOVVconst [c]) (MOVVconst [d]))
  2009  	// cond: d != 0
  2010  	// result: (MOVVconst [c/d])
  2011  	for {
  2012  		if v_0.Op != OpLOONG64MOVVconst {
  2013  			break
  2014  		}
  2015  		c := auxIntToInt64(v_0.AuxInt)
  2016  		if v_1.Op != OpLOONG64MOVVconst {
  2017  			break
  2018  		}
  2019  		d := auxIntToInt64(v_1.AuxInt)
  2020  		if !(d != 0) {
  2021  			break
  2022  		}
  2023  		v.reset(OpLOONG64MOVVconst)
  2024  		v.AuxInt = int64ToAuxInt(c / d)
  2025  		return true
  2026  	}
  2027  	return false
  2028  }
  2029  func rewriteValueLOONG64_OpLOONG64DIVVU(v *Value) bool {
  2030  	v_1 := v.Args[1]
  2031  	v_0 := v.Args[0]
  2032  	// match: (DIVVU x (MOVVconst [1]))
  2033  	// result: x
  2034  	for {
  2035  		x := v_0
  2036  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
  2037  			break
  2038  		}
  2039  		v.copyOf(x)
  2040  		return true
  2041  	}
  2042  	// match: (DIVVU x (MOVVconst [c]))
  2043  	// cond: isPowerOfTwo(c)
  2044  	// result: (SRLVconst [log64(c)] x)
  2045  	for {
  2046  		x := v_0
  2047  		if v_1.Op != OpLOONG64MOVVconst {
  2048  			break
  2049  		}
  2050  		c := auxIntToInt64(v_1.AuxInt)
  2051  		if !(isPowerOfTwo(c)) {
  2052  			break
  2053  		}
  2054  		v.reset(OpLOONG64SRLVconst)
  2055  		v.AuxInt = int64ToAuxInt(log64(c))
  2056  		v.AddArg(x)
  2057  		return true
  2058  	}
  2059  	// match: (DIVVU (MOVVconst [c]) (MOVVconst [d]))
  2060  	// cond: d != 0
  2061  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  2062  	for {
  2063  		if v_0.Op != OpLOONG64MOVVconst {
  2064  			break
  2065  		}
  2066  		c := auxIntToInt64(v_0.AuxInt)
  2067  		if v_1.Op != OpLOONG64MOVVconst {
  2068  			break
  2069  		}
  2070  		d := auxIntToInt64(v_1.AuxInt)
  2071  		if !(d != 0) {
  2072  			break
  2073  		}
  2074  		v.reset(OpLOONG64MOVVconst)
  2075  		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
  2076  		return true
  2077  	}
  2078  	return false
  2079  }
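// Illustrative sketch, not part of the generated rules above: an unsigned
// divide by a power of two is strength-reduced to a logical right shift by
// log64(c). A hand-written reference model (the name and the chosen constant
// are hypothetical):
func divvuPow2SemanticsSketch(x uint64) uint64 {
	const c = 8  // isPowerOfTwo(8) holds and log64(8) == 3
	return x / c // the rule instead emits the equivalent SRLVconst, i.e. x >> 3
}
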
  2080  func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR(v *Value) bool {
  2081  	v_1 := v.Args[1]
  2082  	v_0 := v.Args[0]
  2083  	// match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
  2084  	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
  2085  	for {
  2086  		kind := auxIntToInt64(v.AuxInt)
  2087  		p := auxToPanicBoundsC(v.Aux)
  2088  		if v_0.Op != OpLOONG64MOVVconst {
  2089  			break
  2090  		}
  2091  		c := auxIntToInt64(v_0.AuxInt)
  2092  		mem := v_1
  2093  		v.reset(OpLOONG64LoweredPanicBoundsCC)
  2094  		v.AuxInt = int64ToAuxInt(kind)
  2095  		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
  2096  		v.AddArg(mem)
  2097  		return true
  2098  	}
  2099  	return false
  2100  }
  2101  func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC(v *Value) bool {
  2102  	v_1 := v.Args[1]
  2103  	v_0 := v.Args[0]
  2104  	// match: (LoweredPanicBoundsRC [kind] {p} (MOVVconst [c]) mem)
  2105  	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
  2106  	for {
  2107  		kind := auxIntToInt64(v.AuxInt)
  2108  		p := auxToPanicBoundsC(v.Aux)
  2109  		if v_0.Op != OpLOONG64MOVVconst {
  2110  			break
  2111  		}
  2112  		c := auxIntToInt64(v_0.AuxInt)
  2113  		mem := v_1
  2114  		v.reset(OpLOONG64LoweredPanicBoundsCC)
  2115  		v.AuxInt = int64ToAuxInt(kind)
  2116  		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
  2117  		v.AddArg(mem)
  2118  		return true
  2119  	}
  2120  	return false
  2121  }
  2122  func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR(v *Value) bool {
  2123  	v_2 := v.Args[2]
  2124  	v_1 := v.Args[1]
  2125  	v_0 := v.Args[0]
  2126  	// match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
  2127  	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
  2128  	for {
  2129  		kind := auxIntToInt64(v.AuxInt)
  2130  		x := v_0
  2131  		if v_1.Op != OpLOONG64MOVVconst {
  2132  			break
  2133  		}
  2134  		c := auxIntToInt64(v_1.AuxInt)
  2135  		mem := v_2
  2136  		v.reset(OpLOONG64LoweredPanicBoundsRC)
  2137  		v.AuxInt = int64ToAuxInt(kind)
  2138  		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
  2139  		v.AddArg2(x, mem)
  2140  		return true
  2141  	}
  2142  	// match: (LoweredPanicBoundsRR [kind] (MOVVconst [c]) y mem)
  2143  	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
  2144  	for {
  2145  		kind := auxIntToInt64(v.AuxInt)
  2146  		if v_0.Op != OpLOONG64MOVVconst {
  2147  			break
  2148  		}
  2149  		c := auxIntToInt64(v_0.AuxInt)
  2150  		y := v_1
  2151  		mem := v_2
  2152  		v.reset(OpLOONG64LoweredPanicBoundsCR)
  2153  		v.AuxInt = int64ToAuxInt(kind)
  2154  		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
  2155  		v.AddArg2(y, mem)
  2156  		return true
  2157  	}
  2158  	return false
  2159  }
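// Note (illustrative, not a generated rule): in the three LoweredPanicBounds
// rewrites above, the R/C suffix appears to record, per bound operand and in
// operand order, whether it is still a register value (R) or already a
// constant (C). Each rule fires when a register operand turns out to be a
// MOVVconst and folds its value into the aux: RR becomes RC or CR carrying a
// PanicBoundsC, and RC/CR become CC carrying both constants in a
// PanicBoundsCC.
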
  2160  func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool {
  2161  	v_1 := v.Args[1]
  2162  	v_0 := v.Args[0]
  2163  	// match: (MASKEQZ (MOVVconst [0]) cond)
  2164  	// result: (MOVVconst [0])
  2165  	for {
  2166  		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
  2167  			break
  2168  		}
  2169  		v.reset(OpLOONG64MOVVconst)
  2170  		v.AuxInt = int64ToAuxInt(0)
  2171  		return true
  2172  	}
  2173  	// match: (MASKEQZ x (MOVVconst [c]))
  2174  	// cond: c == 0
  2175  	// result: (MOVVconst [0])
  2176  	for {
  2177  		if v_1.Op != OpLOONG64MOVVconst {
  2178  			break
  2179  		}
  2180  		c := auxIntToInt64(v_1.AuxInt)
  2181  		if !(c == 0) {
  2182  			break
  2183  		}
  2184  		v.reset(OpLOONG64MOVVconst)
  2185  		v.AuxInt = int64ToAuxInt(0)
  2186  		return true
  2187  	}
  2188  	// match: (MASKEQZ x (MOVVconst [c]))
  2189  	// cond: c != 0
  2190  	// result: x
  2191  	for {
  2192  		x := v_0
  2193  		if v_1.Op != OpLOONG64MOVVconst {
  2194  			break
  2195  		}
  2196  		c := auxIntToInt64(v_1.AuxInt)
  2197  		if !(c != 0) {
  2198  			break
  2199  		}
  2200  		v.copyOf(x)
  2201  		return true
  2202  	}
  2203  	return false
  2204  }
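// Illustrative sketch, not part of the generated rules above: MASKEQZ x cond
// yields x when cond is non-zero and 0 when cond is zero, which is why a
// constant cond reduces the whole operation to either x or MOVVconst [0].
// A hand-written reference model (the name is hypothetical):
func maskeqzSemanticsSketch(x, cond int64) int64 {
	if cond != 0 {
		return x
	}
	return 0
}
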
  2205  func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool {
  2206  	v_0 := v.Args[0]
  2207  	// match: (MASKNEZ (MOVVconst [0]) cond)
  2208  	// result: (MOVVconst [0])
  2209  	for {
  2210  		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
  2211  			break
  2212  		}
  2213  		v.reset(OpLOONG64MOVVconst)
  2214  		v.AuxInt = int64ToAuxInt(0)
  2215  		return true
  2216  	}
  2217  	return false
  2218  }
  2219  func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
  2220  	v_1 := v.Args[1]
  2221  	v_0 := v.Args[0]
  2222  	b := v.Block
  2223  	config := b.Func.Config
  2224  	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  2225  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2226  	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
  2227  	for {
  2228  		off1 := auxIntToInt32(v.AuxInt)
  2229  		sym := auxToSym(v.Aux)
  2230  		if v_0.Op != OpLOONG64ADDVconst {
  2231  			break
  2232  		}
  2233  		off2 := auxIntToInt64(v_0.AuxInt)
  2234  		ptr := v_0.Args[0]
  2235  		mem := v_1
  2236  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2237  			break
  2238  		}
  2239  		v.reset(OpLOONG64MOVBUload)
  2240  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2241  		v.Aux = symToAux(sym)
  2242  		v.AddArg2(ptr, mem)
  2243  		return true
  2244  	}
  2245  	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2246  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2247  	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2248  	for {
  2249  		off1 := auxIntToInt32(v.AuxInt)
  2250  		sym1 := auxToSym(v.Aux)
  2251  		if v_0.Op != OpLOONG64MOVVaddr {
  2252  			break
  2253  		}
  2254  		off2 := auxIntToInt32(v_0.AuxInt)
  2255  		sym2 := auxToSym(v_0.Aux)
  2256  		ptr := v_0.Args[0]
  2257  		mem := v_1
  2258  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2259  			break
  2260  		}
  2261  		v.reset(OpLOONG64MOVBUload)
  2262  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2263  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2264  		v.AddArg2(ptr, mem)
  2265  		return true
  2266  	}
  2267  	// match: (MOVBUload [off] {sym} (ADDV ptr idx) mem)
  2268  	// cond: off == 0 && sym == nil
  2269  	// result: (MOVBUloadidx ptr idx mem)
  2270  	for {
  2271  		off := auxIntToInt32(v.AuxInt)
  2272  		sym := auxToSym(v.Aux)
  2273  		if v_0.Op != OpLOONG64ADDV {
  2274  			break
  2275  		}
  2276  		idx := v_0.Args[1]
  2277  		ptr := v_0.Args[0]
  2278  		mem := v_1
  2279  		if !(off == 0 && sym == nil) {
  2280  			break
  2281  		}
  2282  		v.reset(OpLOONG64MOVBUloadidx)
  2283  		v.AddArg3(ptr, idx, mem)
  2284  		return true
  2285  	}
  2286  	return false
  2287  }
  2288  func rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v *Value) bool {
  2289  	v_2 := v.Args[2]
  2290  	v_1 := v.Args[1]
  2291  	v_0 := v.Args[0]
  2292  	// match: (MOVBUloadidx ptr (MOVVconst [c]) mem)
  2293  	// cond: is32Bit(c)
  2294  	// result: (MOVBUload [int32(c)] ptr mem)
  2295  	for {
  2296  		ptr := v_0
  2297  		if v_1.Op != OpLOONG64MOVVconst {
  2298  			break
  2299  		}
  2300  		c := auxIntToInt64(v_1.AuxInt)
  2301  		mem := v_2
  2302  		if !(is32Bit(c)) {
  2303  			break
  2304  		}
  2305  		v.reset(OpLOONG64MOVBUload)
  2306  		v.AuxInt = int32ToAuxInt(int32(c))
  2307  		v.AddArg2(ptr, mem)
  2308  		return true
  2309  	}
  2310  	// match: (MOVBUloadidx (MOVVconst [c]) ptr mem)
  2311  	// cond: is32Bit(c)
  2312  	// result: (MOVBUload [int32(c)] ptr mem)
  2313  	for {
  2314  		if v_0.Op != OpLOONG64MOVVconst {
  2315  			break
  2316  		}
  2317  		c := auxIntToInt64(v_0.AuxInt)
  2318  		ptr := v_1
  2319  		mem := v_2
  2320  		if !(is32Bit(c)) {
  2321  			break
  2322  		}
  2323  		v.reset(OpLOONG64MOVBUload)
  2324  		v.AuxInt = int32ToAuxInt(int32(c))
  2325  		v.AddArg2(ptr, mem)
  2326  		return true
  2327  	}
  2328  	return false
  2329  }
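// Note (illustrative, not a generated rule): the two MOVBUloadidx rewrites
// above are the pattern repeated for the other sized loads, stores, and
// store-zeros below: once either addend of the indexed form is a MOVVconst
// that fits in 32 bits, the constant moves back into the offset of the plain
// addressing form, e.g. MOVBUloadidx ptr (MOVVconst [16]) mem becomes
// MOVBUload [16] ptr mem.
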
  2330  func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool {
  2331  	v_0 := v.Args[0]
  2332  	// match: (MOVBUreg (SRLVconst [rc] x))
  2333  	// cond: rc < 8
  2334  	// result: (BSTRPICKV [rc + (7+rc)<<6] x)
  2335  	for {
  2336  		if v_0.Op != OpLOONG64SRLVconst {
  2337  			break
  2338  		}
  2339  		rc := auxIntToInt64(v_0.AuxInt)
  2340  		x := v_0.Args[0]
  2341  		if !(rc < 8) {
  2342  			break
  2343  		}
  2344  		v.reset(OpLOONG64BSTRPICKV)
  2345  		v.AuxInt = int64ToAuxInt(rc + (7+rc)<<6)
  2346  		v.AddArg(x)
  2347  		return true
  2348  	}
  2349  	// match: (MOVBUreg x:(SGT _ _))
  2350  	// result: x
  2351  	for {
  2352  		x := v_0
  2353  		if x.Op != OpLOONG64SGT {
  2354  			break
  2355  		}
  2356  		v.copyOf(x)
  2357  		return true
  2358  	}
  2359  	// match: (MOVBUreg x:(SGTU _ _))
  2360  	// result: x
  2361  	for {
  2362  		x := v_0
  2363  		if x.Op != OpLOONG64SGTU {
  2364  			break
  2365  		}
  2366  		v.copyOf(x)
  2367  		return true
  2368  	}
  2369  	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGT _ _)))
  2370  	// result: x
  2371  	for {
  2372  		x := v_0
  2373  		if x.Op != OpLOONG64XOR {
  2374  			break
  2375  		}
  2376  		_ = x.Args[1]
  2377  		x_0 := x.Args[0]
  2378  		x_1 := x.Args[1]
  2379  		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
  2380  			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGT {
  2381  				continue
  2382  			}
  2383  			v.copyOf(x)
  2384  			return true
  2385  		}
  2386  		break
  2387  	}
  2388  	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGTU _ _)))
  2389  	// result: x
  2390  	for {
  2391  		x := v_0
  2392  		if x.Op != OpLOONG64XOR {
  2393  			break
  2394  		}
  2395  		_ = x.Args[1]
  2396  		x_0 := x.Args[0]
  2397  		x_1 := x.Args[1]
  2398  		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
  2399  			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGTU {
  2400  				continue
  2401  			}
  2402  			v.copyOf(x)
  2403  			return true
  2404  		}
  2405  		break
  2406  	}
  2407  	// match: (MOVBUreg x:(MOVBUload _ _))
  2408  	// result: (MOVVreg x)
  2409  	for {
  2410  		x := v_0
  2411  		if x.Op != OpLOONG64MOVBUload {
  2412  			break
  2413  		}
  2414  		v.reset(OpLOONG64MOVVreg)
  2415  		v.AddArg(x)
  2416  		return true
  2417  	}
  2418  	// match: (MOVBUreg x:(MOVBUreg _))
  2419  	// result: (MOVVreg x)
  2420  	for {
  2421  		x := v_0
  2422  		if x.Op != OpLOONG64MOVBUreg {
  2423  			break
  2424  		}
  2425  		v.reset(OpLOONG64MOVVreg)
  2426  		v.AddArg(x)
  2427  		return true
  2428  	}
  2429  	// match: (MOVBUreg (SLLVconst [lc] x))
  2430  	// cond: lc >= 8
  2431  	// result: (MOVVconst [0])
  2432  	for {
  2433  		if v_0.Op != OpLOONG64SLLVconst {
  2434  			break
  2435  		}
  2436  		lc := auxIntToInt64(v_0.AuxInt)
  2437  		if !(lc >= 8) {
  2438  			break
  2439  		}
  2440  		v.reset(OpLOONG64MOVVconst)
  2441  		v.AuxInt = int64ToAuxInt(0)
  2442  		return true
  2443  	}
  2444  	// match: (MOVBUreg (MOVVconst [c]))
  2445  	// result: (MOVVconst [int64(uint8(c))])
  2446  	for {
  2447  		if v_0.Op != OpLOONG64MOVVconst {
  2448  			break
  2449  		}
  2450  		c := auxIntToInt64(v_0.AuxInt)
  2451  		v.reset(OpLOONG64MOVVconst)
  2452  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  2453  		return true
  2454  	}
  2455  	// match: (MOVBUreg (ANDconst [c] x))
  2456  	// result: (ANDconst [c&0xff] x)
  2457  	for {
  2458  		if v_0.Op != OpLOONG64ANDconst {
  2459  			break
  2460  		}
  2461  		c := auxIntToInt64(v_0.AuxInt)
  2462  		x := v_0.Args[0]
  2463  		v.reset(OpLOONG64ANDconst)
  2464  		v.AuxInt = int64ToAuxInt(c & 0xff)
  2465  		v.AddArg(x)
  2466  		return true
  2467  	}
  2468  	// match: (MOVBUreg x:(ANDconst [c] y))
  2469  	// cond: c >= 0 && int64(uint8(c)) == c
  2470  	// result: x
  2471  	for {
  2472  		x := v_0
  2473  		if x.Op != OpLOONG64ANDconst {
  2474  			break
  2475  		}
  2476  		c := auxIntToInt64(x.AuxInt)
  2477  		if !(c >= 0 && int64(uint8(c)) == c) {
  2478  			break
  2479  		}
  2480  		v.copyOf(x)
  2481  		return true
  2482  	}
  2483  	return false
  2484  }
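// Note (illustrative, not a generated rule): in the first MOVBUreg rule above
// the BSTRPICKV auxint packs the extracted bit field as lsb + msb<<6, with
// lsb = rc and msb = rc+7 (an 8-bit field starting at bit rc); for rc = 3
// that is 3 + 10<<6 = 643. The remaining rules recognize values that are
// already zero-extended (boolean results, unsigned byte loads, small AND
// masks) and reduce the extension to a plain MOVVreg, a tightened ANDconst
// mask, or a constant.
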
  2485  func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
  2486  	v_1 := v.Args[1]
  2487  	v_0 := v.Args[0]
  2488  	b := v.Block
  2489  	config := b.Func.Config
  2490  	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
  2491  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2492  	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
  2493  	for {
  2494  		off1 := auxIntToInt32(v.AuxInt)
  2495  		sym := auxToSym(v.Aux)
  2496  		if v_0.Op != OpLOONG64ADDVconst {
  2497  			break
  2498  		}
  2499  		off2 := auxIntToInt64(v_0.AuxInt)
  2500  		ptr := v_0.Args[0]
  2501  		mem := v_1
  2502  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2503  			break
  2504  		}
  2505  		v.reset(OpLOONG64MOVBload)
  2506  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2507  		v.Aux = symToAux(sym)
  2508  		v.AddArg2(ptr, mem)
  2509  		return true
  2510  	}
  2511  	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2512  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2513  	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2514  	for {
  2515  		off1 := auxIntToInt32(v.AuxInt)
  2516  		sym1 := auxToSym(v.Aux)
  2517  		if v_0.Op != OpLOONG64MOVVaddr {
  2518  			break
  2519  		}
  2520  		off2 := auxIntToInt32(v_0.AuxInt)
  2521  		sym2 := auxToSym(v_0.Aux)
  2522  		ptr := v_0.Args[0]
  2523  		mem := v_1
  2524  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2525  			break
  2526  		}
  2527  		v.reset(OpLOONG64MOVBload)
  2528  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2529  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2530  		v.AddArg2(ptr, mem)
  2531  		return true
  2532  	}
  2533  	// match: (MOVBload [off] {sym} (ADDV ptr idx) mem)
  2534  	// cond: off == 0 && sym == nil
  2535  	// result: (MOVBloadidx ptr idx mem)
  2536  	for {
  2537  		off := auxIntToInt32(v.AuxInt)
  2538  		sym := auxToSym(v.Aux)
  2539  		if v_0.Op != OpLOONG64ADDV {
  2540  			break
  2541  		}
  2542  		idx := v_0.Args[1]
  2543  		ptr := v_0.Args[0]
  2544  		mem := v_1
  2545  		if !(off == 0 && sym == nil) {
  2546  			break
  2547  		}
  2548  		v.reset(OpLOONG64MOVBloadidx)
  2549  		v.AddArg3(ptr, idx, mem)
  2550  		return true
  2551  	}
  2552  	return false
  2553  }
  2554  func rewriteValueLOONG64_OpLOONG64MOVBloadidx(v *Value) bool {
  2555  	v_2 := v.Args[2]
  2556  	v_1 := v.Args[1]
  2557  	v_0 := v.Args[0]
  2558  	// match: (MOVBloadidx ptr (MOVVconst [c]) mem)
  2559  	// cond: is32Bit(c)
  2560  	// result: (MOVBload [int32(c)] ptr mem)
  2561  	for {
  2562  		ptr := v_0
  2563  		if v_1.Op != OpLOONG64MOVVconst {
  2564  			break
  2565  		}
  2566  		c := auxIntToInt64(v_1.AuxInt)
  2567  		mem := v_2
  2568  		if !(is32Bit(c)) {
  2569  			break
  2570  		}
  2571  		v.reset(OpLOONG64MOVBload)
  2572  		v.AuxInt = int32ToAuxInt(int32(c))
  2573  		v.AddArg2(ptr, mem)
  2574  		return true
  2575  	}
  2576  	// match: (MOVBloadidx (MOVVconst [c]) ptr mem)
  2577  	// cond: is32Bit(c)
  2578  	// result: (MOVBload [int32(c)] ptr mem)
  2579  	for {
  2580  		if v_0.Op != OpLOONG64MOVVconst {
  2581  			break
  2582  		}
  2583  		c := auxIntToInt64(v_0.AuxInt)
  2584  		ptr := v_1
  2585  		mem := v_2
  2586  		if !(is32Bit(c)) {
  2587  			break
  2588  		}
  2589  		v.reset(OpLOONG64MOVBload)
  2590  		v.AuxInt = int32ToAuxInt(int32(c))
  2591  		v.AddArg2(ptr, mem)
  2592  		return true
  2593  	}
  2594  	return false
  2595  }
  2596  func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool {
  2597  	v_0 := v.Args[0]
  2598  	// match: (MOVBreg x:(MOVBload _ _))
  2599  	// result: (MOVVreg x)
  2600  	for {
  2601  		x := v_0
  2602  		if x.Op != OpLOONG64MOVBload {
  2603  			break
  2604  		}
  2605  		v.reset(OpLOONG64MOVVreg)
  2606  		v.AddArg(x)
  2607  		return true
  2608  	}
  2609  	// match: (MOVBreg x:(MOVBreg _))
  2610  	// result: (MOVVreg x)
  2611  	for {
  2612  		x := v_0
  2613  		if x.Op != OpLOONG64MOVBreg {
  2614  			break
  2615  		}
  2616  		v.reset(OpLOONG64MOVVreg)
  2617  		v.AddArg(x)
  2618  		return true
  2619  	}
  2620  	// match: (MOVBreg (MOVVconst [c]))
  2621  	// result: (MOVVconst [int64(int8(c))])
  2622  	for {
  2623  		if v_0.Op != OpLOONG64MOVVconst {
  2624  			break
  2625  		}
  2626  		c := auxIntToInt64(v_0.AuxInt)
  2627  		v.reset(OpLOONG64MOVVconst)
  2628  		v.AuxInt = int64ToAuxInt(int64(int8(c)))
  2629  		return true
  2630  	}
  2631  	// match: (MOVBreg x:(ANDconst [c] y))
  2632  	// cond: c >= 0 && int64(int8(c)) == c
  2633  	// result: x
  2634  	for {
  2635  		x := v_0
  2636  		if x.Op != OpLOONG64ANDconst {
  2637  			break
  2638  		}
  2639  		c := auxIntToInt64(x.AuxInt)
  2640  		if !(c >= 0 && int64(int8(c)) == c) {
  2641  			break
  2642  		}
  2643  		v.copyOf(x)
  2644  		return true
  2645  	}
  2646  	return false
  2647  }
  2648  func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
  2649  	v_2 := v.Args[2]
  2650  	v_1 := v.Args[1]
  2651  	v_0 := v.Args[0]
  2652  	b := v.Block
  2653  	config := b.Func.Config
  2654  	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  2655  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2656  	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
  2657  	for {
  2658  		off1 := auxIntToInt32(v.AuxInt)
  2659  		sym := auxToSym(v.Aux)
  2660  		if v_0.Op != OpLOONG64ADDVconst {
  2661  			break
  2662  		}
  2663  		off2 := auxIntToInt64(v_0.AuxInt)
  2664  		ptr := v_0.Args[0]
  2665  		val := v_1
  2666  		mem := v_2
  2667  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2668  			break
  2669  		}
  2670  		v.reset(OpLOONG64MOVBstore)
  2671  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2672  		v.Aux = symToAux(sym)
  2673  		v.AddArg3(ptr, val, mem)
  2674  		return true
  2675  	}
  2676  	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  2677  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2678  	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  2679  	for {
  2680  		off1 := auxIntToInt32(v.AuxInt)
  2681  		sym1 := auxToSym(v.Aux)
  2682  		if v_0.Op != OpLOONG64MOVVaddr {
  2683  			break
  2684  		}
  2685  		off2 := auxIntToInt32(v_0.AuxInt)
  2686  		sym2 := auxToSym(v_0.Aux)
  2687  		ptr := v_0.Args[0]
  2688  		val := v_1
  2689  		mem := v_2
  2690  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2691  			break
  2692  		}
  2693  		v.reset(OpLOONG64MOVBstore)
  2694  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2695  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2696  		v.AddArg3(ptr, val, mem)
  2697  		return true
  2698  	}
  2699  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  2700  	// result: (MOVBstore [off] {sym} ptr x mem)
  2701  	for {
  2702  		off := auxIntToInt32(v.AuxInt)
  2703  		sym := auxToSym(v.Aux)
  2704  		ptr := v_0
  2705  		if v_1.Op != OpLOONG64MOVBreg {
  2706  			break
  2707  		}
  2708  		x := v_1.Args[0]
  2709  		mem := v_2
  2710  		v.reset(OpLOONG64MOVBstore)
  2711  		v.AuxInt = int32ToAuxInt(off)
  2712  		v.Aux = symToAux(sym)
  2713  		v.AddArg3(ptr, x, mem)
  2714  		return true
  2715  	}
  2716  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  2717  	// result: (MOVBstore [off] {sym} ptr x mem)
  2718  	for {
  2719  		off := auxIntToInt32(v.AuxInt)
  2720  		sym := auxToSym(v.Aux)
  2721  		ptr := v_0
  2722  		if v_1.Op != OpLOONG64MOVBUreg {
  2723  			break
  2724  		}
  2725  		x := v_1.Args[0]
  2726  		mem := v_2
  2727  		v.reset(OpLOONG64MOVBstore)
  2728  		v.AuxInt = int32ToAuxInt(off)
  2729  		v.Aux = symToAux(sym)
  2730  		v.AddArg3(ptr, x, mem)
  2731  		return true
  2732  	}
  2733  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  2734  	// result: (MOVBstore [off] {sym} ptr x mem)
  2735  	for {
  2736  		off := auxIntToInt32(v.AuxInt)
  2737  		sym := auxToSym(v.Aux)
  2738  		ptr := v_0
  2739  		if v_1.Op != OpLOONG64MOVHreg {
  2740  			break
  2741  		}
  2742  		x := v_1.Args[0]
  2743  		mem := v_2
  2744  		v.reset(OpLOONG64MOVBstore)
  2745  		v.AuxInt = int32ToAuxInt(off)
  2746  		v.Aux = symToAux(sym)
  2747  		v.AddArg3(ptr, x, mem)
  2748  		return true
  2749  	}
  2750  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  2751  	// result: (MOVBstore [off] {sym} ptr x mem)
  2752  	for {
  2753  		off := auxIntToInt32(v.AuxInt)
  2754  		sym := auxToSym(v.Aux)
  2755  		ptr := v_0
  2756  		if v_1.Op != OpLOONG64MOVHUreg {
  2757  			break
  2758  		}
  2759  		x := v_1.Args[0]
  2760  		mem := v_2
  2761  		v.reset(OpLOONG64MOVBstore)
  2762  		v.AuxInt = int32ToAuxInt(off)
  2763  		v.Aux = symToAux(sym)
  2764  		v.AddArg3(ptr, x, mem)
  2765  		return true
  2766  	}
  2767  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  2768  	// result: (MOVBstore [off] {sym} ptr x mem)
  2769  	for {
  2770  		off := auxIntToInt32(v.AuxInt)
  2771  		sym := auxToSym(v.Aux)
  2772  		ptr := v_0
  2773  		if v_1.Op != OpLOONG64MOVWreg {
  2774  			break
  2775  		}
  2776  		x := v_1.Args[0]
  2777  		mem := v_2
  2778  		v.reset(OpLOONG64MOVBstore)
  2779  		v.AuxInt = int32ToAuxInt(off)
  2780  		v.Aux = symToAux(sym)
  2781  		v.AddArg3(ptr, x, mem)
  2782  		return true
  2783  	}
  2784  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  2785  	// result: (MOVBstore [off] {sym} ptr x mem)
  2786  	for {
  2787  		off := auxIntToInt32(v.AuxInt)
  2788  		sym := auxToSym(v.Aux)
  2789  		ptr := v_0
  2790  		if v_1.Op != OpLOONG64MOVWUreg {
  2791  			break
  2792  		}
  2793  		x := v_1.Args[0]
  2794  		mem := v_2
  2795  		v.reset(OpLOONG64MOVBstore)
  2796  		v.AuxInt = int32ToAuxInt(off)
  2797  		v.Aux = symToAux(sym)
  2798  		v.AddArg3(ptr, x, mem)
  2799  		return true
  2800  	}
  2801  	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
  2802  	// result: (MOVBstorezero [off] {sym} ptr mem)
  2803  	for {
  2804  		off := auxIntToInt32(v.AuxInt)
  2805  		sym := auxToSym(v.Aux)
  2806  		ptr := v_0
  2807  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  2808  			break
  2809  		}
  2810  		mem := v_2
  2811  		v.reset(OpLOONG64MOVBstorezero)
  2812  		v.AuxInt = int32ToAuxInt(off)
  2813  		v.Aux = symToAux(sym)
  2814  		v.AddArg2(ptr, mem)
  2815  		return true
  2816  	}
  2817  	// match: (MOVBstore [off] {sym} (ADDV ptr idx) val mem)
  2818  	// cond: off == 0 && sym == nil
  2819  	// result: (MOVBstoreidx ptr idx val mem)
  2820  	for {
  2821  		off := auxIntToInt32(v.AuxInt)
  2822  		sym := auxToSym(v.Aux)
  2823  		if v_0.Op != OpLOONG64ADDV {
  2824  			break
  2825  		}
  2826  		idx := v_0.Args[1]
  2827  		ptr := v_0.Args[0]
  2828  		val := v_1
  2829  		mem := v_2
  2830  		if !(off == 0 && sym == nil) {
  2831  			break
  2832  		}
  2833  		v.reset(OpLOONG64MOVBstoreidx)
  2834  		v.AddArg4(ptr, idx, val, mem)
  2835  		return true
  2836  	}
  2837  	return false
  2838  }
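// Note (illustrative, not a generated rule): a byte store writes only the low
// 8 bits, so the MOVBstore rules above delete any MOV{B,H,W}{,U}reg
// sign- or zero-extension feeding the stored value, send a stored constant
// zero to the dedicated MOVBstorezero form, and turn an ADDV address into the
// indexed MOVBstoreidx form.
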
  2839  func rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v *Value) bool {
  2840  	v_3 := v.Args[3]
  2841  	v_2 := v.Args[2]
  2842  	v_1 := v.Args[1]
  2843  	v_0 := v.Args[0]
  2844  	// match: (MOVBstoreidx ptr (MOVVconst [c]) val mem)
  2845  	// cond: is32Bit(c)
  2846  	// result: (MOVBstore [int32(c)] ptr val mem)
  2847  	for {
  2848  		ptr := v_0
  2849  		if v_1.Op != OpLOONG64MOVVconst {
  2850  			break
  2851  		}
  2852  		c := auxIntToInt64(v_1.AuxInt)
  2853  		val := v_2
  2854  		mem := v_3
  2855  		if !(is32Bit(c)) {
  2856  			break
  2857  		}
  2858  		v.reset(OpLOONG64MOVBstore)
  2859  		v.AuxInt = int32ToAuxInt(int32(c))
  2860  		v.AddArg3(ptr, val, mem)
  2861  		return true
  2862  	}
  2863  	// match: (MOVBstoreidx (MOVVconst [c]) idx val mem)
  2864  	// cond: is32Bit(c)
  2865  	// result: (MOVBstore [int32(c)] idx val mem)
  2866  	for {
  2867  		if v_0.Op != OpLOONG64MOVVconst {
  2868  			break
  2869  		}
  2870  		c := auxIntToInt64(v_0.AuxInt)
  2871  		idx := v_1
  2872  		val := v_2
  2873  		mem := v_3
  2874  		if !(is32Bit(c)) {
  2875  			break
  2876  		}
  2877  		v.reset(OpLOONG64MOVBstore)
  2878  		v.AuxInt = int32ToAuxInt(int32(c))
  2879  		v.AddArg3(idx, val, mem)
  2880  		return true
  2881  	}
  2882  	// match: (MOVBstoreidx ptr idx (MOVVconst [0]) mem)
  2883  	// result: (MOVBstorezeroidx ptr idx mem)
  2884  	for {
  2885  		ptr := v_0
  2886  		idx := v_1
  2887  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  2888  			break
  2889  		}
  2890  		mem := v_3
  2891  		v.reset(OpLOONG64MOVBstorezeroidx)
  2892  		v.AddArg3(ptr, idx, mem)
  2893  		return true
  2894  	}
  2895  	return false
  2896  }
  2897  func rewriteValueLOONG64_OpLOONG64MOVBstorezero(v *Value) bool {
  2898  	v_1 := v.Args[1]
  2899  	v_0 := v.Args[0]
  2900  	b := v.Block
  2901  	config := b.Func.Config
  2902  	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  2903  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2904  	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
  2905  	for {
  2906  		off1 := auxIntToInt32(v.AuxInt)
  2907  		sym := auxToSym(v.Aux)
  2908  		if v_0.Op != OpLOONG64ADDVconst {
  2909  			break
  2910  		}
  2911  		off2 := auxIntToInt64(v_0.AuxInt)
  2912  		ptr := v_0.Args[0]
  2913  		mem := v_1
  2914  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2915  			break
  2916  		}
  2917  		v.reset(OpLOONG64MOVBstorezero)
  2918  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2919  		v.Aux = symToAux(sym)
  2920  		v.AddArg2(ptr, mem)
  2921  		return true
  2922  	}
  2923  	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2924  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2925  	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2926  	for {
  2927  		off1 := auxIntToInt32(v.AuxInt)
  2928  		sym1 := auxToSym(v.Aux)
  2929  		if v_0.Op != OpLOONG64MOVVaddr {
  2930  			break
  2931  		}
  2932  		off2 := auxIntToInt32(v_0.AuxInt)
  2933  		sym2 := auxToSym(v_0.Aux)
  2934  		ptr := v_0.Args[0]
  2935  		mem := v_1
  2936  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2937  			break
  2938  		}
  2939  		v.reset(OpLOONG64MOVBstorezero)
  2940  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2941  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2942  		v.AddArg2(ptr, mem)
  2943  		return true
  2944  	}
  2945  	// match: (MOVBstorezero [off] {sym} (ADDV ptr idx) mem)
  2946  	// cond: off == 0 && sym == nil
  2947  	// result: (MOVBstorezeroidx ptr idx mem)
  2948  	for {
  2949  		off := auxIntToInt32(v.AuxInt)
  2950  		sym := auxToSym(v.Aux)
  2951  		if v_0.Op != OpLOONG64ADDV {
  2952  			break
  2953  		}
  2954  		idx := v_0.Args[1]
  2955  		ptr := v_0.Args[0]
  2956  		mem := v_1
  2957  		if !(off == 0 && sym == nil) {
  2958  			break
  2959  		}
  2960  		v.reset(OpLOONG64MOVBstorezeroidx)
  2961  		v.AddArg3(ptr, idx, mem)
  2962  		return true
  2963  	}
  2964  	return false
  2965  }
  2966  func rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx(v *Value) bool {
  2967  	v_2 := v.Args[2]
  2968  	v_1 := v.Args[1]
  2969  	v_0 := v.Args[0]
  2970  	// match: (MOVBstorezeroidx ptr (MOVVconst [c]) mem)
  2971  	// cond: is32Bit(c)
  2972  	// result: (MOVBstorezero [int32(c)] ptr mem)
  2973  	for {
  2974  		ptr := v_0
  2975  		if v_1.Op != OpLOONG64MOVVconst {
  2976  			break
  2977  		}
  2978  		c := auxIntToInt64(v_1.AuxInt)
  2979  		mem := v_2
  2980  		if !(is32Bit(c)) {
  2981  			break
  2982  		}
  2983  		v.reset(OpLOONG64MOVBstorezero)
  2984  		v.AuxInt = int32ToAuxInt(int32(c))
  2985  		v.AddArg2(ptr, mem)
  2986  		return true
  2987  	}
  2988  	// match: (MOVBstorezeroidx (MOVVconst [c]) idx mem)
  2989  	// cond: is32Bit(c)
  2990  	// result: (MOVBstorezero [int32(c)] idx mem)
  2991  	for {
  2992  		if v_0.Op != OpLOONG64MOVVconst {
  2993  			break
  2994  		}
  2995  		c := auxIntToInt64(v_0.AuxInt)
  2996  		idx := v_1
  2997  		mem := v_2
  2998  		if !(is32Bit(c)) {
  2999  			break
  3000  		}
  3001  		v.reset(OpLOONG64MOVBstorezero)
  3002  		v.AuxInt = int32ToAuxInt(int32(c))
  3003  		v.AddArg2(idx, mem)
  3004  		return true
  3005  	}
  3006  	return false
  3007  }
  3008  func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
  3009  	v_1 := v.Args[1]
  3010  	v_0 := v.Args[0]
  3011  	b := v.Block
  3012  	config := b.Func.Config
  3013  	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
  3014  	// result: (MOVVgpfp val)
  3015  	for {
  3016  		off := auxIntToInt32(v.AuxInt)
  3017  		sym := auxToSym(v.Aux)
  3018  		ptr := v_0
  3019  		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  3020  			break
  3021  		}
  3022  		val := v_1.Args[1]
  3023  		if ptr != v_1.Args[0] {
  3024  			break
  3025  		}
  3026  		v.reset(OpLOONG64MOVVgpfp)
  3027  		v.AddArg(val)
  3028  		return true
  3029  	}
  3030  	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3031  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3032  	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
  3033  	for {
  3034  		off1 := auxIntToInt32(v.AuxInt)
  3035  		sym := auxToSym(v.Aux)
  3036  		if v_0.Op != OpLOONG64ADDVconst {
  3037  			break
  3038  		}
  3039  		off2 := auxIntToInt64(v_0.AuxInt)
  3040  		ptr := v_0.Args[0]
  3041  		mem := v_1
  3042  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3043  			break
  3044  		}
  3045  		v.reset(OpLOONG64MOVDload)
  3046  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3047  		v.Aux = symToAux(sym)
  3048  		v.AddArg2(ptr, mem)
  3049  		return true
  3050  	}
  3051  	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3052  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3053  	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3054  	for {
  3055  		off1 := auxIntToInt32(v.AuxInt)
  3056  		sym1 := auxToSym(v.Aux)
  3057  		if v_0.Op != OpLOONG64MOVVaddr {
  3058  			break
  3059  		}
  3060  		off2 := auxIntToInt32(v_0.AuxInt)
  3061  		sym2 := auxToSym(v_0.Aux)
  3062  		ptr := v_0.Args[0]
  3063  		mem := v_1
  3064  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3065  			break
  3066  		}
  3067  		v.reset(OpLOONG64MOVDload)
  3068  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3069  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3070  		v.AddArg2(ptr, mem)
  3071  		return true
  3072  	}
  3073  	// match: (MOVDload [off] {sym} (ADDV ptr idx) mem)
  3074  	// cond: off == 0 && sym == nil
  3075  	// result: (MOVDloadidx ptr idx mem)
  3076  	for {
  3077  		off := auxIntToInt32(v.AuxInt)
  3078  		sym := auxToSym(v.Aux)
  3079  		if v_0.Op != OpLOONG64ADDV {
  3080  			break
  3081  		}
  3082  		idx := v_0.Args[1]
  3083  		ptr := v_0.Args[0]
  3084  		mem := v_1
  3085  		if !(off == 0 && sym == nil) {
  3086  			break
  3087  		}
  3088  		v.reset(OpLOONG64MOVDloadidx)
  3089  		v.AddArg3(ptr, idx, mem)
  3090  		return true
  3091  	}
  3092  	return false
  3093  }
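// Note (illustrative, not a generated rule): the first MOVDload rewrite above
// is store-to-load forwarding across register files: a float64 load that
// reads back a value just written by a MOVVstore to the same [off] {sym} ptr
// is replaced by MOVVgpfp, which appears to be a direct general-purpose to
// floating-point register move, so the value never round-trips through
// memory. MOVDstore performs the symmetric rewrite, and MOVFload/MOVFstore do
// the same for float32 via MOVWgpfp.
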
  3094  func rewriteValueLOONG64_OpLOONG64MOVDloadidx(v *Value) bool {
  3095  	v_2 := v.Args[2]
  3096  	v_1 := v.Args[1]
  3097  	v_0 := v.Args[0]
  3098  	// match: (MOVDloadidx ptr (MOVVconst [c]) mem)
  3099  	// cond: is32Bit(c)
  3100  	// result: (MOVDload [int32(c)] ptr mem)
  3101  	for {
  3102  		ptr := v_0
  3103  		if v_1.Op != OpLOONG64MOVVconst {
  3104  			break
  3105  		}
  3106  		c := auxIntToInt64(v_1.AuxInt)
  3107  		mem := v_2
  3108  		if !(is32Bit(c)) {
  3109  			break
  3110  		}
  3111  		v.reset(OpLOONG64MOVDload)
  3112  		v.AuxInt = int32ToAuxInt(int32(c))
  3113  		v.AddArg2(ptr, mem)
  3114  		return true
  3115  	}
  3116  	// match: (MOVDloadidx (MOVVconst [c]) ptr mem)
  3117  	// cond: is32Bit(c)
  3118  	// result: (MOVDload [int32(c)] ptr mem)
  3119  	for {
  3120  		if v_0.Op != OpLOONG64MOVVconst {
  3121  			break
  3122  		}
  3123  		c := auxIntToInt64(v_0.AuxInt)
  3124  		ptr := v_1
  3125  		mem := v_2
  3126  		if !(is32Bit(c)) {
  3127  			break
  3128  		}
  3129  		v.reset(OpLOONG64MOVDload)
  3130  		v.AuxInt = int32ToAuxInt(int32(c))
  3131  		v.AddArg2(ptr, mem)
  3132  		return true
  3133  	}
  3134  	return false
  3135  }
  3136  func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
  3137  	v_2 := v.Args[2]
  3138  	v_1 := v.Args[1]
  3139  	v_0 := v.Args[0]
  3140  	b := v.Block
  3141  	config := b.Func.Config
  3142  	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
  3143  	// result: (MOVVstore [off] {sym} ptr val mem)
  3144  	for {
  3145  		off := auxIntToInt32(v.AuxInt)
  3146  		sym := auxToSym(v.Aux)
  3147  		ptr := v_0
  3148  		if v_1.Op != OpLOONG64MOVVgpfp {
  3149  			break
  3150  		}
  3151  		val := v_1.Args[0]
  3152  		mem := v_2
  3153  		v.reset(OpLOONG64MOVVstore)
  3154  		v.AuxInt = int32ToAuxInt(off)
  3155  		v.Aux = symToAux(sym)
  3156  		v.AddArg3(ptr, val, mem)
  3157  		return true
  3158  	}
  3159  	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3160  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3161  	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
  3162  	for {
  3163  		off1 := auxIntToInt32(v.AuxInt)
  3164  		sym := auxToSym(v.Aux)
  3165  		if v_0.Op != OpLOONG64ADDVconst {
  3166  			break
  3167  		}
  3168  		off2 := auxIntToInt64(v_0.AuxInt)
  3169  		ptr := v_0.Args[0]
  3170  		val := v_1
  3171  		mem := v_2
  3172  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3173  			break
  3174  		}
  3175  		v.reset(OpLOONG64MOVDstore)
  3176  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3177  		v.Aux = symToAux(sym)
  3178  		v.AddArg3(ptr, val, mem)
  3179  		return true
  3180  	}
  3181  	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3182  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3183  	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3184  	for {
  3185  		off1 := auxIntToInt32(v.AuxInt)
  3186  		sym1 := auxToSym(v.Aux)
  3187  		if v_0.Op != OpLOONG64MOVVaddr {
  3188  			break
  3189  		}
  3190  		off2 := auxIntToInt32(v_0.AuxInt)
  3191  		sym2 := auxToSym(v_0.Aux)
  3192  		ptr := v_0.Args[0]
  3193  		val := v_1
  3194  		mem := v_2
  3195  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3196  			break
  3197  		}
  3198  		v.reset(OpLOONG64MOVDstore)
  3199  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3200  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3201  		v.AddArg3(ptr, val, mem)
  3202  		return true
  3203  	}
  3204  	// match: (MOVDstore [off] {sym} (ADDV ptr idx) val mem)
  3205  	// cond: off == 0 && sym == nil
  3206  	// result: (MOVDstoreidx ptr idx val mem)
  3207  	for {
  3208  		off := auxIntToInt32(v.AuxInt)
  3209  		sym := auxToSym(v.Aux)
  3210  		if v_0.Op != OpLOONG64ADDV {
  3211  			break
  3212  		}
  3213  		idx := v_0.Args[1]
  3214  		ptr := v_0.Args[0]
  3215  		val := v_1
  3216  		mem := v_2
  3217  		if !(off == 0 && sym == nil) {
  3218  			break
  3219  		}
  3220  		v.reset(OpLOONG64MOVDstoreidx)
  3221  		v.AddArg4(ptr, idx, val, mem)
  3222  		return true
  3223  	}
  3224  	return false
  3225  }
  3226  func rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v *Value) bool {
  3227  	v_3 := v.Args[3]
  3228  	v_2 := v.Args[2]
  3229  	v_1 := v.Args[1]
  3230  	v_0 := v.Args[0]
  3231  	// match: (MOVDstoreidx ptr (MOVVconst [c]) val mem)
  3232  	// cond: is32Bit(c)
  3233  	// result: (MOVDstore [int32(c)] ptr val mem)
  3234  	for {
  3235  		ptr := v_0
  3236  		if v_1.Op != OpLOONG64MOVVconst {
  3237  			break
  3238  		}
  3239  		c := auxIntToInt64(v_1.AuxInt)
  3240  		val := v_2
  3241  		mem := v_3
  3242  		if !(is32Bit(c)) {
  3243  			break
  3244  		}
  3245  		v.reset(OpLOONG64MOVDstore)
  3246  		v.AuxInt = int32ToAuxInt(int32(c))
  3247  		v.AddArg3(ptr, val, mem)
  3248  		return true
  3249  	}
  3250  	// match: (MOVDstoreidx (MOVVconst [c]) idx val mem)
  3251  	// cond: is32Bit(c)
  3252  	// result: (MOVDstore [int32(c)] idx val mem)
  3253  	for {
  3254  		if v_0.Op != OpLOONG64MOVVconst {
  3255  			break
  3256  		}
  3257  		c := auxIntToInt64(v_0.AuxInt)
  3258  		idx := v_1
  3259  		val := v_2
  3260  		mem := v_3
  3261  		if !(is32Bit(c)) {
  3262  			break
  3263  		}
  3264  		v.reset(OpLOONG64MOVDstore)
  3265  		v.AuxInt = int32ToAuxInt(int32(c))
  3266  		v.AddArg3(idx, val, mem)
  3267  		return true
  3268  	}
  3269  	return false
  3270  }
  3271  func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
  3272  	v_1 := v.Args[1]
  3273  	v_0 := v.Args[0]
  3274  	b := v.Block
  3275  	config := b.Func.Config
  3276  	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
  3277  	// result: (MOVWgpfp val)
  3278  	for {
  3279  		off := auxIntToInt32(v.AuxInt)
  3280  		sym := auxToSym(v.Aux)
  3281  		ptr := v_0
  3282  		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  3283  			break
  3284  		}
  3285  		val := v_1.Args[1]
  3286  		if ptr != v_1.Args[0] {
  3287  			break
  3288  		}
  3289  		v.reset(OpLOONG64MOVWgpfp)
  3290  		v.AddArg(val)
  3291  		return true
  3292  	}
  3293  	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3294  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3295  	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
  3296  	for {
  3297  		off1 := auxIntToInt32(v.AuxInt)
  3298  		sym := auxToSym(v.Aux)
  3299  		if v_0.Op != OpLOONG64ADDVconst {
  3300  			break
  3301  		}
  3302  		off2 := auxIntToInt64(v_0.AuxInt)
  3303  		ptr := v_0.Args[0]
  3304  		mem := v_1
  3305  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3306  			break
  3307  		}
  3308  		v.reset(OpLOONG64MOVFload)
  3309  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3310  		v.Aux = symToAux(sym)
  3311  		v.AddArg2(ptr, mem)
  3312  		return true
  3313  	}
  3314  	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3315  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3316  	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3317  	for {
  3318  		off1 := auxIntToInt32(v.AuxInt)
  3319  		sym1 := auxToSym(v.Aux)
  3320  		if v_0.Op != OpLOONG64MOVVaddr {
  3321  			break
  3322  		}
  3323  		off2 := auxIntToInt32(v_0.AuxInt)
  3324  		sym2 := auxToSym(v_0.Aux)
  3325  		ptr := v_0.Args[0]
  3326  		mem := v_1
  3327  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3328  			break
  3329  		}
  3330  		v.reset(OpLOONG64MOVFload)
  3331  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3332  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3333  		v.AddArg2(ptr, mem)
  3334  		return true
  3335  	}
  3336  	// match: (MOVFload [off] {sym} (ADDV ptr idx) mem)
  3337  	// cond: off == 0 && sym == nil
  3338  	// result: (MOVFloadidx ptr idx mem)
  3339  	for {
  3340  		off := auxIntToInt32(v.AuxInt)
  3341  		sym := auxToSym(v.Aux)
  3342  		if v_0.Op != OpLOONG64ADDV {
  3343  			break
  3344  		}
  3345  		idx := v_0.Args[1]
  3346  		ptr := v_0.Args[0]
  3347  		mem := v_1
  3348  		if !(off == 0 && sym == nil) {
  3349  			break
  3350  		}
  3351  		v.reset(OpLOONG64MOVFloadidx)
  3352  		v.AddArg3(ptr, idx, mem)
  3353  		return true
  3354  	}
  3355  	return false
  3356  }
  3357  func rewriteValueLOONG64_OpLOONG64MOVFloadidx(v *Value) bool {
  3358  	v_2 := v.Args[2]
  3359  	v_1 := v.Args[1]
  3360  	v_0 := v.Args[0]
  3361  	// match: (MOVFloadidx ptr (MOVVconst [c]) mem)
  3362  	// cond: is32Bit(c)
  3363  	// result: (MOVFload [int32(c)] ptr mem)
  3364  	for {
  3365  		ptr := v_0
  3366  		if v_1.Op != OpLOONG64MOVVconst {
  3367  			break
  3368  		}
  3369  		c := auxIntToInt64(v_1.AuxInt)
  3370  		mem := v_2
  3371  		if !(is32Bit(c)) {
  3372  			break
  3373  		}
  3374  		v.reset(OpLOONG64MOVFload)
  3375  		v.AuxInt = int32ToAuxInt(int32(c))
  3376  		v.AddArg2(ptr, mem)
  3377  		return true
  3378  	}
  3379  	// match: (MOVFloadidx (MOVVconst [c]) ptr mem)
  3380  	// cond: is32Bit(c)
  3381  	// result: (MOVFload [int32(c)] ptr mem)
  3382  	for {
  3383  		if v_0.Op != OpLOONG64MOVVconst {
  3384  			break
  3385  		}
  3386  		c := auxIntToInt64(v_0.AuxInt)
  3387  		ptr := v_1
  3388  		mem := v_2
  3389  		if !(is32Bit(c)) {
  3390  			break
  3391  		}
  3392  		v.reset(OpLOONG64MOVFload)
  3393  		v.AuxInt = int32ToAuxInt(int32(c))
  3394  		v.AddArg2(ptr, mem)
  3395  		return true
  3396  	}
  3397  	return false
  3398  }
  3399  func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
  3400  	v_2 := v.Args[2]
  3401  	v_1 := v.Args[1]
  3402  	v_0 := v.Args[0]
  3403  	b := v.Block
  3404  	config := b.Func.Config
  3405  	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
  3406  	// result: (MOVWstore [off] {sym} ptr val mem)
  3407  	for {
  3408  		off := auxIntToInt32(v.AuxInt)
  3409  		sym := auxToSym(v.Aux)
  3410  		ptr := v_0
  3411  		if v_1.Op != OpLOONG64MOVWgpfp {
  3412  			break
  3413  		}
  3414  		val := v_1.Args[0]
  3415  		mem := v_2
  3416  		v.reset(OpLOONG64MOVWstore)
  3417  		v.AuxInt = int32ToAuxInt(off)
  3418  		v.Aux = symToAux(sym)
  3419  		v.AddArg3(ptr, val, mem)
  3420  		return true
  3421  	}
  3422  	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3423  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3424  	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
  3425  	for {
  3426  		off1 := auxIntToInt32(v.AuxInt)
  3427  		sym := auxToSym(v.Aux)
  3428  		if v_0.Op != OpLOONG64ADDVconst {
  3429  			break
  3430  		}
  3431  		off2 := auxIntToInt64(v_0.AuxInt)
  3432  		ptr := v_0.Args[0]
  3433  		val := v_1
  3434  		mem := v_2
  3435  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3436  			break
  3437  		}
  3438  		v.reset(OpLOONG64MOVFstore)
  3439  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3440  		v.Aux = symToAux(sym)
  3441  		v.AddArg3(ptr, val, mem)
  3442  		return true
  3443  	}
  3444  	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3445  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3446  	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3447  	for {
  3448  		off1 := auxIntToInt32(v.AuxInt)
  3449  		sym1 := auxToSym(v.Aux)
  3450  		if v_0.Op != OpLOONG64MOVVaddr {
  3451  			break
  3452  		}
  3453  		off2 := auxIntToInt32(v_0.AuxInt)
  3454  		sym2 := auxToSym(v_0.Aux)
  3455  		ptr := v_0.Args[0]
  3456  		val := v_1
  3457  		mem := v_2
  3458  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3459  			break
  3460  		}
  3461  		v.reset(OpLOONG64MOVFstore)
  3462  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3463  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3464  		v.AddArg3(ptr, val, mem)
  3465  		return true
  3466  	}
  3467  	// match: (MOVFstore [off] {sym} (ADDV ptr idx) val mem)
  3468  	// cond: off == 0 && sym == nil
  3469  	// result: (MOVFstoreidx ptr idx val mem)
  3470  	for {
  3471  		off := auxIntToInt32(v.AuxInt)
  3472  		sym := auxToSym(v.Aux)
  3473  		if v_0.Op != OpLOONG64ADDV {
  3474  			break
  3475  		}
  3476  		idx := v_0.Args[1]
  3477  		ptr := v_0.Args[0]
  3478  		val := v_1
  3479  		mem := v_2
  3480  		if !(off == 0 && sym == nil) {
  3481  			break
  3482  		}
  3483  		v.reset(OpLOONG64MOVFstoreidx)
  3484  		v.AddArg4(ptr, idx, val, mem)
  3485  		return true
  3486  	}
  3487  	return false
  3488  }
  3489  func rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v *Value) bool {
  3490  	v_3 := v.Args[3]
  3491  	v_2 := v.Args[2]
  3492  	v_1 := v.Args[1]
  3493  	v_0 := v.Args[0]
  3494  	// match: (MOVFstoreidx ptr (MOVVconst [c]) val mem)
  3495  	// cond: is32Bit(c)
  3496  	// result: (MOVFstore [int32(c)] ptr val mem)
  3497  	for {
  3498  		ptr := v_0
  3499  		if v_1.Op != OpLOONG64MOVVconst {
  3500  			break
  3501  		}
  3502  		c := auxIntToInt64(v_1.AuxInt)
  3503  		val := v_2
  3504  		mem := v_3
  3505  		if !(is32Bit(c)) {
  3506  			break
  3507  		}
  3508  		v.reset(OpLOONG64MOVFstore)
  3509  		v.AuxInt = int32ToAuxInt(int32(c))
  3510  		v.AddArg3(ptr, val, mem)
  3511  		return true
  3512  	}
  3513  	// match: (MOVFstoreidx (MOVVconst [c]) idx val mem)
  3514  	// cond: is32Bit(c)
  3515  	// result: (MOVFstore [int32(c)] idx val mem)
  3516  	for {
  3517  		if v_0.Op != OpLOONG64MOVVconst {
  3518  			break
  3519  		}
  3520  		c := auxIntToInt64(v_0.AuxInt)
  3521  		idx := v_1
  3522  		val := v_2
  3523  		mem := v_3
  3524  		if !(is32Bit(c)) {
  3525  			break
  3526  		}
  3527  		v.reset(OpLOONG64MOVFstore)
  3528  		v.AuxInt = int32ToAuxInt(int32(c))
  3529  		v.AddArg3(idx, val, mem)
  3530  		return true
  3531  	}
  3532  	return false
  3533  }
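        // rewriteValueLOONG64_OpLOONG64MOVHUload folds ADDVconst offsets and MOVVaddr
        // symbols into the unsigned halfword load, and rewrites loads from a reg+reg
        // address (ADDV) into MOVHUloadidx.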
  3534  func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
  3535  	v_1 := v.Args[1]
  3536  	v_0 := v.Args[0]
  3537  	b := v.Block
  3538  	config := b.Func.Config
  3539  	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3540  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3541  	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
  3542  	for {
  3543  		off1 := auxIntToInt32(v.AuxInt)
  3544  		sym := auxToSym(v.Aux)
  3545  		if v_0.Op != OpLOONG64ADDVconst {
  3546  			break
  3547  		}
  3548  		off2 := auxIntToInt64(v_0.AuxInt)
  3549  		ptr := v_0.Args[0]
  3550  		mem := v_1
  3551  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3552  			break
  3553  		}
  3554  		v.reset(OpLOONG64MOVHUload)
  3555  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3556  		v.Aux = symToAux(sym)
  3557  		v.AddArg2(ptr, mem)
  3558  		return true
  3559  	}
  3560  	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3561  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3562  	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3563  	for {
  3564  		off1 := auxIntToInt32(v.AuxInt)
  3565  		sym1 := auxToSym(v.Aux)
  3566  		if v_0.Op != OpLOONG64MOVVaddr {
  3567  			break
  3568  		}
  3569  		off2 := auxIntToInt32(v_0.AuxInt)
  3570  		sym2 := auxToSym(v_0.Aux)
  3571  		ptr := v_0.Args[0]
  3572  		mem := v_1
  3573  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3574  			break
  3575  		}
  3576  		v.reset(OpLOONG64MOVHUload)
  3577  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3578  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3579  		v.AddArg2(ptr, mem)
  3580  		return true
  3581  	}
  3582  	// match: (MOVHUload [off] {sym} (ADDV ptr idx) mem)
  3583  	// cond: off == 0 && sym == nil
  3584  	// result: (MOVHUloadidx ptr idx mem)
  3585  	for {
  3586  		off := auxIntToInt32(v.AuxInt)
  3587  		sym := auxToSym(v.Aux)
  3588  		if v_0.Op != OpLOONG64ADDV {
  3589  			break
  3590  		}
  3591  		idx := v_0.Args[1]
  3592  		ptr := v_0.Args[0]
  3593  		mem := v_1
  3594  		if !(off == 0 && sym == nil) {
  3595  			break
  3596  		}
  3597  		v.reset(OpLOONG64MOVHUloadidx)
  3598  		v.AddArg3(ptr, idx, mem)
  3599  		return true
  3600  	}
  3601  	return false
  3602  }
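        // rewriteValueLOONG64_OpLOONG64MOVHUloadidx converts an indexed load back to the
        // offset form MOVHUload when either address operand is a 32-bit constant.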
  3603  func rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v *Value) bool {
  3604  	v_2 := v.Args[2]
  3605  	v_1 := v.Args[1]
  3606  	v_0 := v.Args[0]
  3607  	// match: (MOVHUloadidx ptr (MOVVconst [c]) mem)
  3608  	// cond: is32Bit(c)
  3609  	// result: (MOVHUload [int32(c)] ptr mem)
  3610  	for {
  3611  		ptr := v_0
  3612  		if v_1.Op != OpLOONG64MOVVconst {
  3613  			break
  3614  		}
  3615  		c := auxIntToInt64(v_1.AuxInt)
  3616  		mem := v_2
  3617  		if !(is32Bit(c)) {
  3618  			break
  3619  		}
  3620  		v.reset(OpLOONG64MOVHUload)
  3621  		v.AuxInt = int32ToAuxInt(int32(c))
  3622  		v.AddArg2(ptr, mem)
  3623  		return true
  3624  	}
  3625  	// match: (MOVHUloadidx (MOVVconst [c]) ptr mem)
  3626  	// cond: is32Bit(c)
  3627  	// result: (MOVHUload [int32(c)] ptr mem)
  3628  	for {
  3629  		if v_0.Op != OpLOONG64MOVVconst {
  3630  			break
  3631  		}
  3632  		c := auxIntToInt64(v_0.AuxInt)
  3633  		ptr := v_1
  3634  		mem := v_2
  3635  		if !(is32Bit(c)) {
  3636  			break
  3637  		}
  3638  		v.reset(OpLOONG64MOVHUload)
  3639  		v.AuxInt = int32ToAuxInt(int32(c))
  3640  		v.AddArg2(ptr, mem)
  3641  		return true
  3642  	}
  3643  	return false
  3644  }
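        // rewriteValueLOONG64_OpLOONG64MOVHUreg simplifies 16-bit zero extensions: a right
        // shift feeding the extension becomes a single BSTRPICKV bit-field extract (the
        // auxint appears to pack lsb=rc and msb=rc+15), values that are already
        // zero-extended (unsigned byte/halfword loads and extensions) reduce to MOVVreg,
        // shifts that clear the low half and constants are folded (for instance,
        // MOVHUreg (MOVVconst [0x12345]) becomes MOVVconst [0x2345]), and an AND mask
        // confined to the low 16 bits makes the extension a no-op.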
  3645  func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool {
  3646  	v_0 := v.Args[0]
  3647  	// match: (MOVHUreg (SRLVconst [rc] x))
  3648  	// cond: rc < 16
  3649  	// result: (BSTRPICKV [rc + (15+rc)<<6] x)
  3650  	for {
  3651  		if v_0.Op != OpLOONG64SRLVconst {
  3652  			break
  3653  		}
  3654  		rc := auxIntToInt64(v_0.AuxInt)
  3655  		x := v_0.Args[0]
  3656  		if !(rc < 16) {
  3657  			break
  3658  		}
  3659  		v.reset(OpLOONG64BSTRPICKV)
  3660  		v.AuxInt = int64ToAuxInt(rc + (15+rc)<<6)
  3661  		v.AddArg(x)
  3662  		return true
  3663  	}
  3664  	// match: (MOVHUreg x:(MOVBUload _ _))
  3665  	// result: (MOVVreg x)
  3666  	for {
  3667  		x := v_0
  3668  		if x.Op != OpLOONG64MOVBUload {
  3669  			break
  3670  		}
  3671  		v.reset(OpLOONG64MOVVreg)
  3672  		v.AddArg(x)
  3673  		return true
  3674  	}
  3675  	// match: (MOVHUreg x:(MOVHUload _ _))
  3676  	// result: (MOVVreg x)
  3677  	for {
  3678  		x := v_0
  3679  		if x.Op != OpLOONG64MOVHUload {
  3680  			break
  3681  		}
  3682  		v.reset(OpLOONG64MOVVreg)
  3683  		v.AddArg(x)
  3684  		return true
  3685  	}
  3686  	// match: (MOVHUreg x:(MOVBUreg _))
  3687  	// result: (MOVVreg x)
  3688  	for {
  3689  		x := v_0
  3690  		if x.Op != OpLOONG64MOVBUreg {
  3691  			break
  3692  		}
  3693  		v.reset(OpLOONG64MOVVreg)
  3694  		v.AddArg(x)
  3695  		return true
  3696  	}
  3697  	// match: (MOVHUreg x:(MOVHUreg _))
  3698  	// result: (MOVVreg x)
  3699  	for {
  3700  		x := v_0
  3701  		if x.Op != OpLOONG64MOVHUreg {
  3702  			break
  3703  		}
  3704  		v.reset(OpLOONG64MOVVreg)
  3705  		v.AddArg(x)
  3706  		return true
  3707  	}
  3708  	// match: (MOVHUreg (SLLVconst [lc] x))
  3709  	// cond: lc >= 16
  3710  	// result: (MOVVconst [0])
  3711  	for {
  3712  		if v_0.Op != OpLOONG64SLLVconst {
  3713  			break
  3714  		}
  3715  		lc := auxIntToInt64(v_0.AuxInt)
  3716  		if !(lc >= 16) {
  3717  			break
  3718  		}
  3719  		v.reset(OpLOONG64MOVVconst)
  3720  		v.AuxInt = int64ToAuxInt(0)
  3721  		return true
  3722  	}
  3723  	// match: (MOVHUreg (MOVVconst [c]))
  3724  	// result: (MOVVconst [int64(uint16(c))])
  3725  	for {
  3726  		if v_0.Op != OpLOONG64MOVVconst {
  3727  			break
  3728  		}
  3729  		c := auxIntToInt64(v_0.AuxInt)
  3730  		v.reset(OpLOONG64MOVVconst)
  3731  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  3732  		return true
  3733  	}
  3734  	// match: (MOVHUreg x:(ANDconst [c] y))
  3735  	// cond: c >= 0 && int64(uint16(c)) == c
  3736  	// result: x
  3737  	for {
  3738  		x := v_0
  3739  		if x.Op != OpLOONG64ANDconst {
  3740  			break
  3741  		}
  3742  		c := auxIntToInt64(x.AuxInt)
  3743  		if !(c >= 0 && int64(uint16(c)) == c) {
  3744  			break
  3745  		}
  3746  		v.copyOf(x)
  3747  		return true
  3748  	}
  3749  	return false
  3750  }
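        // rewriteValueLOONG64_OpLOONG64MOVHload applies the same address folding as
        // MOVHUload above to the signed halfword load: ADDVconst/MOVVaddr offsets are
        // absorbed and reg+reg addresses become MOVHloadidx.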
  3751  func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
  3752  	v_1 := v.Args[1]
  3753  	v_0 := v.Args[0]
  3754  	b := v.Block
  3755  	config := b.Func.Config
  3756  	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3757  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3758  	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
  3759  	for {
  3760  		off1 := auxIntToInt32(v.AuxInt)
  3761  		sym := auxToSym(v.Aux)
  3762  		if v_0.Op != OpLOONG64ADDVconst {
  3763  			break
  3764  		}
  3765  		off2 := auxIntToInt64(v_0.AuxInt)
  3766  		ptr := v_0.Args[0]
  3767  		mem := v_1
  3768  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3769  			break
  3770  		}
  3771  		v.reset(OpLOONG64MOVHload)
  3772  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3773  		v.Aux = symToAux(sym)
  3774  		v.AddArg2(ptr, mem)
  3775  		return true
  3776  	}
  3777  	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3778  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3779  	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3780  	for {
  3781  		off1 := auxIntToInt32(v.AuxInt)
  3782  		sym1 := auxToSym(v.Aux)
  3783  		if v_0.Op != OpLOONG64MOVVaddr {
  3784  			break
  3785  		}
  3786  		off2 := auxIntToInt32(v_0.AuxInt)
  3787  		sym2 := auxToSym(v_0.Aux)
  3788  		ptr := v_0.Args[0]
  3789  		mem := v_1
  3790  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3791  			break
  3792  		}
  3793  		v.reset(OpLOONG64MOVHload)
  3794  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3795  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3796  		v.AddArg2(ptr, mem)
  3797  		return true
  3798  	}
  3799  	// match: (MOVHload [off] {sym} (ADDV ptr idx) mem)
  3800  	// cond: off == 0 && sym == nil
  3801  	// result: (MOVHloadidx ptr idx mem)
  3802  	for {
  3803  		off := auxIntToInt32(v.AuxInt)
  3804  		sym := auxToSym(v.Aux)
  3805  		if v_0.Op != OpLOONG64ADDV {
  3806  			break
  3807  		}
  3808  		idx := v_0.Args[1]
  3809  		ptr := v_0.Args[0]
  3810  		mem := v_1
  3811  		if !(off == 0 && sym == nil) {
  3812  			break
  3813  		}
  3814  		v.reset(OpLOONG64MOVHloadidx)
  3815  		v.AddArg3(ptr, idx, mem)
  3816  		return true
  3817  	}
  3818  	return false
  3819  }
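        // rewriteValueLOONG64_OpLOONG64MOVHloadidx folds a 32-bit constant address
        // operand into the offset of a plain MOVHload.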
  3820  func rewriteValueLOONG64_OpLOONG64MOVHloadidx(v *Value) bool {
  3821  	v_2 := v.Args[2]
  3822  	v_1 := v.Args[1]
  3823  	v_0 := v.Args[0]
  3824  	// match: (MOVHloadidx ptr (MOVVconst [c]) mem)
  3825  	// cond: is32Bit(c)
  3826  	// result: (MOVHload [int32(c)] ptr mem)
  3827  	for {
  3828  		ptr := v_0
  3829  		if v_1.Op != OpLOONG64MOVVconst {
  3830  			break
  3831  		}
  3832  		c := auxIntToInt64(v_1.AuxInt)
  3833  		mem := v_2
  3834  		if !(is32Bit(c)) {
  3835  			break
  3836  		}
  3837  		v.reset(OpLOONG64MOVHload)
  3838  		v.AuxInt = int32ToAuxInt(int32(c))
  3839  		v.AddArg2(ptr, mem)
  3840  		return true
  3841  	}
  3842  	// match: (MOVHloadidx (MOVVconst [c]) ptr mem)
  3843  	// cond: is32Bit(c)
  3844  	// result: (MOVHload [int32(c)] ptr mem)
  3845  	for {
  3846  		if v_0.Op != OpLOONG64MOVVconst {
  3847  			break
  3848  		}
  3849  		c := auxIntToInt64(v_0.AuxInt)
  3850  		ptr := v_1
  3851  		mem := v_2
  3852  		if !(is32Bit(c)) {
  3853  			break
  3854  		}
  3855  		v.reset(OpLOONG64MOVHload)
  3856  		v.AuxInt = int32ToAuxInt(int32(c))
  3857  		v.AddArg2(ptr, mem)
  3858  		return true
  3859  	}
  3860  	return false
  3861  }
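        // rewriteValueLOONG64_OpLOONG64MOVHreg drops redundant 16-bit sign extensions:
        // values produced by narrower loads or extensions that already fit in int16
        // become a plain MOVVreg, constants are sign-extended at compile time, and a
        // non-negative AND mask no larger than 0x7fff leaves the operand unchanged.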
  3862  func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool {
  3863  	v_0 := v.Args[0]
  3864  	// match: (MOVHreg x:(MOVBload _ _))
  3865  	// result: (MOVVreg x)
  3866  	for {
  3867  		x := v_0
  3868  		if x.Op != OpLOONG64MOVBload {
  3869  			break
  3870  		}
  3871  		v.reset(OpLOONG64MOVVreg)
  3872  		v.AddArg(x)
  3873  		return true
  3874  	}
  3875  	// match: (MOVHreg x:(MOVBUload _ _))
  3876  	// result: (MOVVreg x)
  3877  	for {
  3878  		x := v_0
  3879  		if x.Op != OpLOONG64MOVBUload {
  3880  			break
  3881  		}
  3882  		v.reset(OpLOONG64MOVVreg)
  3883  		v.AddArg(x)
  3884  		return true
  3885  	}
  3886  	// match: (MOVHreg x:(MOVHload _ _))
  3887  	// result: (MOVVreg x)
  3888  	for {
  3889  		x := v_0
  3890  		if x.Op != OpLOONG64MOVHload {
  3891  			break
  3892  		}
  3893  		v.reset(OpLOONG64MOVVreg)
  3894  		v.AddArg(x)
  3895  		return true
  3896  	}
  3897  	// match: (MOVHreg x:(MOVBreg _))
  3898  	// result: (MOVVreg x)
  3899  	for {
  3900  		x := v_0
  3901  		if x.Op != OpLOONG64MOVBreg {
  3902  			break
  3903  		}
  3904  		v.reset(OpLOONG64MOVVreg)
  3905  		v.AddArg(x)
  3906  		return true
  3907  	}
  3908  	// match: (MOVHreg x:(MOVBUreg _))
  3909  	// result: (MOVVreg x)
  3910  	for {
  3911  		x := v_0
  3912  		if x.Op != OpLOONG64MOVBUreg {
  3913  			break
  3914  		}
  3915  		v.reset(OpLOONG64MOVVreg)
  3916  		v.AddArg(x)
  3917  		return true
  3918  	}
  3919  	// match: (MOVHreg x:(MOVHreg _))
  3920  	// result: (MOVVreg x)
  3921  	for {
  3922  		x := v_0
  3923  		if x.Op != OpLOONG64MOVHreg {
  3924  			break
  3925  		}
  3926  		v.reset(OpLOONG64MOVVreg)
  3927  		v.AddArg(x)
  3928  		return true
  3929  	}
  3930  	// match: (MOVHreg (MOVVconst [c]))
  3931  	// result: (MOVVconst [int64(int16(c))])
  3932  	for {
  3933  		if v_0.Op != OpLOONG64MOVVconst {
  3934  			break
  3935  		}
  3936  		c := auxIntToInt64(v_0.AuxInt)
  3937  		v.reset(OpLOONG64MOVVconst)
  3938  		v.AuxInt = int64ToAuxInt(int64(int16(c)))
  3939  		return true
  3940  	}
  3941  	// match: (MOVHreg x:(ANDconst [c] y))
  3942  	// cond: c >= 0 && int64(int16(c)) == c
  3943  	// result: x
  3944  	for {
  3945  		x := v_0
  3946  		if x.Op != OpLOONG64ANDconst {
  3947  			break
  3948  		}
  3949  		c := auxIntToInt64(x.AuxInt)
  3950  		if !(c >= 0 && int64(int16(c)) == c) {
  3951  			break
  3952  		}
  3953  		v.copyOf(x)
  3954  		return true
  3955  	}
  3956  	return false
  3957  }
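        // rewriteValueLOONG64_OpLOONG64MOVHstore folds constant offsets and symbols into
        // the halfword store, drops MOVHreg/MOVHUreg/MOVWreg/MOVWUreg extensions of the
        // stored value (only the low 16 bits are written anyway), turns stores of
        // constant zero into MOVHstorezero, and uses MOVHstoreidx for reg+reg addresses.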
  3958  func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
  3959  	v_2 := v.Args[2]
  3960  	v_1 := v.Args[1]
  3961  	v_0 := v.Args[0]
  3962  	b := v.Block
  3963  	config := b.Func.Config
  3964  	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3965  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3966  	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
  3967  	for {
  3968  		off1 := auxIntToInt32(v.AuxInt)
  3969  		sym := auxToSym(v.Aux)
  3970  		if v_0.Op != OpLOONG64ADDVconst {
  3971  			break
  3972  		}
  3973  		off2 := auxIntToInt64(v_0.AuxInt)
  3974  		ptr := v_0.Args[0]
  3975  		val := v_1
  3976  		mem := v_2
  3977  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3978  			break
  3979  		}
  3980  		v.reset(OpLOONG64MOVHstore)
  3981  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3982  		v.Aux = symToAux(sym)
  3983  		v.AddArg3(ptr, val, mem)
  3984  		return true
  3985  	}
  3986  	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3987  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3988  	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3989  	for {
  3990  		off1 := auxIntToInt32(v.AuxInt)
  3991  		sym1 := auxToSym(v.Aux)
  3992  		if v_0.Op != OpLOONG64MOVVaddr {
  3993  			break
  3994  		}
  3995  		off2 := auxIntToInt32(v_0.AuxInt)
  3996  		sym2 := auxToSym(v_0.Aux)
  3997  		ptr := v_0.Args[0]
  3998  		val := v_1
  3999  		mem := v_2
  4000  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4001  			break
  4002  		}
  4003  		v.reset(OpLOONG64MOVHstore)
  4004  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4005  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4006  		v.AddArg3(ptr, val, mem)
  4007  		return true
  4008  	}
  4009  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4010  	// result: (MOVHstore [off] {sym} ptr x mem)
  4011  	for {
  4012  		off := auxIntToInt32(v.AuxInt)
  4013  		sym := auxToSym(v.Aux)
  4014  		ptr := v_0
  4015  		if v_1.Op != OpLOONG64MOVHreg {
  4016  			break
  4017  		}
  4018  		x := v_1.Args[0]
  4019  		mem := v_2
  4020  		v.reset(OpLOONG64MOVHstore)
  4021  		v.AuxInt = int32ToAuxInt(off)
  4022  		v.Aux = symToAux(sym)
  4023  		v.AddArg3(ptr, x, mem)
  4024  		return true
  4025  	}
  4026  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4027  	// result: (MOVHstore [off] {sym} ptr x mem)
  4028  	for {
  4029  		off := auxIntToInt32(v.AuxInt)
  4030  		sym := auxToSym(v.Aux)
  4031  		ptr := v_0
  4032  		if v_1.Op != OpLOONG64MOVHUreg {
  4033  			break
  4034  		}
  4035  		x := v_1.Args[0]
  4036  		mem := v_2
  4037  		v.reset(OpLOONG64MOVHstore)
  4038  		v.AuxInt = int32ToAuxInt(off)
  4039  		v.Aux = symToAux(sym)
  4040  		v.AddArg3(ptr, x, mem)
  4041  		return true
  4042  	}
  4043  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4044  	// result: (MOVHstore [off] {sym} ptr x mem)
  4045  	for {
  4046  		off := auxIntToInt32(v.AuxInt)
  4047  		sym := auxToSym(v.Aux)
  4048  		ptr := v_0
  4049  		if v_1.Op != OpLOONG64MOVWreg {
  4050  			break
  4051  		}
  4052  		x := v_1.Args[0]
  4053  		mem := v_2
  4054  		v.reset(OpLOONG64MOVHstore)
  4055  		v.AuxInt = int32ToAuxInt(off)
  4056  		v.Aux = symToAux(sym)
  4057  		v.AddArg3(ptr, x, mem)
  4058  		return true
  4059  	}
  4060  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  4061  	// result: (MOVHstore [off] {sym} ptr x mem)
  4062  	for {
  4063  		off := auxIntToInt32(v.AuxInt)
  4064  		sym := auxToSym(v.Aux)
  4065  		ptr := v_0
  4066  		if v_1.Op != OpLOONG64MOVWUreg {
  4067  			break
  4068  		}
  4069  		x := v_1.Args[0]
  4070  		mem := v_2
  4071  		v.reset(OpLOONG64MOVHstore)
  4072  		v.AuxInt = int32ToAuxInt(off)
  4073  		v.Aux = symToAux(sym)
  4074  		v.AddArg3(ptr, x, mem)
  4075  		return true
  4076  	}
  4077  	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
  4078  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4079  	for {
  4080  		off := auxIntToInt32(v.AuxInt)
  4081  		sym := auxToSym(v.Aux)
  4082  		ptr := v_0
  4083  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  4084  			break
  4085  		}
  4086  		mem := v_2
  4087  		v.reset(OpLOONG64MOVHstorezero)
  4088  		v.AuxInt = int32ToAuxInt(off)
  4089  		v.Aux = symToAux(sym)
  4090  		v.AddArg2(ptr, mem)
  4091  		return true
  4092  	}
  4093  	// match: (MOVHstore [off] {sym} (ADDV ptr idx) val mem)
  4094  	// cond: off == 0 && sym == nil
  4095  	// result: (MOVHstoreidx ptr idx val mem)
  4096  	for {
  4097  		off := auxIntToInt32(v.AuxInt)
  4098  		sym := auxToSym(v.Aux)
  4099  		if v_0.Op != OpLOONG64ADDV {
  4100  			break
  4101  		}
  4102  		idx := v_0.Args[1]
  4103  		ptr := v_0.Args[0]
  4104  		val := v_1
  4105  		mem := v_2
  4106  		if !(off == 0 && sym == nil) {
  4107  			break
  4108  		}
  4109  		v.reset(OpLOONG64MOVHstoreidx)
  4110  		v.AddArg4(ptr, idx, val, mem)
  4111  		return true
  4112  	}
  4113  	return false
  4114  }
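        // rewriteValueLOONG64_OpLOONG64MOVHstoreidx folds a 32-bit constant address
        // operand into a MOVHstore offset and rewrites stores of zero to
        // MOVHstorezeroidx.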
  4115  func rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v *Value) bool {
  4116  	v_3 := v.Args[3]
  4117  	v_2 := v.Args[2]
  4118  	v_1 := v.Args[1]
  4119  	v_0 := v.Args[0]
  4120  	// match: (MOVHstoreidx ptr (MOVVconst [c]) val mem)
  4121  	// cond: is32Bit(c)
  4122  	// result: (MOVHstore [int32(c)] ptr val mem)
  4123  	for {
  4124  		ptr := v_0
  4125  		if v_1.Op != OpLOONG64MOVVconst {
  4126  			break
  4127  		}
  4128  		c := auxIntToInt64(v_1.AuxInt)
  4129  		val := v_2
  4130  		mem := v_3
  4131  		if !(is32Bit(c)) {
  4132  			break
  4133  		}
  4134  		v.reset(OpLOONG64MOVHstore)
  4135  		v.AuxInt = int32ToAuxInt(int32(c))
  4136  		v.AddArg3(ptr, val, mem)
  4137  		return true
  4138  	}
  4139  	// match: (MOVHstoreidx (MOVVconst [c]) idx val mem)
  4140  	// cond: is32Bit(c)
  4141  	// result: (MOVHstore [int32(c)] idx val mem)
  4142  	for {
  4143  		if v_0.Op != OpLOONG64MOVVconst {
  4144  			break
  4145  		}
  4146  		c := auxIntToInt64(v_0.AuxInt)
  4147  		idx := v_1
  4148  		val := v_2
  4149  		mem := v_3
  4150  		if !(is32Bit(c)) {
  4151  			break
  4152  		}
  4153  		v.reset(OpLOONG64MOVHstore)
  4154  		v.AuxInt = int32ToAuxInt(int32(c))
  4155  		v.AddArg3(idx, val, mem)
  4156  		return true
  4157  	}
  4158  	// match: (MOVHstoreidx ptr idx (MOVVconst [0]) mem)
  4159  	// result: (MOVHstorezeroidx ptr idx mem)
  4160  	for {
  4161  		ptr := v_0
  4162  		idx := v_1
  4163  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  4164  			break
  4165  		}
  4166  		mem := v_3
  4167  		v.reset(OpLOONG64MOVHstorezeroidx)
  4168  		v.AddArg3(ptr, idx, mem)
  4169  		return true
  4170  	}
  4171  	return false
  4172  }
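        // rewriteValueLOONG64_OpLOONG64MOVHstorezero performs the usual address folding
        // (ADDVconst offsets, MOVVaddr symbols, reg+reg to the idx form) for the
        // zero-storing variant of the halfword store.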
  4173  func rewriteValueLOONG64_OpLOONG64MOVHstorezero(v *Value) bool {
  4174  	v_1 := v.Args[1]
  4175  	v_0 := v.Args[0]
  4176  	b := v.Block
  4177  	config := b.Func.Config
  4178  	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  4179  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4180  	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
  4181  	for {
  4182  		off1 := auxIntToInt32(v.AuxInt)
  4183  		sym := auxToSym(v.Aux)
  4184  		if v_0.Op != OpLOONG64ADDVconst {
  4185  			break
  4186  		}
  4187  		off2 := auxIntToInt64(v_0.AuxInt)
  4188  		ptr := v_0.Args[0]
  4189  		mem := v_1
  4190  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4191  			break
  4192  		}
  4193  		v.reset(OpLOONG64MOVHstorezero)
  4194  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4195  		v.Aux = symToAux(sym)
  4196  		v.AddArg2(ptr, mem)
  4197  		return true
  4198  	}
  4199  	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4200  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4201  	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4202  	for {
  4203  		off1 := auxIntToInt32(v.AuxInt)
  4204  		sym1 := auxToSym(v.Aux)
  4205  		if v_0.Op != OpLOONG64MOVVaddr {
  4206  			break
  4207  		}
  4208  		off2 := auxIntToInt32(v_0.AuxInt)
  4209  		sym2 := auxToSym(v_0.Aux)
  4210  		ptr := v_0.Args[0]
  4211  		mem := v_1
  4212  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4213  			break
  4214  		}
  4215  		v.reset(OpLOONG64MOVHstorezero)
  4216  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4217  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4218  		v.AddArg2(ptr, mem)
  4219  		return true
  4220  	}
  4221  	// match: (MOVHstorezero [off] {sym} (ADDV ptr idx) mem)
  4222  	// cond: off == 0 && sym == nil
  4223  	// result: (MOVHstorezeroidx ptr idx mem)
  4224  	for {
  4225  		off := auxIntToInt32(v.AuxInt)
  4226  		sym := auxToSym(v.Aux)
  4227  		if v_0.Op != OpLOONG64ADDV {
  4228  			break
  4229  		}
  4230  		idx := v_0.Args[1]
  4231  		ptr := v_0.Args[0]
  4232  		mem := v_1
  4233  		if !(off == 0 && sym == nil) {
  4234  			break
  4235  		}
  4236  		v.reset(OpLOONG64MOVHstorezeroidx)
  4237  		v.AddArg3(ptr, idx, mem)
  4238  		return true
  4239  	}
  4240  	return false
  4241  }
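        // rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx moves a constant index back into
        // the offset of MOVHstorezero.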
  4242  func rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx(v *Value) bool {
  4243  	v_2 := v.Args[2]
  4244  	v_1 := v.Args[1]
  4245  	v_0 := v.Args[0]
  4246  	// match: (MOVHstorezeroidx ptr (MOVVconst [c]) mem)
  4247  	// cond: is32Bit(c)
  4248  	// result: (MOVHstorezero [int32(c)] ptr mem)
  4249  	for {
  4250  		ptr := v_0
  4251  		if v_1.Op != OpLOONG64MOVVconst {
  4252  			break
  4253  		}
  4254  		c := auxIntToInt64(v_1.AuxInt)
  4255  		mem := v_2
  4256  		if !(is32Bit(c)) {
  4257  			break
  4258  		}
  4259  		v.reset(OpLOONG64MOVHstorezero)
  4260  		v.AuxInt = int32ToAuxInt(int32(c))
  4261  		v.AddArg2(ptr, mem)
  4262  		return true
  4263  	}
  4264  	// match: (MOVHstorezeroidx (MOVVconst [c]) idx mem)
  4265  	// cond: is32Bit(c)
  4266  	// result: (MOVHstorezero [int32(c)] idx mem)
  4267  	for {
  4268  		if v_0.Op != OpLOONG64MOVVconst {
  4269  			break
  4270  		}
  4271  		c := auxIntToInt64(v_0.AuxInt)
  4272  		idx := v_1
  4273  		mem := v_2
  4274  		if !(is32Bit(c)) {
  4275  			break
  4276  		}
  4277  		v.reset(OpLOONG64MOVHstorezero)
  4278  		v.AuxInt = int32ToAuxInt(int32(c))
  4279  		v.AddArg2(idx, mem)
  4280  		return true
  4281  	}
  4282  	return false
  4283  }
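        // rewriteValueLOONG64_OpLOONG64MOVVload lowers 64-bit loads: reloading a value
        // that the same address just received from an FP store (MOVDstore) becomes a
        // direct FP-to-GP register move (MOVVfpgp), and the usual offset/symbol/index
        // address folding applies.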
  4284  func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
  4285  	v_1 := v.Args[1]
  4286  	v_0 := v.Args[0]
  4287  	b := v.Block
  4288  	config := b.Func.Config
  4289  	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
  4290  	// result: (MOVVfpgp val)
  4291  	for {
  4292  		off := auxIntToInt32(v.AuxInt)
  4293  		sym := auxToSym(v.Aux)
  4294  		ptr := v_0
  4295  		if v_1.Op != OpLOONG64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  4296  			break
  4297  		}
  4298  		val := v_1.Args[1]
  4299  		if ptr != v_1.Args[0] {
  4300  			break
  4301  		}
  4302  		v.reset(OpLOONG64MOVVfpgp)
  4303  		v.AddArg(val)
  4304  		return true
  4305  	}
  4306  	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4307  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4308  	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
  4309  	for {
  4310  		off1 := auxIntToInt32(v.AuxInt)
  4311  		sym := auxToSym(v.Aux)
  4312  		if v_0.Op != OpLOONG64ADDVconst {
  4313  			break
  4314  		}
  4315  		off2 := auxIntToInt64(v_0.AuxInt)
  4316  		ptr := v_0.Args[0]
  4317  		mem := v_1
  4318  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4319  			break
  4320  		}
  4321  		v.reset(OpLOONG64MOVVload)
  4322  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4323  		v.Aux = symToAux(sym)
  4324  		v.AddArg2(ptr, mem)
  4325  		return true
  4326  	}
  4327  	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4328  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4329  	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4330  	for {
  4331  		off1 := auxIntToInt32(v.AuxInt)
  4332  		sym1 := auxToSym(v.Aux)
  4333  		if v_0.Op != OpLOONG64MOVVaddr {
  4334  			break
  4335  		}
  4336  		off2 := auxIntToInt32(v_0.AuxInt)
  4337  		sym2 := auxToSym(v_0.Aux)
  4338  		ptr := v_0.Args[0]
  4339  		mem := v_1
  4340  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4341  			break
  4342  		}
  4343  		v.reset(OpLOONG64MOVVload)
  4344  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4345  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4346  		v.AddArg2(ptr, mem)
  4347  		return true
  4348  	}
  4349  	// match: (MOVVload [off] {sym} (ADDV ptr idx) mem)
  4350  	// cond: off == 0 && sym == nil
  4351  	// result: (MOVVloadidx ptr idx mem)
  4352  	for {
  4353  		off := auxIntToInt32(v.AuxInt)
  4354  		sym := auxToSym(v.Aux)
  4355  		if v_0.Op != OpLOONG64ADDV {
  4356  			break
  4357  		}
  4358  		idx := v_0.Args[1]
  4359  		ptr := v_0.Args[0]
  4360  		mem := v_1
  4361  		if !(off == 0 && sym == nil) {
  4362  			break
  4363  		}
  4364  		v.reset(OpLOONG64MOVVloadidx)
  4365  		v.AddArg3(ptr, idx, mem)
  4366  		return true
  4367  	}
  4368  	return false
  4369  }
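        // rewriteValueLOONG64_OpLOONG64MOVVloadidx folds a 32-bit constant address
        // operand into the offset of MOVVload.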
  4370  func rewriteValueLOONG64_OpLOONG64MOVVloadidx(v *Value) bool {
  4371  	v_2 := v.Args[2]
  4372  	v_1 := v.Args[1]
  4373  	v_0 := v.Args[0]
  4374  	// match: (MOVVloadidx ptr (MOVVconst [c]) mem)
  4375  	// cond: is32Bit(c)
  4376  	// result: (MOVVload [int32(c)] ptr mem)
  4377  	for {
  4378  		ptr := v_0
  4379  		if v_1.Op != OpLOONG64MOVVconst {
  4380  			break
  4381  		}
  4382  		c := auxIntToInt64(v_1.AuxInt)
  4383  		mem := v_2
  4384  		if !(is32Bit(c)) {
  4385  			break
  4386  		}
  4387  		v.reset(OpLOONG64MOVVload)
  4388  		v.AuxInt = int32ToAuxInt(int32(c))
  4389  		v.AddArg2(ptr, mem)
  4390  		return true
  4391  	}
  4392  	// match: (MOVVloadidx (MOVVconst [c]) ptr mem)
  4393  	// cond: is32Bit(c)
  4394  	// result: (MOVVload [int32(c)] ptr mem)
  4395  	for {
  4396  		if v_0.Op != OpLOONG64MOVVconst {
  4397  			break
  4398  		}
  4399  		c := auxIntToInt64(v_0.AuxInt)
  4400  		ptr := v_1
  4401  		mem := v_2
  4402  		if !(is32Bit(c)) {
  4403  			break
  4404  		}
  4405  		v.reset(OpLOONG64MOVVload)
  4406  		v.AuxInt = int32ToAuxInt(int32(c))
  4407  		v.AddArg2(ptr, mem)
  4408  		return true
  4409  	}
  4410  	return false
  4411  }
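        // rewriteValueLOONG64_OpLOONG64MOVVnop folds a constant operand through the
        // no-op register copy, so the result is itself a MOVVconst.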
  4412  func rewriteValueLOONG64_OpLOONG64MOVVnop(v *Value) bool {
  4413  	v_0 := v.Args[0]
  4414  	// match: (MOVVnop (MOVVconst [c]))
  4415  	// result: (MOVVconst [c])
  4416  	for {
  4417  		if v_0.Op != OpLOONG64MOVVconst {
  4418  			break
  4419  		}
  4420  		c := auxIntToInt64(v_0.AuxInt)
  4421  		v.reset(OpLOONG64MOVVconst)
  4422  		v.AuxInt = int64ToAuxInt(c)
  4423  		return true
  4424  	}
  4425  	return false
  4426  }
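        // rewriteValueLOONG64_OpLOONG64MOVVreg replaces a single-use register copy with
        // MOVVnop (which is expected to emit no machine instruction) and folds copies
        // of constants.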
  4427  func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool {
  4428  	v_0 := v.Args[0]
  4429  	// match: (MOVVreg x)
  4430  	// cond: x.Uses == 1
  4431  	// result: (MOVVnop x)
  4432  	for {
  4433  		x := v_0
  4434  		if !(x.Uses == 1) {
  4435  			break
  4436  		}
  4437  		v.reset(OpLOONG64MOVVnop)
  4438  		v.AddArg(x)
  4439  		return true
  4440  	}
  4441  	// match: (MOVVreg (MOVVconst [c]))
  4442  	// result: (MOVVconst [c])
  4443  	for {
  4444  		if v_0.Op != OpLOONG64MOVVconst {
  4445  			break
  4446  		}
  4447  		c := auxIntToInt64(v_0.AuxInt)
  4448  		v.reset(OpLOONG64MOVVconst)
  4449  		v.AuxInt = int64ToAuxInt(c)
  4450  		return true
  4451  	}
  4452  	return false
  4453  }
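        // rewriteValueLOONG64_OpLOONG64MOVVstore writes a value coming straight out of an
        // FP register (MOVVfpgp) with MOVDstore instead, folds constant offsets and
        // symbols, turns stores of constant zero into MOVVstorezero, and uses
        // MOVVstoreidx for reg+reg addresses.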
  4454  func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
  4455  	v_2 := v.Args[2]
  4456  	v_1 := v.Args[1]
  4457  	v_0 := v.Args[0]
  4458  	b := v.Block
  4459  	config := b.Func.Config
  4460  	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
  4461  	// result: (MOVDstore [off] {sym} ptr val mem)
  4462  	for {
  4463  		off := auxIntToInt32(v.AuxInt)
  4464  		sym := auxToSym(v.Aux)
  4465  		ptr := v_0
  4466  		if v_1.Op != OpLOONG64MOVVfpgp {
  4467  			break
  4468  		}
  4469  		val := v_1.Args[0]
  4470  		mem := v_2
  4471  		v.reset(OpLOONG64MOVDstore)
  4472  		v.AuxInt = int32ToAuxInt(off)
  4473  		v.Aux = symToAux(sym)
  4474  		v.AddArg3(ptr, val, mem)
  4475  		return true
  4476  	}
  4477  	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  4478  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4479  	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
  4480  	for {
  4481  		off1 := auxIntToInt32(v.AuxInt)
  4482  		sym := auxToSym(v.Aux)
  4483  		if v_0.Op != OpLOONG64ADDVconst {
  4484  			break
  4485  		}
  4486  		off2 := auxIntToInt64(v_0.AuxInt)
  4487  		ptr := v_0.Args[0]
  4488  		val := v_1
  4489  		mem := v_2
  4490  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4491  			break
  4492  		}
  4493  		v.reset(OpLOONG64MOVVstore)
  4494  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4495  		v.Aux = symToAux(sym)
  4496  		v.AddArg3(ptr, val, mem)
  4497  		return true
  4498  	}
  4499  	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  4500  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4501  	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  4502  	for {
  4503  		off1 := auxIntToInt32(v.AuxInt)
  4504  		sym1 := auxToSym(v.Aux)
  4505  		if v_0.Op != OpLOONG64MOVVaddr {
  4506  			break
  4507  		}
  4508  		off2 := auxIntToInt32(v_0.AuxInt)
  4509  		sym2 := auxToSym(v_0.Aux)
  4510  		ptr := v_0.Args[0]
  4511  		val := v_1
  4512  		mem := v_2
  4513  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4514  			break
  4515  		}
  4516  		v.reset(OpLOONG64MOVVstore)
  4517  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4518  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4519  		v.AddArg3(ptr, val, mem)
  4520  		return true
  4521  	}
  4522  	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
  4523  	// result: (MOVVstorezero [off] {sym} ptr mem)
  4524  	for {
  4525  		off := auxIntToInt32(v.AuxInt)
  4526  		sym := auxToSym(v.Aux)
  4527  		ptr := v_0
  4528  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  4529  			break
  4530  		}
  4531  		mem := v_2
  4532  		v.reset(OpLOONG64MOVVstorezero)
  4533  		v.AuxInt = int32ToAuxInt(off)
  4534  		v.Aux = symToAux(sym)
  4535  		v.AddArg2(ptr, mem)
  4536  		return true
  4537  	}
  4538  	// match: (MOVVstore [off] {sym} (ADDV ptr idx) val mem)
  4539  	// cond: off == 0 && sym == nil
  4540  	// result: (MOVVstoreidx ptr idx val mem)
  4541  	for {
  4542  		off := auxIntToInt32(v.AuxInt)
  4543  		sym := auxToSym(v.Aux)
  4544  		if v_0.Op != OpLOONG64ADDV {
  4545  			break
  4546  		}
  4547  		idx := v_0.Args[1]
  4548  		ptr := v_0.Args[0]
  4549  		val := v_1
  4550  		mem := v_2
  4551  		if !(off == 0 && sym == nil) {
  4552  			break
  4553  		}
  4554  		v.reset(OpLOONG64MOVVstoreidx)
  4555  		v.AddArg4(ptr, idx, val, mem)
  4556  		return true
  4557  	}
  4558  	return false
  4559  }
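        // rewriteValueLOONG64_OpLOONG64MOVVstoreidx folds a 32-bit constant address
        // operand into a MOVVstore offset and rewrites stores of zero to
        // MOVVstorezeroidx.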
  4560  func rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v *Value) bool {
  4561  	v_3 := v.Args[3]
  4562  	v_2 := v.Args[2]
  4563  	v_1 := v.Args[1]
  4564  	v_0 := v.Args[0]
  4565  	// match: (MOVVstoreidx ptr (MOVVconst [c]) val mem)
  4566  	// cond: is32Bit(c)
  4567  	// result: (MOVVstore [int32(c)] ptr val mem)
  4568  	for {
  4569  		ptr := v_0
  4570  		if v_1.Op != OpLOONG64MOVVconst {
  4571  			break
  4572  		}
  4573  		c := auxIntToInt64(v_1.AuxInt)
  4574  		val := v_2
  4575  		mem := v_3
  4576  		if !(is32Bit(c)) {
  4577  			break
  4578  		}
  4579  		v.reset(OpLOONG64MOVVstore)
  4580  		v.AuxInt = int32ToAuxInt(int32(c))
  4581  		v.AddArg3(ptr, val, mem)
  4582  		return true
  4583  	}
  4584  	// match: (MOVVstoreidx (MOVVconst [c]) idx val mem)
  4585  	// cond: is32Bit(c)
  4586  	// result: (MOVVstore [int32(c)] idx val mem)
  4587  	for {
  4588  		if v_0.Op != OpLOONG64MOVVconst {
  4589  			break
  4590  		}
  4591  		c := auxIntToInt64(v_0.AuxInt)
  4592  		idx := v_1
  4593  		val := v_2
  4594  		mem := v_3
  4595  		if !(is32Bit(c)) {
  4596  			break
  4597  		}
  4598  		v.reset(OpLOONG64MOVVstore)
  4599  		v.AuxInt = int32ToAuxInt(int32(c))
  4600  		v.AddArg3(idx, val, mem)
  4601  		return true
  4602  	}
  4603  	// match: (MOVVstoreidx ptr idx (MOVVconst [0]) mem)
  4604  	// result: (MOVVstorezeroidx ptr idx mem)
  4605  	for {
  4606  		ptr := v_0
  4607  		idx := v_1
  4608  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  4609  			break
  4610  		}
  4611  		mem := v_3
  4612  		v.reset(OpLOONG64MOVVstorezeroidx)
  4613  		v.AddArg3(ptr, idx, mem)
  4614  		return true
  4615  	}
  4616  	return false
  4617  }
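        // rewriteValueLOONG64_OpLOONG64MOVVstorezero applies the standard address folding
        // to the zero-storing 64-bit store.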
  4618  func rewriteValueLOONG64_OpLOONG64MOVVstorezero(v *Value) bool {
  4619  	v_1 := v.Args[1]
  4620  	v_0 := v.Args[0]
  4621  	b := v.Block
  4622  	config := b.Func.Config
  4623  	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  4624  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4625  	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
  4626  	for {
  4627  		off1 := auxIntToInt32(v.AuxInt)
  4628  		sym := auxToSym(v.Aux)
  4629  		if v_0.Op != OpLOONG64ADDVconst {
  4630  			break
  4631  		}
  4632  		off2 := auxIntToInt64(v_0.AuxInt)
  4633  		ptr := v_0.Args[0]
  4634  		mem := v_1
  4635  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4636  			break
  4637  		}
  4638  		v.reset(OpLOONG64MOVVstorezero)
  4639  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4640  		v.Aux = symToAux(sym)
  4641  		v.AddArg2(ptr, mem)
  4642  		return true
  4643  	}
  4644  	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4645  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4646  	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4647  	for {
  4648  		off1 := auxIntToInt32(v.AuxInt)
  4649  		sym1 := auxToSym(v.Aux)
  4650  		if v_0.Op != OpLOONG64MOVVaddr {
  4651  			break
  4652  		}
  4653  		off2 := auxIntToInt32(v_0.AuxInt)
  4654  		sym2 := auxToSym(v_0.Aux)
  4655  		ptr := v_0.Args[0]
  4656  		mem := v_1
  4657  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4658  			break
  4659  		}
  4660  		v.reset(OpLOONG64MOVVstorezero)
  4661  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4662  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4663  		v.AddArg2(ptr, mem)
  4664  		return true
  4665  	}
  4666  	// match: (MOVVstorezero [off] {sym} (ADDV ptr idx) mem)
  4667  	// cond: off == 0 && sym == nil
  4668  	// result: (MOVVstorezeroidx ptr idx mem)
  4669  	for {
  4670  		off := auxIntToInt32(v.AuxInt)
  4671  		sym := auxToSym(v.Aux)
  4672  		if v_0.Op != OpLOONG64ADDV {
  4673  			break
  4674  		}
  4675  		idx := v_0.Args[1]
  4676  		ptr := v_0.Args[0]
  4677  		mem := v_1
  4678  		if !(off == 0 && sym == nil) {
  4679  			break
  4680  		}
  4681  		v.reset(OpLOONG64MOVVstorezeroidx)
  4682  		v.AddArg3(ptr, idx, mem)
  4683  		return true
  4684  	}
  4685  	return false
  4686  }
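        // rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx moves a constant index back into
        // the offset of MOVVstorezero.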
  4687  func rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx(v *Value) bool {
  4688  	v_2 := v.Args[2]
  4689  	v_1 := v.Args[1]
  4690  	v_0 := v.Args[0]
  4691  	// match: (MOVVstorezeroidx ptr (MOVVconst [c]) mem)
  4692  	// cond: is32Bit(c)
  4693  	// result: (MOVVstorezero [int32(c)] ptr mem)
  4694  	for {
  4695  		ptr := v_0
  4696  		if v_1.Op != OpLOONG64MOVVconst {
  4697  			break
  4698  		}
  4699  		c := auxIntToInt64(v_1.AuxInt)
  4700  		mem := v_2
  4701  		if !(is32Bit(c)) {
  4702  			break
  4703  		}
  4704  		v.reset(OpLOONG64MOVVstorezero)
  4705  		v.AuxInt = int32ToAuxInt(int32(c))
  4706  		v.AddArg2(ptr, mem)
  4707  		return true
  4708  	}
  4709  	// match: (MOVVstorezeroidx (MOVVconst [c]) idx mem)
  4710  	// cond: is32Bit(c)
  4711  	// result: (MOVVstorezero [int32(c)] idx mem)
  4712  	for {
  4713  		if v_0.Op != OpLOONG64MOVVconst {
  4714  			break
  4715  		}
  4716  		c := auxIntToInt64(v_0.AuxInt)
  4717  		idx := v_1
  4718  		mem := v_2
  4719  		if !(is32Bit(c)) {
  4720  			break
  4721  		}
  4722  		v.reset(OpLOONG64MOVVstorezero)
  4723  		v.AuxInt = int32ToAuxInt(int32(c))
  4724  		v.AddArg2(idx, mem)
  4725  		return true
  4726  	}
  4727  	return false
  4728  }
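        // rewriteValueLOONG64_OpLOONG64MOVWUload lowers unsigned word loads: reading back
        // a value just written by an FP store (MOVFstore) at the same address becomes an
        // FP-to-GP move (MOVWfpgp) zero-extended to 64 bits, and the usual
        // offset/symbol/index folding applies.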
  4729  func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
  4730  	v_1 := v.Args[1]
  4731  	v_0 := v.Args[0]
  4732  	b := v.Block
  4733  	config := b.Func.Config
  4734  	typ := &b.Func.Config.Types
  4735  	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
  4736  	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
  4737  	for {
  4738  		off := auxIntToInt32(v.AuxInt)
  4739  		sym := auxToSym(v.Aux)
  4740  		ptr := v_0
  4741  		if v_1.Op != OpLOONG64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  4742  			break
  4743  		}
  4744  		val := v_1.Args[1]
  4745  		if ptr != v_1.Args[0] {
  4746  			break
  4747  		}
  4748  		v.reset(OpZeroExt32to64)
  4749  		v0 := b.NewValue0(v_1.Pos, OpLOONG64MOVWfpgp, typ.Float32)
  4750  		v0.AddArg(val)
  4751  		v.AddArg(v0)
  4752  		return true
  4753  	}
  4754  	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4755  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4756  	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
  4757  	for {
  4758  		off1 := auxIntToInt32(v.AuxInt)
  4759  		sym := auxToSym(v.Aux)
  4760  		if v_0.Op != OpLOONG64ADDVconst {
  4761  			break
  4762  		}
  4763  		off2 := auxIntToInt64(v_0.AuxInt)
  4764  		ptr := v_0.Args[0]
  4765  		mem := v_1
  4766  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4767  			break
  4768  		}
  4769  		v.reset(OpLOONG64MOVWUload)
  4770  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4771  		v.Aux = symToAux(sym)
  4772  		v.AddArg2(ptr, mem)
  4773  		return true
  4774  	}
  4775  	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4776  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4777  	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4778  	for {
  4779  		off1 := auxIntToInt32(v.AuxInt)
  4780  		sym1 := auxToSym(v.Aux)
  4781  		if v_0.Op != OpLOONG64MOVVaddr {
  4782  			break
  4783  		}
  4784  		off2 := auxIntToInt32(v_0.AuxInt)
  4785  		sym2 := auxToSym(v_0.Aux)
  4786  		ptr := v_0.Args[0]
  4787  		mem := v_1
  4788  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4789  			break
  4790  		}
  4791  		v.reset(OpLOONG64MOVWUload)
  4792  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4793  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4794  		v.AddArg2(ptr, mem)
  4795  		return true
  4796  	}
  4797  	// match: (MOVWUload [off] {sym} (ADDV ptr idx) mem)
  4798  	// cond: off == 0 && sym == nil
  4799  	// result: (MOVWUloadidx ptr idx mem)
  4800  	for {
  4801  		off := auxIntToInt32(v.AuxInt)
  4802  		sym := auxToSym(v.Aux)
  4803  		if v_0.Op != OpLOONG64ADDV {
  4804  			break
  4805  		}
  4806  		idx := v_0.Args[1]
  4807  		ptr := v_0.Args[0]
  4808  		mem := v_1
  4809  		if !(off == 0 && sym == nil) {
  4810  			break
  4811  		}
  4812  		v.reset(OpLOONG64MOVWUloadidx)
  4813  		v.AddArg3(ptr, idx, mem)
  4814  		return true
  4815  	}
  4816  	return false
  4817  }
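        // rewriteValueLOONG64_OpLOONG64MOVWUloadidx folds a 32-bit constant address
        // operand into the offset of MOVWUload.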
  4818  func rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v *Value) bool {
  4819  	v_2 := v.Args[2]
  4820  	v_1 := v.Args[1]
  4821  	v_0 := v.Args[0]
  4822  	// match: (MOVWUloadidx ptr (MOVVconst [c]) mem)
  4823  	// cond: is32Bit(c)
  4824  	// result: (MOVWUload [int32(c)] ptr mem)
  4825  	for {
  4826  		ptr := v_0
  4827  		if v_1.Op != OpLOONG64MOVVconst {
  4828  			break
  4829  		}
  4830  		c := auxIntToInt64(v_1.AuxInt)
  4831  		mem := v_2
  4832  		if !(is32Bit(c)) {
  4833  			break
  4834  		}
  4835  		v.reset(OpLOONG64MOVWUload)
  4836  		v.AuxInt = int32ToAuxInt(int32(c))
  4837  		v.AddArg2(ptr, mem)
  4838  		return true
  4839  	}
  4840  	// match: (MOVWUloadidx (MOVVconst [c]) ptr mem)
  4841  	// cond: is32Bit(c)
  4842  	// result: (MOVWUload [int32(c)] ptr mem)
  4843  	for {
  4844  		if v_0.Op != OpLOONG64MOVVconst {
  4845  			break
  4846  		}
  4847  		c := auxIntToInt64(v_0.AuxInt)
  4848  		ptr := v_1
  4849  		mem := v_2
  4850  		if !(is32Bit(c)) {
  4851  			break
  4852  		}
  4853  		v.reset(OpLOONG64MOVWUload)
  4854  		v.AuxInt = int32ToAuxInt(int32(c))
  4855  		v.AddArg2(ptr, mem)
  4856  		return true
  4857  	}
  4858  	return false
  4859  }
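        // rewriteValueLOONG64_OpLOONG64MOVWUreg is the 32-bit analogue of MOVHUreg above:
        // a right shift becomes a BSTRPICKV extract of bits rc+31..rc, already
        // zero-extended values reduce to MOVVreg, shifts that clear the low word and
        // constants are folded to their low 32 bits, and AND masks confined to the low
        // 32 bits make the extension redundant.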
  4860  func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool {
  4861  	v_0 := v.Args[0]
  4862  	// match: (MOVWUreg (SRLVconst [rc] x))
  4863  	// cond: rc < 32
  4864  	// result: (BSTRPICKV [rc + (31+rc)<<6] x)
  4865  	for {
  4866  		if v_0.Op != OpLOONG64SRLVconst {
  4867  			break
  4868  		}
  4869  		rc := auxIntToInt64(v_0.AuxInt)
  4870  		x := v_0.Args[0]
  4871  		if !(rc < 32) {
  4872  			break
  4873  		}
  4874  		v.reset(OpLOONG64BSTRPICKV)
  4875  		v.AuxInt = int64ToAuxInt(rc + (31+rc)<<6)
  4876  		v.AddArg(x)
  4877  		return true
  4878  	}
  4879  	// match: (MOVWUreg x:(MOVBUload _ _))
  4880  	// result: (MOVVreg x)
  4881  	for {
  4882  		x := v_0
  4883  		if x.Op != OpLOONG64MOVBUload {
  4884  			break
  4885  		}
  4886  		v.reset(OpLOONG64MOVVreg)
  4887  		v.AddArg(x)
  4888  		return true
  4889  	}
  4890  	// match: (MOVWUreg x:(MOVHUload _ _))
  4891  	// result: (MOVVreg x)
  4892  	for {
  4893  		x := v_0
  4894  		if x.Op != OpLOONG64MOVHUload {
  4895  			break
  4896  		}
  4897  		v.reset(OpLOONG64MOVVreg)
  4898  		v.AddArg(x)
  4899  		return true
  4900  	}
  4901  	// match: (MOVWUreg x:(MOVWUload _ _))
  4902  	// result: (MOVVreg x)
  4903  	for {
  4904  		x := v_0
  4905  		if x.Op != OpLOONG64MOVWUload {
  4906  			break
  4907  		}
  4908  		v.reset(OpLOONG64MOVVreg)
  4909  		v.AddArg(x)
  4910  		return true
  4911  	}
  4912  	// match: (MOVWUreg x:(MOVBUreg _))
  4913  	// result: (MOVVreg x)
  4914  	for {
  4915  		x := v_0
  4916  		if x.Op != OpLOONG64MOVBUreg {
  4917  			break
  4918  		}
  4919  		v.reset(OpLOONG64MOVVreg)
  4920  		v.AddArg(x)
  4921  		return true
  4922  	}
  4923  	// match: (MOVWUreg x:(MOVHUreg _))
  4924  	// result: (MOVVreg x)
  4925  	for {
  4926  		x := v_0
  4927  		if x.Op != OpLOONG64MOVHUreg {
  4928  			break
  4929  		}
  4930  		v.reset(OpLOONG64MOVVreg)
  4931  		v.AddArg(x)
  4932  		return true
  4933  	}
  4934  	// match: (MOVWUreg x:(MOVWUreg _))
  4935  	// result: (MOVVreg x)
  4936  	for {
  4937  		x := v_0
  4938  		if x.Op != OpLOONG64MOVWUreg {
  4939  			break
  4940  		}
  4941  		v.reset(OpLOONG64MOVVreg)
  4942  		v.AddArg(x)
  4943  		return true
  4944  	}
  4945  	// match: (MOVWUreg (SLLVconst [lc] x))
  4946  	// cond: lc >= 32
  4947  	// result: (MOVVconst [0])
  4948  	for {
  4949  		if v_0.Op != OpLOONG64SLLVconst {
  4950  			break
  4951  		}
  4952  		lc := auxIntToInt64(v_0.AuxInt)
  4953  		if !(lc >= 32) {
  4954  			break
  4955  		}
  4956  		v.reset(OpLOONG64MOVVconst)
  4957  		v.AuxInt = int64ToAuxInt(0)
  4958  		return true
  4959  	}
  4960  	// match: (MOVWUreg (MOVVconst [c]))
  4961  	// result: (MOVVconst [int64(uint32(c))])
  4962  	for {
  4963  		if v_0.Op != OpLOONG64MOVVconst {
  4964  			break
  4965  		}
  4966  		c := auxIntToInt64(v_0.AuxInt)
  4967  		v.reset(OpLOONG64MOVVconst)
  4968  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
  4969  		return true
  4970  	}
  4971  	// match: (MOVWUreg x:(ANDconst [c] y))
  4972  	// cond: c >= 0 && int64(uint32(c)) == c
  4973  	// result: x
  4974  	for {
  4975  		x := v_0
  4976  		if x.Op != OpLOONG64ANDconst {
  4977  			break
  4978  		}
  4979  		c := auxIntToInt64(x.AuxInt)
  4980  		if !(c >= 0 && int64(uint32(c)) == c) {
  4981  			break
  4982  		}
  4983  		v.copyOf(x)
  4984  		return true
  4985  	}
  4986  	return false
  4987  }
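        // rewriteValueLOONG64_OpLOONG64MOVWload folds ADDVconst offsets and MOVVaddr
        // symbols into the signed word load and rewrites reg+reg addresses to
        // MOVWloadidx.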
  4988  func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
  4989  	v_1 := v.Args[1]
  4990  	v_0 := v.Args[0]
  4991  	b := v.Block
  4992  	config := b.Func.Config
  4993  	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4994  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4995  	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
  4996  	for {
  4997  		off1 := auxIntToInt32(v.AuxInt)
  4998  		sym := auxToSym(v.Aux)
  4999  		if v_0.Op != OpLOONG64ADDVconst {
  5000  			break
  5001  		}
  5002  		off2 := auxIntToInt64(v_0.AuxInt)
  5003  		ptr := v_0.Args[0]
  5004  		mem := v_1
  5005  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5006  			break
  5007  		}
  5008  		v.reset(OpLOONG64MOVWload)
  5009  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5010  		v.Aux = symToAux(sym)
  5011  		v.AddArg2(ptr, mem)
  5012  		return true
  5013  	}
  5014  	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  5015  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5016  	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  5017  	for {
  5018  		off1 := auxIntToInt32(v.AuxInt)
  5019  		sym1 := auxToSym(v.Aux)
  5020  		if v_0.Op != OpLOONG64MOVVaddr {
  5021  			break
  5022  		}
  5023  		off2 := auxIntToInt32(v_0.AuxInt)
  5024  		sym2 := auxToSym(v_0.Aux)
  5025  		ptr := v_0.Args[0]
  5026  		mem := v_1
  5027  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5028  			break
  5029  		}
  5030  		v.reset(OpLOONG64MOVWload)
  5031  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5032  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5033  		v.AddArg2(ptr, mem)
  5034  		return true
  5035  	}
  5036  	// match: (MOVWload [off] {sym} (ADDV ptr idx) mem)
  5037  	// cond: off == 0 && sym == nil
  5038  	// result: (MOVWloadidx ptr idx mem)
  5039  	for {
  5040  		off := auxIntToInt32(v.AuxInt)
  5041  		sym := auxToSym(v.Aux)
  5042  		if v_0.Op != OpLOONG64ADDV {
  5043  			break
  5044  		}
  5045  		idx := v_0.Args[1]
  5046  		ptr := v_0.Args[0]
  5047  		mem := v_1
  5048  		if !(off == 0 && sym == nil) {
  5049  			break
  5050  		}
  5051  		v.reset(OpLOONG64MOVWloadidx)
  5052  		v.AddArg3(ptr, idx, mem)
  5053  		return true
  5054  	}
  5055  	return false
  5056  }
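        // rewriteValueLOONG64_OpLOONG64MOVWloadidx folds a 32-bit constant address
        // operand into the offset of MOVWload.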
  5057  func rewriteValueLOONG64_OpLOONG64MOVWloadidx(v *Value) bool {
  5058  	v_2 := v.Args[2]
  5059  	v_1 := v.Args[1]
  5060  	v_0 := v.Args[0]
  5061  	// match: (MOVWloadidx ptr (MOVVconst [c]) mem)
  5062  	// cond: is32Bit(c)
  5063  	// result: (MOVWload [int32(c)] ptr mem)
  5064  	for {
  5065  		ptr := v_0
  5066  		if v_1.Op != OpLOONG64MOVVconst {
  5067  			break
  5068  		}
  5069  		c := auxIntToInt64(v_1.AuxInt)
  5070  		mem := v_2
  5071  		if !(is32Bit(c)) {
  5072  			break
  5073  		}
  5074  		v.reset(OpLOONG64MOVWload)
  5075  		v.AuxInt = int32ToAuxInt(int32(c))
  5076  		v.AddArg2(ptr, mem)
  5077  		return true
  5078  	}
  5079  	// match: (MOVWloadidx (MOVVconst [c]) ptr mem)
  5080  	// cond: is32Bit(c)
  5081  	// result: (MOVWload [int32(c)] ptr mem)
  5082  	for {
  5083  		if v_0.Op != OpLOONG64MOVVconst {
  5084  			break
  5085  		}
  5086  		c := auxIntToInt64(v_0.AuxInt)
  5087  		ptr := v_1
  5088  		mem := v_2
  5089  		if !(is32Bit(c)) {
  5090  			break
  5091  		}
  5092  		v.reset(OpLOONG64MOVWload)
  5093  		v.AuxInt = int32ToAuxInt(int32(c))
  5094  		v.AddArg2(ptr, mem)
  5095  		return true
  5096  	}
  5097  	return false
  5098  }
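        // rewriteValueLOONG64_OpLOONG64MOVWreg drops redundant 32-bit sign extensions:
        // values from narrower loads and extensions pass through as MOVVreg, constants
        // are sign-extended at compile time, and a non-negative AND mask that fits in
        // 31 bits leaves the operand unchanged.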
  5099  func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool {
  5100  	v_0 := v.Args[0]
  5101  	// match: (MOVWreg x:(MOVBload _ _))
  5102  	// result: (MOVVreg x)
  5103  	for {
  5104  		x := v_0
  5105  		if x.Op != OpLOONG64MOVBload {
  5106  			break
  5107  		}
  5108  		v.reset(OpLOONG64MOVVreg)
  5109  		v.AddArg(x)
  5110  		return true
  5111  	}
  5112  	// match: (MOVWreg x:(MOVBUload _ _))
  5113  	// result: (MOVVreg x)
  5114  	for {
  5115  		x := v_0
  5116  		if x.Op != OpLOONG64MOVBUload {
  5117  			break
  5118  		}
  5119  		v.reset(OpLOONG64MOVVreg)
  5120  		v.AddArg(x)
  5121  		return true
  5122  	}
  5123  	// match: (MOVWreg x:(MOVHload _ _))
  5124  	// result: (MOVVreg x)
  5125  	for {
  5126  		x := v_0
  5127  		if x.Op != OpLOONG64MOVHload {
  5128  			break
  5129  		}
  5130  		v.reset(OpLOONG64MOVVreg)
  5131  		v.AddArg(x)
  5132  		return true
  5133  	}
  5134  	// match: (MOVWreg x:(MOVHUload _ _))
  5135  	// result: (MOVVreg x)
  5136  	for {
  5137  		x := v_0
  5138  		if x.Op != OpLOONG64MOVHUload {
  5139  			break
  5140  		}
  5141  		v.reset(OpLOONG64MOVVreg)
  5142  		v.AddArg(x)
  5143  		return true
  5144  	}
  5145  	// match: (MOVWreg x:(MOVWload _ _))
  5146  	// result: (MOVVreg x)
  5147  	for {
  5148  		x := v_0
  5149  		if x.Op != OpLOONG64MOVWload {
  5150  			break
  5151  		}
  5152  		v.reset(OpLOONG64MOVVreg)
  5153  		v.AddArg(x)
  5154  		return true
  5155  	}
  5156  	// match: (MOVWreg x:(MOVBreg _))
  5157  	// result: (MOVVreg x)
  5158  	for {
  5159  		x := v_0
  5160  		if x.Op != OpLOONG64MOVBreg {
  5161  			break
  5162  		}
  5163  		v.reset(OpLOONG64MOVVreg)
  5164  		v.AddArg(x)
  5165  		return true
  5166  	}
  5167  	// match: (MOVWreg x:(MOVBUreg _))
  5168  	// result: (MOVVreg x)
  5169  	for {
  5170  		x := v_0
  5171  		if x.Op != OpLOONG64MOVBUreg {
  5172  			break
  5173  		}
  5174  		v.reset(OpLOONG64MOVVreg)
  5175  		v.AddArg(x)
  5176  		return true
  5177  	}
  5178  	// match: (MOVWreg x:(MOVHreg _))
  5179  	// result: (MOVVreg x)
  5180  	for {
  5181  		x := v_0
  5182  		if x.Op != OpLOONG64MOVHreg {
  5183  			break
  5184  		}
  5185  		v.reset(OpLOONG64MOVVreg)
  5186  		v.AddArg(x)
  5187  		return true
  5188  	}
  5189  	// match: (MOVWreg x:(MOVWreg _))
  5190  	// result: (MOVVreg x)
  5191  	for {
  5192  		x := v_0
  5193  		if x.Op != OpLOONG64MOVWreg {
  5194  			break
  5195  		}
  5196  		v.reset(OpLOONG64MOVVreg)
  5197  		v.AddArg(x)
  5198  		return true
  5199  	}
  5200  	// match: (MOVWreg (MOVVconst [c]))
  5201  	// result: (MOVVconst [int64(int32(c))])
  5202  	for {
  5203  		if v_0.Op != OpLOONG64MOVVconst {
  5204  			break
  5205  		}
  5206  		c := auxIntToInt64(v_0.AuxInt)
  5207  		v.reset(OpLOONG64MOVVconst)
  5208  		v.AuxInt = int64ToAuxInt(int64(int32(c)))
  5209  		return true
  5210  	}
  5211  	// match: (MOVWreg x:(ANDconst [c] y))
  5212  	// cond: c >= 0 && int64(int32(c)) == c
  5213  	// result: x
  5214  	for {
  5215  		x := v_0
  5216  		if x.Op != OpLOONG64ANDconst {
  5217  			break
  5218  		}
  5219  		c := auxIntToInt64(x.AuxInt)
  5220  		if !(c >= 0 && int64(int32(c)) == c) {
  5221  			break
  5222  		}
  5223  		v.copyOf(x)
  5224  		return true
  5225  	}
  5226  	return false
  5227  }
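        // rewriteValueLOONG64_OpLOONG64MOVWstore writes a value coming straight from an
        // FP register (MOVWfpgp) with MOVFstore, folds constant offsets and symbols,
        // drops redundant MOVWreg/MOVWUreg extensions of the stored value, turns stores
        // of constant zero into MOVWstorezero, and uses MOVWstoreidx for reg+reg
        // addresses.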
  5228  func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
  5229  	v_2 := v.Args[2]
  5230  	v_1 := v.Args[1]
  5231  	v_0 := v.Args[0]
  5232  	b := v.Block
  5233  	config := b.Func.Config
  5234  	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
  5235  	// result: (MOVFstore [off] {sym} ptr val mem)
  5236  	for {
  5237  		off := auxIntToInt32(v.AuxInt)
  5238  		sym := auxToSym(v.Aux)
  5239  		ptr := v_0
  5240  		if v_1.Op != OpLOONG64MOVWfpgp {
  5241  			break
  5242  		}
  5243  		val := v_1.Args[0]
  5244  		mem := v_2
  5245  		v.reset(OpLOONG64MOVFstore)
  5246  		v.AuxInt = int32ToAuxInt(off)
  5247  		v.Aux = symToAux(sym)
  5248  		v.AddArg3(ptr, val, mem)
  5249  		return true
  5250  	}
  5251  	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  5252  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5253  	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
  5254  	for {
  5255  		off1 := auxIntToInt32(v.AuxInt)
  5256  		sym := auxToSym(v.Aux)
  5257  		if v_0.Op != OpLOONG64ADDVconst {
  5258  			break
  5259  		}
  5260  		off2 := auxIntToInt64(v_0.AuxInt)
  5261  		ptr := v_0.Args[0]
  5262  		val := v_1
  5263  		mem := v_2
  5264  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5265  			break
  5266  		}
  5267  		v.reset(OpLOONG64MOVWstore)
  5268  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5269  		v.Aux = symToAux(sym)
  5270  		v.AddArg3(ptr, val, mem)
  5271  		return true
  5272  	}
  5273  	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  5274  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5275  	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  5276  	for {
  5277  		off1 := auxIntToInt32(v.AuxInt)
  5278  		sym1 := auxToSym(v.Aux)
  5279  		if v_0.Op != OpLOONG64MOVVaddr {
  5280  			break
  5281  		}
  5282  		off2 := auxIntToInt32(v_0.AuxInt)
  5283  		sym2 := auxToSym(v_0.Aux)
  5284  		ptr := v_0.Args[0]
  5285  		val := v_1
  5286  		mem := v_2
  5287  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5288  			break
  5289  		}
  5290  		v.reset(OpLOONG64MOVWstore)
  5291  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5292  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5293  		v.AddArg3(ptr, val, mem)
  5294  		return true
  5295  	}
  5296  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  5297  	// result: (MOVWstore [off] {sym} ptr x mem)
  5298  	for {
  5299  		off := auxIntToInt32(v.AuxInt)
  5300  		sym := auxToSym(v.Aux)
  5301  		ptr := v_0
  5302  		if v_1.Op != OpLOONG64MOVWreg {
  5303  			break
  5304  		}
  5305  		x := v_1.Args[0]
  5306  		mem := v_2
  5307  		v.reset(OpLOONG64MOVWstore)
  5308  		v.AuxInt = int32ToAuxInt(off)
  5309  		v.Aux = symToAux(sym)
  5310  		v.AddArg3(ptr, x, mem)
  5311  		return true
  5312  	}
  5313  	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
  5314  	// result: (MOVWstore [off] {sym} ptr x mem)
  5315  	for {
  5316  		off := auxIntToInt32(v.AuxInt)
  5317  		sym := auxToSym(v.Aux)
  5318  		ptr := v_0
  5319  		if v_1.Op != OpLOONG64MOVWUreg {
  5320  			break
  5321  		}
  5322  		x := v_1.Args[0]
  5323  		mem := v_2
  5324  		v.reset(OpLOONG64MOVWstore)
  5325  		v.AuxInt = int32ToAuxInt(off)
  5326  		v.Aux = symToAux(sym)
  5327  		v.AddArg3(ptr, x, mem)
  5328  		return true
  5329  	}
  5330  	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
  5331  	// result: (MOVWstorezero [off] {sym} ptr mem)
  5332  	for {
  5333  		off := auxIntToInt32(v.AuxInt)
  5334  		sym := auxToSym(v.Aux)
  5335  		ptr := v_0
  5336  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5337  			break
  5338  		}
  5339  		mem := v_2
  5340  		v.reset(OpLOONG64MOVWstorezero)
  5341  		v.AuxInt = int32ToAuxInt(off)
  5342  		v.Aux = symToAux(sym)
  5343  		v.AddArg2(ptr, mem)
  5344  		return true
  5345  	}
  5346  	// match: (MOVWstore [off] {sym} (ADDV ptr idx) val mem)
  5347  	// cond: off == 0 && sym == nil
  5348  	// result: (MOVWstoreidx ptr idx val mem)
  5349  	for {
  5350  		off := auxIntToInt32(v.AuxInt)
  5351  		sym := auxToSym(v.Aux)
  5352  		if v_0.Op != OpLOONG64ADDV {
  5353  			break
  5354  		}
  5355  		idx := v_0.Args[1]
  5356  		ptr := v_0.Args[0]
  5357  		val := v_1
  5358  		mem := v_2
  5359  		if !(off == 0 && sym == nil) {
  5360  			break
  5361  		}
  5362  		v.reset(OpLOONG64MOVWstoreidx)
  5363  		v.AddArg4(ptr, idx, val, mem)
  5364  		return true
  5365  	}
  5366  	return false
  5367  }
  5368  func rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v *Value) bool {
  5369  	v_3 := v.Args[3]
  5370  	v_2 := v.Args[2]
  5371  	v_1 := v.Args[1]
  5372  	v_0 := v.Args[0]
  5373  	// match: (MOVWstoreidx ptr (MOVVconst [c]) val mem)
  5374  	// cond: is32Bit(c)
  5375  	// result: (MOVWstore [int32(c)] ptr val mem)
  5376  	for {
  5377  		ptr := v_0
  5378  		if v_1.Op != OpLOONG64MOVVconst {
  5379  			break
  5380  		}
  5381  		c := auxIntToInt64(v_1.AuxInt)
  5382  		val := v_2
  5383  		mem := v_3
  5384  		if !(is32Bit(c)) {
  5385  			break
  5386  		}
  5387  		v.reset(OpLOONG64MOVWstore)
  5388  		v.AuxInt = int32ToAuxInt(int32(c))
  5389  		v.AddArg3(ptr, val, mem)
  5390  		return true
  5391  	}
  5392  	// match: (MOVWstoreidx (MOVVconst [c]) idx val mem)
  5393  	// cond: is32Bit(c)
  5394  	// result: (MOVWstore [int32(c)] idx val mem)
  5395  	for {
  5396  		if v_0.Op != OpLOONG64MOVVconst {
  5397  			break
  5398  		}
  5399  		c := auxIntToInt64(v_0.AuxInt)
  5400  		idx := v_1
  5401  		val := v_2
  5402  		mem := v_3
  5403  		if !(is32Bit(c)) {
  5404  			break
  5405  		}
  5406  		v.reset(OpLOONG64MOVWstore)
  5407  		v.AuxInt = int32ToAuxInt(int32(c))
  5408  		v.AddArg3(idx, val, mem)
  5409  		return true
  5410  	}
  5411  	// match: (MOVWstoreidx ptr idx (MOVVconst [0]) mem)
  5412  	// result: (MOVWstorezeroidx ptr idx mem)
  5413  	for {
  5414  		ptr := v_0
  5415  		idx := v_1
  5416  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  5417  			break
  5418  		}
  5419  		mem := v_3
  5420  		v.reset(OpLOONG64MOVWstorezeroidx)
  5421  		v.AddArg3(ptr, idx, mem)
  5422  		return true
  5423  	}
  5424  	return false
  5425  }
  5426  func rewriteValueLOONG64_OpLOONG64MOVWstorezero(v *Value) bool {
  5427  	v_1 := v.Args[1]
  5428  	v_0 := v.Args[0]
  5429  	b := v.Block
  5430  	config := b.Func.Config
  5431  	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  5432  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5433  	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
  5434  	for {
  5435  		off1 := auxIntToInt32(v.AuxInt)
  5436  		sym := auxToSym(v.Aux)
  5437  		if v_0.Op != OpLOONG64ADDVconst {
  5438  			break
  5439  		}
  5440  		off2 := auxIntToInt64(v_0.AuxInt)
  5441  		ptr := v_0.Args[0]
  5442  		mem := v_1
  5443  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5444  			break
  5445  		}
  5446  		v.reset(OpLOONG64MOVWstorezero)
  5447  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5448  		v.Aux = symToAux(sym)
  5449  		v.AddArg2(ptr, mem)
  5450  		return true
  5451  	}
  5452  	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  5453  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5454  	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  5455  	for {
  5456  		off1 := auxIntToInt32(v.AuxInt)
  5457  		sym1 := auxToSym(v.Aux)
  5458  		if v_0.Op != OpLOONG64MOVVaddr {
  5459  			break
  5460  		}
  5461  		off2 := auxIntToInt32(v_0.AuxInt)
  5462  		sym2 := auxToSym(v_0.Aux)
  5463  		ptr := v_0.Args[0]
  5464  		mem := v_1
  5465  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5466  			break
  5467  		}
  5468  		v.reset(OpLOONG64MOVWstorezero)
  5469  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5470  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5471  		v.AddArg2(ptr, mem)
  5472  		return true
  5473  	}
  5474  	// match: (MOVWstorezero [off] {sym} (ADDV ptr idx) mem)
  5475  	// cond: off == 0 && sym == nil
  5476  	// result: (MOVWstorezeroidx ptr idx mem)
  5477  	for {
  5478  		off := auxIntToInt32(v.AuxInt)
  5479  		sym := auxToSym(v.Aux)
  5480  		if v_0.Op != OpLOONG64ADDV {
  5481  			break
  5482  		}
  5483  		idx := v_0.Args[1]
  5484  		ptr := v_0.Args[0]
  5485  		mem := v_1
  5486  		if !(off == 0 && sym == nil) {
  5487  			break
  5488  		}
  5489  		v.reset(OpLOONG64MOVWstorezeroidx)
  5490  		v.AddArg3(ptr, idx, mem)
  5491  		return true
  5492  	}
  5493  	return false
  5494  }
  5495  func rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx(v *Value) bool {
  5496  	v_2 := v.Args[2]
  5497  	v_1 := v.Args[1]
  5498  	v_0 := v.Args[0]
  5499  	// match: (MOVWstorezeroidx ptr (MOVVconst [c]) mem)
  5500  	// cond: is32Bit(c)
  5501  	// result: (MOVWstorezero [int32(c)] ptr mem)
  5502  	for {
  5503  		ptr := v_0
  5504  		if v_1.Op != OpLOONG64MOVVconst {
  5505  			break
  5506  		}
  5507  		c := auxIntToInt64(v_1.AuxInt)
  5508  		mem := v_2
  5509  		if !(is32Bit(c)) {
  5510  			break
  5511  		}
  5512  		v.reset(OpLOONG64MOVWstorezero)
  5513  		v.AuxInt = int32ToAuxInt(int32(c))
  5514  		v.AddArg2(ptr, mem)
  5515  		return true
  5516  	}
  5517  	// match: (MOVWstorezeroidx (MOVVconst [c]) idx mem)
  5518  	// cond: is32Bit(c)
  5519  	// result: (MOVWstorezero [int32(c)] idx mem)
  5520  	for {
  5521  		if v_0.Op != OpLOONG64MOVVconst {
  5522  			break
  5523  		}
  5524  		c := auxIntToInt64(v_0.AuxInt)
  5525  		idx := v_1
  5526  		mem := v_2
  5527  		if !(is32Bit(c)) {
  5528  			break
  5529  		}
  5530  		v.reset(OpLOONG64MOVWstorezero)
  5531  		v.AuxInt = int32ToAuxInt(int32(c))
  5532  		v.AddArg2(idx, mem)
  5533  		return true
  5534  	}
  5535  	return false
  5536  }
  5537  func rewriteValueLOONG64_OpLOONG64MULV(v *Value) bool {
  5538  	v_1 := v.Args[1]
  5539  	v_0 := v.Args[0]
  5540  	b := v.Block
  5541  	config := b.Func.Config
  5542  	typ := &b.Func.Config.Types
  5543  	// match: (MULV _ (MOVVconst [0]))
  5544  	// result: (MOVVconst [0])
  5545  	for {
  5546  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5547  			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5548  				continue
  5549  			}
  5550  			v.reset(OpLOONG64MOVVconst)
  5551  			v.AuxInt = int64ToAuxInt(0)
  5552  			return true
  5553  		}
  5554  		break
  5555  	}
  5556  	// match: (MULV x (MOVVconst [1]))
  5557  	// result: x
  5558  	for {
  5559  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5560  			x := v_0
  5561  			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
  5562  				continue
  5563  			}
  5564  			v.copyOf(x)
  5565  			return true
  5566  		}
  5567  		break
  5568  	}
  5569  	// match: (MULV x (MOVVconst [c]))
  5570  	// cond: canMulStrengthReduce(config, c)
  5571  	// result: {mulStrengthReduce(v, x, c)}
  5572  	for {
  5573  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5574  			x := v_0
  5575  			if v_1.Op != OpLOONG64MOVVconst {
  5576  				continue
  5577  			}
  5578  			c := auxIntToInt64(v_1.AuxInt)
  5579  			if !(canMulStrengthReduce(config, c)) {
  5580  				continue
  5581  			}
  5582  			v.copyOf(mulStrengthReduce(v, x, c))
  5583  			return true
  5584  		}
  5585  		break
  5586  	}
  5587  	// match: (MULV (NEGV x) (MOVVconst [c]))
  5588  	// result: (MULV x (MOVVconst [-c]))
  5589  	for {
  5590  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5591  			if v_0.Op != OpLOONG64NEGV {
  5592  				continue
  5593  			}
  5594  			x := v_0.Args[0]
  5595  			if v_1.Op != OpLOONG64MOVVconst {
  5596  				continue
  5597  			}
  5598  			c := auxIntToInt64(v_1.AuxInt)
  5599  			v.reset(OpLOONG64MULV)
  5600  			v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  5601  			v0.AuxInt = int64ToAuxInt(-c)
  5602  			v.AddArg2(x, v0)
  5603  			return true
  5604  		}
  5605  		break
  5606  	}
  5607  	// match: (MULV (NEGV x) (NEGV y))
  5608  	// result: (MULV x y)
  5609  	for {
  5610  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5611  			if v_0.Op != OpLOONG64NEGV {
  5612  				continue
  5613  			}
  5614  			x := v_0.Args[0]
  5615  			if v_1.Op != OpLOONG64NEGV {
  5616  				continue
  5617  			}
  5618  			y := v_1.Args[0]
  5619  			v.reset(OpLOONG64MULV)
  5620  			v.AddArg2(x, y)
  5621  			return true
  5622  		}
  5623  		break
  5624  	}
  5625  	// match: (MULV (MOVVconst [c]) (MOVVconst [d]))
  5626  	// result: (MOVVconst [c*d])
  5627  	for {
  5628  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5629  			if v_0.Op != OpLOONG64MOVVconst {
  5630  				continue
  5631  			}
  5632  			c := auxIntToInt64(v_0.AuxInt)
  5633  			if v_1.Op != OpLOONG64MOVVconst {
  5634  				continue
  5635  			}
  5636  			d := auxIntToInt64(v_1.AuxInt)
  5637  			v.reset(OpLOONG64MOVVconst)
  5638  			v.AuxInt = int64ToAuxInt(c * d)
  5639  			return true
  5640  		}
  5641  		break
  5642  	}
  5643  	return false
  5644  }
  5645  func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool {
  5646  	v_0 := v.Args[0]
  5647  	b := v.Block
  5648  	// match: (NEGV (SUBV x y))
  5649  	// result: (SUBV y x)
  5650  	for {
  5651  		if v_0.Op != OpLOONG64SUBV {
  5652  			break
  5653  		}
  5654  		y := v_0.Args[1]
  5655  		x := v_0.Args[0]
  5656  		v.reset(OpLOONG64SUBV)
  5657  		v.AddArg2(y, x)
  5658  		return true
  5659  	}
  5660  	// match: (NEGV <t> s:(ADDVconst [c] (SUBV x y)))
  5661  	// cond: s.Uses == 1 && is12Bit(-c)
  5662  	// result: (ADDVconst [-c] (SUBV <t> y x))
  5663  	for {
  5664  		t := v.Type
  5665  		s := v_0
  5666  		if s.Op != OpLOONG64ADDVconst {
  5667  			break
  5668  		}
  5669  		c := auxIntToInt64(s.AuxInt)
  5670  		s_0 := s.Args[0]
  5671  		if s_0.Op != OpLOONG64SUBV {
  5672  			break
  5673  		}
  5674  		y := s_0.Args[1]
  5675  		x := s_0.Args[0]
  5676  		if !(s.Uses == 1 && is12Bit(-c)) {
  5677  			break
  5678  		}
  5679  		v.reset(OpLOONG64ADDVconst)
  5680  		v.AuxInt = int64ToAuxInt(-c)
  5681  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
  5682  		v0.AddArg2(y, x)
  5683  		v.AddArg(v0)
  5684  		return true
  5685  	}
  5686  	// match: (NEGV (NEGV x))
  5687  	// result: x
  5688  	for {
  5689  		if v_0.Op != OpLOONG64NEGV {
  5690  			break
  5691  		}
  5692  		x := v_0.Args[0]
  5693  		v.copyOf(x)
  5694  		return true
  5695  	}
  5696  	// match: (NEGV <t> s:(ADDVconst [c] (NEGV x)))
  5697  	// cond: s.Uses == 1 && is12Bit(-c)
  5698  	// result: (ADDVconst [-c] x)
  5699  	for {
  5700  		s := v_0
  5701  		if s.Op != OpLOONG64ADDVconst {
  5702  			break
  5703  		}
  5704  		c := auxIntToInt64(s.AuxInt)
  5705  		s_0 := s.Args[0]
  5706  		if s_0.Op != OpLOONG64NEGV {
  5707  			break
  5708  		}
  5709  		x := s_0.Args[0]
  5710  		if !(s.Uses == 1 && is12Bit(-c)) {
  5711  			break
  5712  		}
  5713  		v.reset(OpLOONG64ADDVconst)
  5714  		v.AuxInt = int64ToAuxInt(-c)
  5715  		v.AddArg(x)
  5716  		return true
  5717  	}
  5718  	// match: (NEGV (MOVVconst [c]))
  5719  	// result: (MOVVconst [-c])
  5720  	for {
  5721  		if v_0.Op != OpLOONG64MOVVconst {
  5722  			break
  5723  		}
  5724  		c := auxIntToInt64(v_0.AuxInt)
  5725  		v.reset(OpLOONG64MOVVconst)
  5726  		v.AuxInt = int64ToAuxInt(-c)
  5727  		return true
  5728  	}
  5729  	return false
  5730  }
  5731  func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool {
  5732  	v_1 := v.Args[1]
  5733  	v_0 := v.Args[0]
  5734  	// match: (NOR x (MOVVconst [c]))
  5735  	// cond: is32Bit(c)
  5736  	// result: (NORconst [c] x)
  5737  	for {
  5738  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5739  			x := v_0
  5740  			if v_1.Op != OpLOONG64MOVVconst {
  5741  				continue
  5742  			}
  5743  			c := auxIntToInt64(v_1.AuxInt)
  5744  			if !(is32Bit(c)) {
  5745  				continue
  5746  			}
  5747  			v.reset(OpLOONG64NORconst)
  5748  			v.AuxInt = int64ToAuxInt(c)
  5749  			v.AddArg(x)
  5750  			return true
  5751  		}
  5752  		break
  5753  	}
  5754  	return false
  5755  }
  5756  func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool {
  5757  	v_0 := v.Args[0]
  5758  	// match: (NORconst [c] (MOVVconst [d]))
  5759  	// result: (MOVVconst [^(c|d)])
  5760  	for {
  5761  		c := auxIntToInt64(v.AuxInt)
  5762  		if v_0.Op != OpLOONG64MOVVconst {
  5763  			break
  5764  		}
  5765  		d := auxIntToInt64(v_0.AuxInt)
  5766  		v.reset(OpLOONG64MOVVconst)
  5767  		v.AuxInt = int64ToAuxInt(^(c | d))
  5768  		return true
  5769  	}
  5770  	return false
  5771  }
  5772  func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool {
  5773  	v_1 := v.Args[1]
  5774  	v_0 := v.Args[0]
  5775  	// match: (OR x (MOVVconst [c]))
  5776  	// cond: is32Bit(c)
  5777  	// result: (ORconst [c] x)
  5778  	for {
  5779  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5780  			x := v_0
  5781  			if v_1.Op != OpLOONG64MOVVconst {
  5782  				continue
  5783  			}
  5784  			c := auxIntToInt64(v_1.AuxInt)
  5785  			if !(is32Bit(c)) {
  5786  				continue
  5787  			}
  5788  			v.reset(OpLOONG64ORconst)
  5789  			v.AuxInt = int64ToAuxInt(c)
  5790  			v.AddArg(x)
  5791  			return true
  5792  		}
  5793  		break
  5794  	}
  5795  	// match: (OR x x)
  5796  	// result: x
  5797  	for {
  5798  		x := v_0
  5799  		if x != v_1 {
  5800  			break
  5801  		}
  5802  		v.copyOf(x)
  5803  		return true
  5804  	}
  5805  	// match: (OR x (NORconst [0] y))
  5806  	// result: (ORN x y)
  5807  	for {
  5808  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5809  			x := v_0
  5810  			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5811  				continue
  5812  			}
  5813  			y := v_1.Args[0]
  5814  			v.reset(OpLOONG64ORN)
  5815  			v.AddArg2(x, y)
  5816  			return true
  5817  		}
  5818  		break
  5819  	}
  5820  	return false
  5821  }
  5822  func rewriteValueLOONG64_OpLOONG64ORN(v *Value) bool {
  5823  	v_1 := v.Args[1]
  5824  	v_0 := v.Args[0]
  5825  	// match: (ORN x (MOVVconst [-1]))
  5826  	// result: x
  5827  	for {
  5828  		x := v_0
  5829  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
  5830  			break
  5831  		}
  5832  		v.copyOf(x)
  5833  		return true
  5834  	}
  5835  	return false
  5836  }
  5837  func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool {
  5838  	v_0 := v.Args[0]
  5839  	// match: (ORconst [0] x)
  5840  	// result: x
  5841  	for {
  5842  		if auxIntToInt64(v.AuxInt) != 0 {
  5843  			break
  5844  		}
  5845  		x := v_0
  5846  		v.copyOf(x)
  5847  		return true
  5848  	}
  5849  	// match: (ORconst [-1] _)
  5850  	// result: (MOVVconst [-1])
  5851  	for {
  5852  		if auxIntToInt64(v.AuxInt) != -1 {
  5853  			break
  5854  		}
  5855  		v.reset(OpLOONG64MOVVconst)
  5856  		v.AuxInt = int64ToAuxInt(-1)
  5857  		return true
  5858  	}
  5859  	// match: (ORconst [c] (MOVVconst [d]))
  5860  	// result: (MOVVconst [c|d])
  5861  	for {
  5862  		c := auxIntToInt64(v.AuxInt)
  5863  		if v_0.Op != OpLOONG64MOVVconst {
  5864  			break
  5865  		}
  5866  		d := auxIntToInt64(v_0.AuxInt)
  5867  		v.reset(OpLOONG64MOVVconst)
  5868  		v.AuxInt = int64ToAuxInt(c | d)
  5869  		return true
  5870  	}
  5871  	// match: (ORconst [c] (ORconst [d] x))
  5872  	// cond: is32Bit(c|d)
  5873  	// result: (ORconst [c|d] x)
  5874  	for {
  5875  		c := auxIntToInt64(v.AuxInt)
  5876  		if v_0.Op != OpLOONG64ORconst {
  5877  			break
  5878  		}
  5879  		d := auxIntToInt64(v_0.AuxInt)
  5880  		x := v_0.Args[0]
  5881  		if !(is32Bit(c | d)) {
  5882  			break
  5883  		}
  5884  		v.reset(OpLOONG64ORconst)
  5885  		v.AuxInt = int64ToAuxInt(c | d)
  5886  		v.AddArg(x)
  5887  		return true
  5888  	}
  5889  	return false
  5890  }
  5891  func rewriteValueLOONG64_OpLOONG64REMV(v *Value) bool {
  5892  	v_1 := v.Args[1]
  5893  	v_0 := v.Args[0]
  5894  	// match: (REMV (MOVVconst [c]) (MOVVconst [d]))
  5895  	// cond: d != 0
  5896  	// result: (MOVVconst [c%d])
  5897  	for {
  5898  		if v_0.Op != OpLOONG64MOVVconst {
  5899  			break
  5900  		}
  5901  		c := auxIntToInt64(v_0.AuxInt)
  5902  		if v_1.Op != OpLOONG64MOVVconst {
  5903  			break
  5904  		}
  5905  		d := auxIntToInt64(v_1.AuxInt)
  5906  		if !(d != 0) {
  5907  			break
  5908  		}
  5909  		v.reset(OpLOONG64MOVVconst)
  5910  		v.AuxInt = int64ToAuxInt(c % d)
  5911  		return true
  5912  	}
  5913  	return false
  5914  }
  5915  func rewriteValueLOONG64_OpLOONG64REMVU(v *Value) bool {
  5916  	v_1 := v.Args[1]
  5917  	v_0 := v.Args[0]
  5918  	// match: (REMVU _ (MOVVconst [1]))
  5919  	// result: (MOVVconst [0])
  5920  	for {
  5921  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
  5922  			break
  5923  		}
  5924  		v.reset(OpLOONG64MOVVconst)
  5925  		v.AuxInt = int64ToAuxInt(0)
  5926  		return true
  5927  	}
  5928  	// match: (REMVU x (MOVVconst [c]))
  5929  	// cond: isPowerOfTwo(c)
  5930  	// result: (ANDconst [c-1] x)
  5931  	for {
  5932  		x := v_0
  5933  		if v_1.Op != OpLOONG64MOVVconst {
  5934  			break
  5935  		}
  5936  		c := auxIntToInt64(v_1.AuxInt)
  5937  		if !(isPowerOfTwo(c)) {
  5938  			break
  5939  		}
  5940  		v.reset(OpLOONG64ANDconst)
  5941  		v.AuxInt = int64ToAuxInt(c - 1)
  5942  		v.AddArg(x)
  5943  		return true
  5944  	}
  5945  	// match: (REMVU (MOVVconst [c]) (MOVVconst [d]))
  5946  	// cond: d != 0
  5947  	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
  5948  	for {
  5949  		if v_0.Op != OpLOONG64MOVVconst {
  5950  			break
  5951  		}
  5952  		c := auxIntToInt64(v_0.AuxInt)
  5953  		if v_1.Op != OpLOONG64MOVVconst {
  5954  			break
  5955  		}
  5956  		d := auxIntToInt64(v_1.AuxInt)
  5957  		if !(d != 0) {
  5958  			break
  5959  		}
  5960  		v.reset(OpLOONG64MOVVconst)
  5961  		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
  5962  		return true
  5963  	}
  5964  	return false
  5965  }
  5966  func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool {
  5967  	v_1 := v.Args[1]
  5968  	v_0 := v.Args[0]
  5969  	// match: (ROTR x (MOVVconst [c]))
  5970  	// result: (ROTRconst x [c&31])
  5971  	for {
  5972  		x := v_0
  5973  		if v_1.Op != OpLOONG64MOVVconst {
  5974  			break
  5975  		}
  5976  		c := auxIntToInt64(v_1.AuxInt)
  5977  		v.reset(OpLOONG64ROTRconst)
  5978  		v.AuxInt = int64ToAuxInt(c & 31)
  5979  		v.AddArg(x)
  5980  		return true
  5981  	}
  5982  	return false
  5983  }
  5984  func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool {
  5985  	v_1 := v.Args[1]
  5986  	v_0 := v.Args[0]
  5987  	// match: (ROTRV x (MOVVconst [c]))
  5988  	// result: (ROTRVconst x [c&63])
  5989  	for {
  5990  		x := v_0
  5991  		if v_1.Op != OpLOONG64MOVVconst {
  5992  			break
  5993  		}
  5994  		c := auxIntToInt64(v_1.AuxInt)
  5995  		v.reset(OpLOONG64ROTRVconst)
  5996  		v.AuxInt = int64ToAuxInt(c & 63)
  5997  		v.AddArg(x)
  5998  		return true
  5999  	}
  6000  	return false
  6001  }
  6002  func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool {
  6003  	v_1 := v.Args[1]
  6004  	v_0 := v.Args[0]
  6005  	b := v.Block
  6006  	typ := &b.Func.Config.Types
  6007  	// match: (SGT (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
  6008  	// cond: is32Bit(d-c)
  6009  	// result: (SGT x (MOVVconst [d-c]))
  6010  	for {
  6011  		if v_0.Op != OpLOONG64MOVVconst {
  6012  			break
  6013  		}
  6014  		c := auxIntToInt64(v_0.AuxInt)
  6015  		if v_1.Op != OpLOONG64NEGV {
  6016  			break
  6017  		}
  6018  		v_1_0 := v_1.Args[0]
  6019  		if v_1_0.Op != OpLOONG64SUBVconst {
  6020  			break
  6021  		}
  6022  		d := auxIntToInt64(v_1_0.AuxInt)
  6023  		x := v_1_0.Args[0]
  6024  		if !(is32Bit(d - c)) {
  6025  			break
  6026  		}
  6027  		v.reset(OpLOONG64SGT)
  6028  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  6029  		v0.AuxInt = int64ToAuxInt(d - c)
  6030  		v.AddArg2(x, v0)
  6031  		return true
  6032  	}
  6033  	// match: (SGT (MOVVconst [c]) x)
  6034  	// cond: is32Bit(c)
  6035  	// result: (SGTconst [c] x)
  6036  	for {
  6037  		if v_0.Op != OpLOONG64MOVVconst {
  6038  			break
  6039  		}
  6040  		c := auxIntToInt64(v_0.AuxInt)
  6041  		x := v_1
  6042  		if !(is32Bit(c)) {
  6043  			break
  6044  		}
  6045  		v.reset(OpLOONG64SGTconst)
  6046  		v.AuxInt = int64ToAuxInt(c)
  6047  		v.AddArg(x)
  6048  		return true
  6049  	}
  6050  	// match: (SGT x x)
  6051  	// result: (MOVVconst [0])
  6052  	for {
  6053  		x := v_0
  6054  		if x != v_1 {
  6055  			break
  6056  		}
  6057  		v.reset(OpLOONG64MOVVconst)
  6058  		v.AuxInt = int64ToAuxInt(0)
  6059  		return true
  6060  	}
  6061  	return false
  6062  }
  6063  func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool {
  6064  	v_1 := v.Args[1]
  6065  	v_0 := v.Args[0]
  6066  	// match: (SGTU (MOVVconst [c]) x)
  6067  	// cond: is32Bit(c)
  6068  	// result: (SGTUconst [c] x)
  6069  	for {
  6070  		if v_0.Op != OpLOONG64MOVVconst {
  6071  			break
  6072  		}
  6073  		c := auxIntToInt64(v_0.AuxInt)
  6074  		x := v_1
  6075  		if !(is32Bit(c)) {
  6076  			break
  6077  		}
  6078  		v.reset(OpLOONG64SGTUconst)
  6079  		v.AuxInt = int64ToAuxInt(c)
  6080  		v.AddArg(x)
  6081  		return true
  6082  	}
  6083  	// match: (SGTU x x)
  6084  	// result: (MOVVconst [0])
  6085  	for {
  6086  		x := v_0
  6087  		if x != v_1 {
  6088  			break
  6089  		}
  6090  		v.reset(OpLOONG64MOVVconst)
  6091  		v.AuxInt = int64ToAuxInt(0)
  6092  		return true
  6093  	}
  6094  	return false
  6095  }
  6096  func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool {
  6097  	v_0 := v.Args[0]
  6098  	// match: (SGTUconst [c] (MOVVconst [d]))
  6099  	// cond: uint64(c)>uint64(d)
  6100  	// result: (MOVVconst [1])
  6101  	for {
  6102  		c := auxIntToInt64(v.AuxInt)
  6103  		if v_0.Op != OpLOONG64MOVVconst {
  6104  			break
  6105  		}
  6106  		d := auxIntToInt64(v_0.AuxInt)
  6107  		if !(uint64(c) > uint64(d)) {
  6108  			break
  6109  		}
  6110  		v.reset(OpLOONG64MOVVconst)
  6111  		v.AuxInt = int64ToAuxInt(1)
  6112  		return true
  6113  	}
  6114  	// match: (SGTUconst [c] (MOVVconst [d]))
  6115  	// cond: uint64(c)<=uint64(d)
  6116  	// result: (MOVVconst [0])
  6117  	for {
  6118  		c := auxIntToInt64(v.AuxInt)
  6119  		if v_0.Op != OpLOONG64MOVVconst {
  6120  			break
  6121  		}
  6122  		d := auxIntToInt64(v_0.AuxInt)
  6123  		if !(uint64(c) <= uint64(d)) {
  6124  			break
  6125  		}
  6126  		v.reset(OpLOONG64MOVVconst)
  6127  		v.AuxInt = int64ToAuxInt(0)
  6128  		return true
  6129  	}
  6130  	// match: (SGTUconst [c] (MOVBUreg _))
  6131  	// cond: 0xff < uint64(c)
  6132  	// result: (MOVVconst [1])
  6133  	for {
  6134  		c := auxIntToInt64(v.AuxInt)
  6135  		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) {
  6136  			break
  6137  		}
  6138  		v.reset(OpLOONG64MOVVconst)
  6139  		v.AuxInt = int64ToAuxInt(1)
  6140  		return true
  6141  	}
  6142  	// match: (SGTUconst [c] (MOVHUreg _))
  6143  	// cond: 0xffff < uint64(c)
  6144  	// result: (MOVVconst [1])
  6145  	for {
  6146  		c := auxIntToInt64(v.AuxInt)
  6147  		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) {
  6148  			break
  6149  		}
  6150  		v.reset(OpLOONG64MOVVconst)
  6151  		v.AuxInt = int64ToAuxInt(1)
  6152  		return true
  6153  	}
  6154  	// match: (SGTUconst [c] (ANDconst [m] _))
  6155  	// cond: uint64(m) < uint64(c)
  6156  	// result: (MOVVconst [1])
  6157  	for {
  6158  		c := auxIntToInt64(v.AuxInt)
  6159  		if v_0.Op != OpLOONG64ANDconst {
  6160  			break
  6161  		}
  6162  		m := auxIntToInt64(v_0.AuxInt)
  6163  		if !(uint64(m) < uint64(c)) {
  6164  			break
  6165  		}
  6166  		v.reset(OpLOONG64MOVVconst)
  6167  		v.AuxInt = int64ToAuxInt(1)
  6168  		return true
  6169  	}
  6170  	// match: (SGTUconst [c] (SRLVconst _ [d]))
  6171  	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
  6172  	// result: (MOVVconst [1])
  6173  	for {
  6174  		c := auxIntToInt64(v.AuxInt)
  6175  		if v_0.Op != OpLOONG64SRLVconst {
  6176  			break
  6177  		}
  6178  		d := auxIntToInt64(v_0.AuxInt)
  6179  		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
  6180  			break
  6181  		}
  6182  		v.reset(OpLOONG64MOVVconst)
  6183  		v.AuxInt = int64ToAuxInt(1)
  6184  		return true
  6185  	}
  6186  	return false
  6187  }
  6188  func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool {
  6189  	v_0 := v.Args[0]
  6190  	// match: (SGTconst [c] (MOVVconst [d]))
  6191  	// cond: c>d
  6192  	// result: (MOVVconst [1])
  6193  	for {
  6194  		c := auxIntToInt64(v.AuxInt)
  6195  		if v_0.Op != OpLOONG64MOVVconst {
  6196  			break
  6197  		}
  6198  		d := auxIntToInt64(v_0.AuxInt)
  6199  		if !(c > d) {
  6200  			break
  6201  		}
  6202  		v.reset(OpLOONG64MOVVconst)
  6203  		v.AuxInt = int64ToAuxInt(1)
  6204  		return true
  6205  	}
  6206  	// match: (SGTconst [c] (MOVVconst [d]))
  6207  	// cond: c<=d
  6208  	// result: (MOVVconst [0])
  6209  	for {
  6210  		c := auxIntToInt64(v.AuxInt)
  6211  		if v_0.Op != OpLOONG64MOVVconst {
  6212  			break
  6213  		}
  6214  		d := auxIntToInt64(v_0.AuxInt)
  6215  		if !(c <= d) {
  6216  			break
  6217  		}
  6218  		v.reset(OpLOONG64MOVVconst)
  6219  		v.AuxInt = int64ToAuxInt(0)
  6220  		return true
  6221  	}
  6222  	// match: (SGTconst [c] (MOVBreg _))
  6223  	// cond: 0x7f < c
  6224  	// result: (MOVVconst [1])
  6225  	for {
  6226  		c := auxIntToInt64(v.AuxInt)
  6227  		if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) {
  6228  			break
  6229  		}
  6230  		v.reset(OpLOONG64MOVVconst)
  6231  		v.AuxInt = int64ToAuxInt(1)
  6232  		return true
  6233  	}
  6234  	// match: (SGTconst [c] (MOVBreg _))
  6235  	// cond: c <= -0x80
  6236  	// result: (MOVVconst [0])
  6237  	for {
  6238  		c := auxIntToInt64(v.AuxInt)
  6239  		if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) {
  6240  			break
  6241  		}
  6242  		v.reset(OpLOONG64MOVVconst)
  6243  		v.AuxInt = int64ToAuxInt(0)
  6244  		return true
  6245  	}
  6246  	// match: (SGTconst [c] (MOVBUreg _))
  6247  	// cond: 0xff < c
  6248  	// result: (MOVVconst [1])
  6249  	for {
  6250  		c := auxIntToInt64(v.AuxInt)
  6251  		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) {
  6252  			break
  6253  		}
  6254  		v.reset(OpLOONG64MOVVconst)
  6255  		v.AuxInt = int64ToAuxInt(1)
  6256  		return true
  6257  	}
  6258  	// match: (SGTconst [c] (MOVBUreg _))
  6259  	// cond: c < 0
  6260  	// result: (MOVVconst [0])
  6261  	for {
  6262  		c := auxIntToInt64(v.AuxInt)
  6263  		if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) {
  6264  			break
  6265  		}
  6266  		v.reset(OpLOONG64MOVVconst)
  6267  		v.AuxInt = int64ToAuxInt(0)
  6268  		return true
  6269  	}
  6270  	// match: (SGTconst [c] (MOVHreg _))
  6271  	// cond: 0x7fff < c
  6272  	// result: (MOVVconst [1])
  6273  	for {
  6274  		c := auxIntToInt64(v.AuxInt)
  6275  		if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) {
  6276  			break
  6277  		}
  6278  		v.reset(OpLOONG64MOVVconst)
  6279  		v.AuxInt = int64ToAuxInt(1)
  6280  		return true
  6281  	}
  6282  	// match: (SGTconst [c] (MOVHreg _))
  6283  	// cond: c <= -0x8000
  6284  	// result: (MOVVconst [0])
  6285  	for {
  6286  		c := auxIntToInt64(v.AuxInt)
  6287  		if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) {
  6288  			break
  6289  		}
  6290  		v.reset(OpLOONG64MOVVconst)
  6291  		v.AuxInt = int64ToAuxInt(0)
  6292  		return true
  6293  	}
  6294  	// match: (SGTconst [c] (MOVHUreg _))
  6295  	// cond: 0xffff < c
  6296  	// result: (MOVVconst [1])
  6297  	for {
  6298  		c := auxIntToInt64(v.AuxInt)
  6299  		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) {
  6300  			break
  6301  		}
  6302  		v.reset(OpLOONG64MOVVconst)
  6303  		v.AuxInt = int64ToAuxInt(1)
  6304  		return true
  6305  	}
  6306  	// match: (SGTconst [c] (MOVHUreg _))
  6307  	// cond: c < 0
  6308  	// result: (MOVVconst [0])
  6309  	for {
  6310  		c := auxIntToInt64(v.AuxInt)
  6311  		if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) {
  6312  			break
  6313  		}
  6314  		v.reset(OpLOONG64MOVVconst)
  6315  		v.AuxInt = int64ToAuxInt(0)
  6316  		return true
  6317  	}
  6318  	// match: (SGTconst [c] (MOVWUreg _))
  6319  	// cond: c < 0
  6320  	// result: (MOVVconst [0])
  6321  	for {
  6322  		c := auxIntToInt64(v.AuxInt)
  6323  		if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) {
  6324  			break
  6325  		}
  6326  		v.reset(OpLOONG64MOVVconst)
  6327  		v.AuxInt = int64ToAuxInt(0)
  6328  		return true
  6329  	}
  6330  	// match: (SGTconst [c] (ANDconst [m] _))
  6331  	// cond: 0 <= m && m < c
  6332  	// result: (MOVVconst [1])
  6333  	for {
  6334  		c := auxIntToInt64(v.AuxInt)
  6335  		if v_0.Op != OpLOONG64ANDconst {
  6336  			break
  6337  		}
  6338  		m := auxIntToInt64(v_0.AuxInt)
  6339  		if !(0 <= m && m < c) {
  6340  			break
  6341  		}
  6342  		v.reset(OpLOONG64MOVVconst)
  6343  		v.AuxInt = int64ToAuxInt(1)
  6344  		return true
  6345  	}
  6346  	// match: (SGTconst [c] (SRLVconst _ [d]))
  6347  	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
  6348  	// result: (MOVVconst [1])
  6349  	for {
  6350  		c := auxIntToInt64(v.AuxInt)
  6351  		if v_0.Op != OpLOONG64SRLVconst {
  6352  			break
  6353  		}
  6354  		d := auxIntToInt64(v_0.AuxInt)
  6355  		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
  6356  			break
  6357  		}
  6358  		v.reset(OpLOONG64MOVVconst)
  6359  		v.AuxInt = int64ToAuxInt(1)
  6360  		return true
  6361  	}
  6362  	return false
  6363  }
  6364  func rewriteValueLOONG64_OpLOONG64SLL(v *Value) bool {
  6365  	v_1 := v.Args[1]
  6366  	v_0 := v.Args[0]
  6367  	// match: (SLL _ (MOVVconst [c]))
  6368  	// cond: uint64(c)>=32
  6369  	// result: (MOVVconst [0])
  6370  	for {
  6371  		if v_1.Op != OpLOONG64MOVVconst {
  6372  			break
  6373  		}
  6374  		c := auxIntToInt64(v_1.AuxInt)
  6375  		if !(uint64(c) >= 32) {
  6376  			break
  6377  		}
  6378  		v.reset(OpLOONG64MOVVconst)
  6379  		v.AuxInt = int64ToAuxInt(0)
  6380  		return true
  6381  	}
  6382  	// match: (SLL x (MOVVconst [c]))
  6383  	// cond: uint64(c) >=0 && uint64(c) <=31
  6384  	// result: (SLLconst x [c])
  6385  	for {
  6386  		x := v_0
  6387  		if v_1.Op != OpLOONG64MOVVconst {
  6388  			break
  6389  		}
  6390  		c := auxIntToInt64(v_1.AuxInt)
  6391  		if !(uint64(c) >= 0 && uint64(c) <= 31) {
  6392  			break
  6393  		}
  6394  		v.reset(OpLOONG64SLLconst)
  6395  		v.AuxInt = int64ToAuxInt(c)
  6396  		v.AddArg(x)
  6397  		return true
  6398  	}
  6399  	// match: (SLL x (ANDconst [31] y))
  6400  	// result: (SLL x y)
  6401  	for {
  6402  		x := v_0
  6403  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
  6404  			break
  6405  		}
  6406  		y := v_1.Args[0]
  6407  		v.reset(OpLOONG64SLL)
  6408  		v.AddArg2(x, y)
  6409  		return true
  6410  	}
  6411  	return false
  6412  }
  6413  func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
  6414  	v_1 := v.Args[1]
  6415  	v_0 := v.Args[0]
  6416  	// match: (SLLV _ (MOVVconst [c]))
  6417  	// cond: uint64(c)>=64
  6418  	// result: (MOVVconst [0])
  6419  	for {
  6420  		if v_1.Op != OpLOONG64MOVVconst {
  6421  			break
  6422  		}
  6423  		c := auxIntToInt64(v_1.AuxInt)
  6424  		if !(uint64(c) >= 64) {
  6425  			break
  6426  		}
  6427  		v.reset(OpLOONG64MOVVconst)
  6428  		v.AuxInt = int64ToAuxInt(0)
  6429  		return true
  6430  	}
  6431  	// match: (SLLV x (MOVVconst [c]))
  6432  	// result: (SLLVconst x [c])
  6433  	for {
  6434  		x := v_0
  6435  		if v_1.Op != OpLOONG64MOVVconst {
  6436  			break
  6437  		}
  6438  		c := auxIntToInt64(v_1.AuxInt)
  6439  		v.reset(OpLOONG64SLLVconst)
  6440  		v.AuxInt = int64ToAuxInt(c)
  6441  		v.AddArg(x)
  6442  		return true
  6443  	}
  6444  	// match: (SLLV x (ANDconst [63] y))
  6445  	// result: (SLLV x y)
  6446  	for {
  6447  		x := v_0
  6448  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
  6449  			break
  6450  		}
  6451  		y := v_1.Args[0]
  6452  		v.reset(OpLOONG64SLLV)
  6453  		v.AddArg2(x, y)
  6454  		return true
  6455  	}
  6456  	return false
  6457  }
  6458  func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
  6459  	v_0 := v.Args[0]
  6460  	// match: (SLLVconst [c] (MOVVconst [d]))
  6461  	// result: (MOVVconst [d<<uint64(c)])
  6462  	for {
  6463  		c := auxIntToInt64(v.AuxInt)
  6464  		if v_0.Op != OpLOONG64MOVVconst {
  6465  			break
  6466  		}
  6467  		d := auxIntToInt64(v_0.AuxInt)
  6468  		v.reset(OpLOONG64MOVVconst)
  6469  		v.AuxInt = int64ToAuxInt(d << uint64(c))
  6470  		return true
  6471  	}
  6472  	return false
  6473  }
  6474  func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
  6475  	v_1 := v.Args[1]
  6476  	v_0 := v.Args[0]
  6477  	// match: (SRA x (MOVVconst [c]))
  6478  	// cond: uint64(c)>=32
  6479  	// result: (SRAconst x [31])
  6480  	for {
  6481  		x := v_0
  6482  		if v_1.Op != OpLOONG64MOVVconst {
  6483  			break
  6484  		}
  6485  		c := auxIntToInt64(v_1.AuxInt)
  6486  		if !(uint64(c) >= 32) {
  6487  			break
  6488  		}
  6489  		v.reset(OpLOONG64SRAconst)
  6490  		v.AuxInt = int64ToAuxInt(31)
  6491  		v.AddArg(x)
  6492  		return true
  6493  	}
  6494  	// match: (SRA x (MOVVconst [c]))
  6495  	// cond: uint64(c) >=0 && uint64(c) <=31
  6496  	// result: (SRAconst x [c])
  6497  	for {
  6498  		x := v_0
  6499  		if v_1.Op != OpLOONG64MOVVconst {
  6500  			break
  6501  		}
  6502  		c := auxIntToInt64(v_1.AuxInt)
  6503  		if !(uint64(c) >= 0 && uint64(c) <= 31) {
  6504  			break
  6505  		}
  6506  		v.reset(OpLOONG64SRAconst)
  6507  		v.AuxInt = int64ToAuxInt(c)
  6508  		v.AddArg(x)
  6509  		return true
  6510  	}
  6511  	// match: (SRA x (ANDconst [31] y))
  6512  	// result: (SRA x y)
  6513  	for {
  6514  		x := v_0
  6515  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
  6516  			break
  6517  		}
  6518  		y := v_1.Args[0]
  6519  		v.reset(OpLOONG64SRA)
  6520  		v.AddArg2(x, y)
  6521  		return true
  6522  	}
  6523  	return false
  6524  }
  6525  func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
  6526  	v_1 := v.Args[1]
  6527  	v_0 := v.Args[0]
  6528  	// match: (SRAV x (MOVVconst [c]))
  6529  	// cond: uint64(c)>=64
  6530  	// result: (SRAVconst x [63])
  6531  	for {
  6532  		x := v_0
  6533  		if v_1.Op != OpLOONG64MOVVconst {
  6534  			break
  6535  		}
  6536  		c := auxIntToInt64(v_1.AuxInt)
  6537  		if !(uint64(c) >= 64) {
  6538  			break
  6539  		}
  6540  		v.reset(OpLOONG64SRAVconst)
  6541  		v.AuxInt = int64ToAuxInt(63)
  6542  		v.AddArg(x)
  6543  		return true
  6544  	}
  6545  	// match: (SRAV x (MOVVconst [c]))
  6546  	// result: (SRAVconst x [c])
  6547  	for {
  6548  		x := v_0
  6549  		if v_1.Op != OpLOONG64MOVVconst {
  6550  			break
  6551  		}
  6552  		c := auxIntToInt64(v_1.AuxInt)
  6553  		v.reset(OpLOONG64SRAVconst)
  6554  		v.AuxInt = int64ToAuxInt(c)
  6555  		v.AddArg(x)
  6556  		return true
  6557  	}
  6558  	// match: (SRAV x (ANDconst [63] y))
  6559  	// result: (SRAV x y)
  6560  	for {
  6561  		x := v_0
  6562  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
  6563  			break
  6564  		}
  6565  		y := v_1.Args[0]
  6566  		v.reset(OpLOONG64SRAV)
  6567  		v.AddArg2(x, y)
  6568  		return true
  6569  	}
  6570  	return false
  6571  }
  6572  func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
  6573  	v_0 := v.Args[0]
  6574  	b := v.Block
  6575  	// match: (SRAVconst [rc] (MOVWreg y))
  6576  	// cond: rc >= 0 && rc <= 31
  6577  	// result: (SRAconst [int64(rc)] y)
  6578  	for {
  6579  		rc := auxIntToInt64(v.AuxInt)
  6580  		if v_0.Op != OpLOONG64MOVWreg {
  6581  			break
  6582  		}
  6583  		y := v_0.Args[0]
  6584  		if !(rc >= 0 && rc <= 31) {
  6585  			break
  6586  		}
  6587  		v.reset(OpLOONG64SRAconst)
  6588  		v.AuxInt = int64ToAuxInt(int64(rc))
  6589  		v.AddArg(y)
  6590  		return true
  6591  	}
  6592  	// match: (SRAVconst <t> [rc] (MOVBreg y))
  6593  	// cond: rc >= 8
  6594  	// result: (SRAVconst [63] (SLLVconst <t> [56] y))
  6595  	for {
  6596  		t := v.Type
  6597  		rc := auxIntToInt64(v.AuxInt)
  6598  		if v_0.Op != OpLOONG64MOVBreg {
  6599  			break
  6600  		}
  6601  		y := v_0.Args[0]
  6602  		if !(rc >= 8) {
  6603  			break
  6604  		}
  6605  		v.reset(OpLOONG64SRAVconst)
  6606  		v.AuxInt = int64ToAuxInt(63)
  6607  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
  6608  		v0.AuxInt = int64ToAuxInt(56)
  6609  		v0.AddArg(y)
  6610  		v.AddArg(v0)
  6611  		return true
  6612  	}
  6613  	// match: (SRAVconst <t> [rc] (MOVHreg y))
  6614  	// cond: rc >= 16
  6615  	// result: (SRAVconst [63] (SLLVconst <t> [48] y))
  6616  	for {
  6617  		t := v.Type
  6618  		rc := auxIntToInt64(v.AuxInt)
  6619  		if v_0.Op != OpLOONG64MOVHreg {
  6620  			break
  6621  		}
  6622  		y := v_0.Args[0]
  6623  		if !(rc >= 16) {
  6624  			break
  6625  		}
  6626  		v.reset(OpLOONG64SRAVconst)
  6627  		v.AuxInt = int64ToAuxInt(63)
  6628  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
  6629  		v0.AuxInt = int64ToAuxInt(48)
  6630  		v0.AddArg(y)
  6631  		v.AddArg(v0)
  6632  		return true
  6633  	}
  6634  	// match: (SRAVconst <t> [rc] (MOVWreg y))
  6635  	// cond: rc >= 32
  6636  	// result: (SRAconst [31] y)
  6637  	for {
  6638  		rc := auxIntToInt64(v.AuxInt)
  6639  		if v_0.Op != OpLOONG64MOVWreg {
  6640  			break
  6641  		}
  6642  		y := v_0.Args[0]
  6643  		if !(rc >= 32) {
  6644  			break
  6645  		}
  6646  		v.reset(OpLOONG64SRAconst)
  6647  		v.AuxInt = int64ToAuxInt(31)
  6648  		v.AddArg(y)
  6649  		return true
  6650  	}
  6651  	// match: (SRAVconst [c] (MOVVconst [d]))
  6652  	// result: (MOVVconst [d>>uint64(c)])
  6653  	for {
  6654  		c := auxIntToInt64(v.AuxInt)
  6655  		if v_0.Op != OpLOONG64MOVVconst {
  6656  			break
  6657  		}
  6658  		d := auxIntToInt64(v_0.AuxInt)
  6659  		v.reset(OpLOONG64MOVVconst)
  6660  		v.AuxInt = int64ToAuxInt(d >> uint64(c))
  6661  		return true
  6662  	}
  6663  	return false
  6664  }
  6665  func rewriteValueLOONG64_OpLOONG64SRL(v *Value) bool {
  6666  	v_1 := v.Args[1]
  6667  	v_0 := v.Args[0]
  6668  	// match: (SRL _ (MOVVconst [c]))
  6669  	// cond: uint64(c)>=32
  6670  	// result: (MOVVconst [0])
  6671  	for {
  6672  		if v_1.Op != OpLOONG64MOVVconst {
  6673  			break
  6674  		}
  6675  		c := auxIntToInt64(v_1.AuxInt)
  6676  		if !(uint64(c) >= 32) {
  6677  			break
  6678  		}
  6679  		v.reset(OpLOONG64MOVVconst)
  6680  		v.AuxInt = int64ToAuxInt(0)
  6681  		return true
  6682  	}
  6683  	// match: (SRL x (MOVVconst [c]))
  6684  	// cond: uint64(c) >=0 && uint64(c) <=31
  6685  	// result: (SRLconst x [c])
  6686  	for {
  6687  		x := v_0
  6688  		if v_1.Op != OpLOONG64MOVVconst {
  6689  			break
  6690  		}
  6691  		c := auxIntToInt64(v_1.AuxInt)
  6692  		if !(uint64(c) >= 0 && uint64(c) <= 31) {
  6693  			break
  6694  		}
  6695  		v.reset(OpLOONG64SRLconst)
  6696  		v.AuxInt = int64ToAuxInt(c)
  6697  		v.AddArg(x)
  6698  		return true
  6699  	}
  6700  	// match: (SRL x (ANDconst [31] y))
  6701  	// result: (SRL x y)
  6702  	for {
  6703  		x := v_0
  6704  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
  6705  			break
  6706  		}
  6707  		y := v_1.Args[0]
  6708  		v.reset(OpLOONG64SRL)
  6709  		v.AddArg2(x, y)
  6710  		return true
  6711  	}
  6712  	return false
  6713  }
  6714  func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
  6715  	v_1 := v.Args[1]
  6716  	v_0 := v.Args[0]
  6717  	// match: (SRLV _ (MOVVconst [c]))
  6718  	// cond: uint64(c)>=64
  6719  	// result: (MOVVconst [0])
  6720  	for {
  6721  		if v_1.Op != OpLOONG64MOVVconst {
  6722  			break
  6723  		}
  6724  		c := auxIntToInt64(v_1.AuxInt)
  6725  		if !(uint64(c) >= 64) {
  6726  			break
  6727  		}
  6728  		v.reset(OpLOONG64MOVVconst)
  6729  		v.AuxInt = int64ToAuxInt(0)
  6730  		return true
  6731  	}
  6732  	// match: (SRLV x (MOVVconst [c]))
  6733  	// result: (SRLVconst x [c])
  6734  	for {
  6735  		x := v_0
  6736  		if v_1.Op != OpLOONG64MOVVconst {
  6737  			break
  6738  		}
  6739  		c := auxIntToInt64(v_1.AuxInt)
  6740  		v.reset(OpLOONG64SRLVconst)
  6741  		v.AuxInt = int64ToAuxInt(c)
  6742  		v.AddArg(x)
  6743  		return true
  6744  	}
  6745  	// match: (SRLV x (ANDconst [63] y))
  6746  	// result: (SRLV x y)
  6747  	for {
  6748  		x := v_0
  6749  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
  6750  			break
  6751  		}
  6752  		y := v_1.Args[0]
  6753  		v.reset(OpLOONG64SRLV)
  6754  		v.AddArg2(x, y)
  6755  		return true
  6756  	}
  6757  	return false
  6758  }
  6759  func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool {
  6760  	v_0 := v.Args[0]
  6761  	// match: (SRLVconst [rc] (SLLVconst [lc] x))
  6762  	// cond: lc <= rc
  6763  	// result: (BSTRPICKV [rc-lc + ((64-lc)-1)<<6] x)
  6764  	for {
  6765  		rc := auxIntToInt64(v.AuxInt)
  6766  		if v_0.Op != OpLOONG64SLLVconst {
  6767  			break
  6768  		}
  6769  		lc := auxIntToInt64(v_0.AuxInt)
  6770  		x := v_0.Args[0]
  6771  		if !(lc <= rc) {
  6772  			break
  6773  		}
  6774  		v.reset(OpLOONG64BSTRPICKV)
  6775  		v.AuxInt = int64ToAuxInt(rc - lc + ((64-lc)-1)<<6)
  6776  		v.AddArg(x)
  6777  		return true
  6778  	}
  6779  	// match: (SRLVconst [rc] (MOVWUreg x))
  6780  	// cond: rc < 32
  6781  	// result: (BSTRPICKV [rc + 31<<6] x)
  6782  	for {
  6783  		rc := auxIntToInt64(v.AuxInt)
  6784  		if v_0.Op != OpLOONG64MOVWUreg {
  6785  			break
  6786  		}
  6787  		x := v_0.Args[0]
  6788  		if !(rc < 32) {
  6789  			break
  6790  		}
  6791  		v.reset(OpLOONG64BSTRPICKV)
  6792  		v.AuxInt = int64ToAuxInt(rc + 31<<6)
  6793  		v.AddArg(x)
  6794  		return true
  6795  	}
  6796  	// match: (SRLVconst [rc] (MOVHUreg x))
  6797  	// cond: rc < 16
  6798  	// result: (BSTRPICKV [rc + 15<<6] x)
  6799  	for {
  6800  		rc := auxIntToInt64(v.AuxInt)
  6801  		if v_0.Op != OpLOONG64MOVHUreg {
  6802  			break
  6803  		}
  6804  		x := v_0.Args[0]
  6805  		if !(rc < 16) {
  6806  			break
  6807  		}
  6808  		v.reset(OpLOONG64BSTRPICKV)
  6809  		v.AuxInt = int64ToAuxInt(rc + 15<<6)
  6810  		v.AddArg(x)
  6811  		return true
  6812  	}
  6813  	// match: (SRLVconst [rc] (MOVBUreg x))
  6814  	// cond: rc < 8
  6815  	// result: (BSTRPICKV [rc + 7<<6] x)
  6816  	for {
  6817  		rc := auxIntToInt64(v.AuxInt)
  6818  		if v_0.Op != OpLOONG64MOVBUreg {
  6819  			break
  6820  		}
  6821  		x := v_0.Args[0]
  6822  		if !(rc < 8) {
  6823  			break
  6824  		}
  6825  		v.reset(OpLOONG64BSTRPICKV)
  6826  		v.AuxInt = int64ToAuxInt(rc + 7<<6)
  6827  		v.AddArg(x)
  6828  		return true
  6829  	}
  6830  	// match: (SRLVconst [rc] (MOVWUreg y))
  6831  	// cond: rc >= 0 && rc <= 31
  6832  	// result: (SRLconst [int64(rc)] y)
  6833  	for {
  6834  		rc := auxIntToInt64(v.AuxInt)
  6835  		if v_0.Op != OpLOONG64MOVWUreg {
  6836  			break
  6837  		}
  6838  		y := v_0.Args[0]
  6839  		if !(rc >= 0 && rc <= 31) {
  6840  			break
  6841  		}
  6842  		v.reset(OpLOONG64SRLconst)
  6843  		v.AuxInt = int64ToAuxInt(int64(rc))
  6844  		v.AddArg(y)
  6845  		return true
  6846  	}
  6847  	// match: (SRLVconst [rc] (MOVWUreg x))
  6848  	// cond: rc >= 32
  6849  	// result: (MOVVconst [0])
  6850  	for {
  6851  		rc := auxIntToInt64(v.AuxInt)
  6852  		if v_0.Op != OpLOONG64MOVWUreg {
  6853  			break
  6854  		}
  6855  		if !(rc >= 32) {
  6856  			break
  6857  		}
  6858  		v.reset(OpLOONG64MOVVconst)
  6859  		v.AuxInt = int64ToAuxInt(0)
  6860  		return true
  6861  	}
  6862  	// match: (SRLVconst [rc] (MOVHUreg x))
  6863  	// cond: rc >= 16
  6864  	// result: (MOVVconst [0])
  6865  	for {
  6866  		rc := auxIntToInt64(v.AuxInt)
  6867  		if v_0.Op != OpLOONG64MOVHUreg {
  6868  			break
  6869  		}
  6870  		if !(rc >= 16) {
  6871  			break
  6872  		}
  6873  		v.reset(OpLOONG64MOVVconst)
  6874  		v.AuxInt = int64ToAuxInt(0)
  6875  		return true
  6876  	}
  6877  	// match: (SRLVconst [rc] (MOVBUreg x))
  6878  	// cond: rc >= 8
  6879  	// result: (MOVVconst [0])
  6880  	for {
  6881  		rc := auxIntToInt64(v.AuxInt)
  6882  		if v_0.Op != OpLOONG64MOVBUreg {
  6883  			break
  6884  		}
  6885  		if !(rc >= 8) {
  6886  			break
  6887  		}
  6888  		v.reset(OpLOONG64MOVVconst)
  6889  		v.AuxInt = int64ToAuxInt(0)
  6890  		return true
  6891  	}
  6892  	// match: (SRLVconst [c] (MOVVconst [d]))
  6893  	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
  6894  	for {
  6895  		c := auxIntToInt64(v.AuxInt)
  6896  		if v_0.Op != OpLOONG64MOVVconst {
  6897  			break
  6898  		}
  6899  		d := auxIntToInt64(v_0.AuxInt)
  6900  		v.reset(OpLOONG64MOVVconst)
  6901  		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
  6902  		return true
  6903  	}
  6904  	return false
  6905  }
  6906  func rewriteValueLOONG64_OpLOONG64SUBD(v *Value) bool {
  6907  	v_1 := v.Args[1]
  6908  	v_0 := v.Args[0]
  6909  	// match: (SUBD (MULD x y) z)
  6910  	// cond: z.Block.Func.useFMA(v)
  6911  	// result: (FMSUBD x y z)
  6912  	for {
  6913  		if v_0.Op != OpLOONG64MULD {
  6914  			break
  6915  		}
  6916  		y := v_0.Args[1]
  6917  		x := v_0.Args[0]
  6918  		z := v_1
  6919  		if !(z.Block.Func.useFMA(v)) {
  6920  			break
  6921  		}
  6922  		v.reset(OpLOONG64FMSUBD)
  6923  		v.AddArg3(x, y, z)
  6924  		return true
  6925  	}
  6926  	// match: (SUBD z (MULD x y))
  6927  	// cond: z.Block.Func.useFMA(v)
  6928  	// result: (FNMSUBD x y z)
  6929  	for {
  6930  		z := v_0
  6931  		if v_1.Op != OpLOONG64MULD {
  6932  			break
  6933  		}
  6934  		y := v_1.Args[1]
  6935  		x := v_1.Args[0]
  6936  		if !(z.Block.Func.useFMA(v)) {
  6937  			break
  6938  		}
  6939  		v.reset(OpLOONG64FNMSUBD)
  6940  		v.AddArg3(x, y, z)
  6941  		return true
  6942  	}
  6943  	// match: (SUBD z (NEGD (MULD x y)))
  6944  	// cond: z.Block.Func.useFMA(v)
  6945  	// result: (FMADDD x y z)
  6946  	for {
  6947  		z := v_0
  6948  		if v_1.Op != OpLOONG64NEGD {
  6949  			break
  6950  		}
  6951  		v_1_0 := v_1.Args[0]
  6952  		if v_1_0.Op != OpLOONG64MULD {
  6953  			break
  6954  		}
  6955  		y := v_1_0.Args[1]
  6956  		x := v_1_0.Args[0]
  6957  		if !(z.Block.Func.useFMA(v)) {
  6958  			break
  6959  		}
  6960  		v.reset(OpLOONG64FMADDD)
  6961  		v.AddArg3(x, y, z)
  6962  		return true
  6963  	}
  6964  	// match: (SUBD (NEGD (MULD x y)) z)
  6965  	// cond: z.Block.Func.useFMA(v)
  6966  	// result: (FNMADDD x y z)
  6967  	for {
  6968  		if v_0.Op != OpLOONG64NEGD {
  6969  			break
  6970  		}
  6971  		v_0_0 := v_0.Args[0]
  6972  		if v_0_0.Op != OpLOONG64MULD {
  6973  			break
  6974  		}
  6975  		y := v_0_0.Args[1]
  6976  		x := v_0_0.Args[0]
  6977  		z := v_1
  6978  		if !(z.Block.Func.useFMA(v)) {
  6979  			break
  6980  		}
  6981  		v.reset(OpLOONG64FNMADDD)
  6982  		v.AddArg3(x, y, z)
  6983  		return true
  6984  	}
  6985  	return false
  6986  }
  6987  func rewriteValueLOONG64_OpLOONG64SUBF(v *Value) bool {
  6988  	v_1 := v.Args[1]
  6989  	v_0 := v.Args[0]
  6990  	// match: (SUBF (MULF x y) z)
  6991  	// cond: z.Block.Func.useFMA(v)
  6992  	// result: (FMSUBF x y z)
  6993  	for {
  6994  		if v_0.Op != OpLOONG64MULF {
  6995  			break
  6996  		}
  6997  		y := v_0.Args[1]
  6998  		x := v_0.Args[0]
  6999  		z := v_1
  7000  		if !(z.Block.Func.useFMA(v)) {
  7001  			break
  7002  		}
  7003  		v.reset(OpLOONG64FMSUBF)
  7004  		v.AddArg3(x, y, z)
  7005  		return true
  7006  	}
  7007  	// match: (SUBF z (MULF x y))
  7008  	// cond: z.Block.Func.useFMA(v)
  7009  	// result: (FNMSUBF x y z)
  7010  	for {
  7011  		z := v_0
  7012  		if v_1.Op != OpLOONG64MULF {
  7013  			break
  7014  		}
  7015  		y := v_1.Args[1]
  7016  		x := v_1.Args[0]
  7017  		if !(z.Block.Func.useFMA(v)) {
  7018  			break
  7019  		}
  7020  		v.reset(OpLOONG64FNMSUBF)
  7021  		v.AddArg3(x, y, z)
  7022  		return true
  7023  	}
  7024  	// match: (SUBF z (NEGF (MULF x y)))
  7025  	// cond: z.Block.Func.useFMA(v)
  7026  	// result: (FMADDF x y z)
  7027  	for {
  7028  		z := v_0
  7029  		if v_1.Op != OpLOONG64NEGF {
  7030  			break
  7031  		}
  7032  		v_1_0 := v_1.Args[0]
  7033  		if v_1_0.Op != OpLOONG64MULF {
  7034  			break
  7035  		}
  7036  		y := v_1_0.Args[1]
  7037  		x := v_1_0.Args[0]
  7038  		if !(z.Block.Func.useFMA(v)) {
  7039  			break
  7040  		}
  7041  		v.reset(OpLOONG64FMADDF)
  7042  		v.AddArg3(x, y, z)
  7043  		return true
  7044  	}
  7045  	// match: (SUBF (NEGF (MULF x y)) z)
  7046  	// cond: z.Block.Func.useFMA(v)
  7047  	// result: (FNMADDF x y z)
  7048  	for {
  7049  		if v_0.Op != OpLOONG64NEGF {
  7050  			break
  7051  		}
  7052  		v_0_0 := v_0.Args[0]
  7053  		if v_0_0.Op != OpLOONG64MULF {
  7054  			break
  7055  		}
  7056  		y := v_0_0.Args[1]
  7057  		x := v_0_0.Args[0]
  7058  		z := v_1
  7059  		if !(z.Block.Func.useFMA(v)) {
  7060  			break
  7061  		}
  7062  		v.reset(OpLOONG64FNMADDF)
  7063  		v.AddArg3(x, y, z)
  7064  		return true
  7065  	}
  7066  	return false
  7067  }
  7068  func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool {
  7069  	v_1 := v.Args[1]
  7070  	v_0 := v.Args[0]
  7071  	// match: (SUBV x (MOVVconst [c]))
  7072  	// cond: is32Bit(c)
  7073  	// result: (SUBVconst [c] x)
  7074  	for {
  7075  		x := v_0
  7076  		if v_1.Op != OpLOONG64MOVVconst {
  7077  			break
  7078  		}
  7079  		c := auxIntToInt64(v_1.AuxInt)
  7080  		if !(is32Bit(c)) {
  7081  			break
  7082  		}
  7083  		v.reset(OpLOONG64SUBVconst)
  7084  		v.AuxInt = int64ToAuxInt(c)
  7085  		v.AddArg(x)
  7086  		return true
  7087  	}
  7088  	// match: (SUBV x (NEGV y))
  7089  	// result: (ADDV x y)
  7090  	for {
  7091  		x := v_0
  7092  		if v_1.Op != OpLOONG64NEGV {
  7093  			break
  7094  		}
  7095  		y := v_1.Args[0]
  7096  		v.reset(OpLOONG64ADDV)
  7097  		v.AddArg2(x, y)
  7098  		return true
  7099  	}
  7100  	// match: (SUBV x x)
  7101  	// result: (MOVVconst [0])
  7102  	for {
  7103  		x := v_0
  7104  		if x != v_1 {
  7105  			break
  7106  		}
  7107  		v.reset(OpLOONG64MOVVconst)
  7108  		v.AuxInt = int64ToAuxInt(0)
  7109  		return true
  7110  	}
  7111  	// match: (SUBV (MOVVconst [0]) x)
  7112  	// result: (NEGV x)
  7113  	for {
  7114  		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
  7115  			break
  7116  		}
  7117  		x := v_1
  7118  		v.reset(OpLOONG64NEGV)
  7119  		v.AddArg(x)
  7120  		return true
  7121  	}
  7122  	// match: (SUBV (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
  7123  	// result: (ADDVconst [c-d] x)
  7124  	for {
  7125  		if v_0.Op != OpLOONG64MOVVconst {
  7126  			break
  7127  		}
  7128  		c := auxIntToInt64(v_0.AuxInt)
  7129  		if v_1.Op != OpLOONG64NEGV {
  7130  			break
  7131  		}
  7132  		v_1_0 := v_1.Args[0]
  7133  		if v_1_0.Op != OpLOONG64SUBVconst {
  7134  			break
  7135  		}
  7136  		d := auxIntToInt64(v_1_0.AuxInt)
  7137  		x := v_1_0.Args[0]
  7138  		v.reset(OpLOONG64ADDVconst)
  7139  		v.AuxInt = int64ToAuxInt(c - d)
  7140  		v.AddArg(x)
  7141  		return true
  7142  	}
  7143  	return false
  7144  }
  7145  func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool {
  7146  	v_0 := v.Args[0]
  7147  	// match: (SUBVconst [0] x)
  7148  	// result: x
  7149  	for {
  7150  		if auxIntToInt64(v.AuxInt) != 0 {
  7151  			break
  7152  		}
  7153  		x := v_0
  7154  		v.copyOf(x)
  7155  		return true
  7156  	}
  7157  	// match: (SUBVconst [c] (MOVVconst [d]))
  7158  	// result: (MOVVconst [d-c])
  7159  	for {
  7160  		c := auxIntToInt64(v.AuxInt)
  7161  		if v_0.Op != OpLOONG64MOVVconst {
  7162  			break
  7163  		}
  7164  		d := auxIntToInt64(v_0.AuxInt)
  7165  		v.reset(OpLOONG64MOVVconst)
  7166  		v.AuxInt = int64ToAuxInt(d - c)
  7167  		return true
  7168  	}
  7169  	// match: (SUBVconst [c] (SUBVconst [d] x))
  7170  	// cond: is32Bit(-c-d)
  7171  	// result: (ADDVconst [-c-d] x)
  7172  	for {
  7173  		c := auxIntToInt64(v.AuxInt)
  7174  		if v_0.Op != OpLOONG64SUBVconst {
  7175  			break
  7176  		}
  7177  		d := auxIntToInt64(v_0.AuxInt)
  7178  		x := v_0.Args[0]
  7179  		if !(is32Bit(-c - d)) {
  7180  			break
  7181  		}
  7182  		v.reset(OpLOONG64ADDVconst)
  7183  		v.AuxInt = int64ToAuxInt(-c - d)
  7184  		v.AddArg(x)
  7185  		return true
  7186  	}
  7187  	// match: (SUBVconst [c] (ADDVconst [d] x))
  7188  	// cond: is32Bit(-c+d)
  7189  	// result: (ADDVconst [-c+d] x)
  7190  	for {
  7191  		c := auxIntToInt64(v.AuxInt)
  7192  		if v_0.Op != OpLOONG64ADDVconst {
  7193  			break
  7194  		}
  7195  		d := auxIntToInt64(v_0.AuxInt)
  7196  		x := v_0.Args[0]
  7197  		if !(is32Bit(-c + d)) {
  7198  			break
  7199  		}
  7200  		v.reset(OpLOONG64ADDVconst)
  7201  		v.AuxInt = int64ToAuxInt(-c + d)
  7202  		v.AddArg(x)
  7203  		return true
  7204  	}
  7205  	return false
  7206  }
  7207  func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool {
  7208  	v_1 := v.Args[1]
  7209  	v_0 := v.Args[0]
  7210  	// match: (XOR x (MOVVconst [c]))
  7211  	// cond: is32Bit(c)
  7212  	// result: (XORconst [c] x)
  7213  	for {
  7214  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7215  			x := v_0
  7216  			if v_1.Op != OpLOONG64MOVVconst {
  7217  				continue
  7218  			}
  7219  			c := auxIntToInt64(v_1.AuxInt)
  7220  			if !(is32Bit(c)) {
  7221  				continue
  7222  			}
  7223  			v.reset(OpLOONG64XORconst)
  7224  			v.AuxInt = int64ToAuxInt(c)
  7225  			v.AddArg(x)
  7226  			return true
  7227  		}
  7228  		break
  7229  	}
  7230  	// match: (XOR x x)
  7231  	// result: (MOVVconst [0])
  7232  	for {
  7233  		x := v_0
  7234  		if x != v_1 {
  7235  			break
  7236  		}
  7237  		v.reset(OpLOONG64MOVVconst)
  7238  		v.AuxInt = int64ToAuxInt(0)
  7239  		return true
  7240  	}
  7241  	return false
  7242  }
  7243  func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool {
  7244  	v_0 := v.Args[0]
  7245  	// match: (XORconst [0] x)
  7246  	// result: x
  7247  	for {
  7248  		if auxIntToInt64(v.AuxInt) != 0 {
  7249  			break
  7250  		}
  7251  		x := v_0
  7252  		v.copyOf(x)
  7253  		return true
  7254  	}
  7255  	// match: (XORconst [-1] x)
  7256  	// result: (NORconst [0] x)
  7257  	for {
  7258  		if auxIntToInt64(v.AuxInt) != -1 {
  7259  			break
  7260  		}
  7261  		x := v_0
  7262  		v.reset(OpLOONG64NORconst)
  7263  		v.AuxInt = int64ToAuxInt(0)
  7264  		v.AddArg(x)
  7265  		return true
  7266  	}
  7267  	// match: (XORconst [c] (MOVVconst [d]))
  7268  	// result: (MOVVconst [c^d])
  7269  	for {
  7270  		c := auxIntToInt64(v.AuxInt)
  7271  		if v_0.Op != OpLOONG64MOVVconst {
  7272  			break
  7273  		}
  7274  		d := auxIntToInt64(v_0.AuxInt)
  7275  		v.reset(OpLOONG64MOVVconst)
  7276  		v.AuxInt = int64ToAuxInt(c ^ d)
  7277  		return true
  7278  	}
  7279  	// match: (XORconst [c] (XORconst [d] x))
  7280  	// cond: is32Bit(c^d)
  7281  	// result: (XORconst [c^d] x)
  7282  	for {
  7283  		c := auxIntToInt64(v.AuxInt)
  7284  		if v_0.Op != OpLOONG64XORconst {
  7285  			break
  7286  		}
  7287  		d := auxIntToInt64(v_0.AuxInt)
  7288  		x := v_0.Args[0]
  7289  		if !(is32Bit(c ^ d)) {
  7290  			break
  7291  		}
  7292  		v.reset(OpLOONG64XORconst)
  7293  		v.AuxInt = int64ToAuxInt(c ^ d)
  7294  		v.AddArg(x)
  7295  		return true
  7296  	}
  7297  	return false
  7298  }
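// The Leq* lowerings below use the identity x <= y == !(x > y): SGT/SGTU
// produce 0 or 1, and XORing with the constant 1 inverts that bit. Sub-word
// operands are sign-extended (signed compares) or zero-extended (unsigned
// compares, SGTU) to 64 bits first. In rough scalar terms (x16, y16 standing
// in for the int16 operands of Leq16):
//
//	sgt := 0
//	if int64(x16) > int64(y16) { // SGT (SignExt16to64 x) (SignExt16to64 y)
//		sgt = 1
//	}
//	leq := 1 ^ sgt // XOR (MOVVconst [1]) ...: 1 when x <= y, 0 otherwise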
  7299  func rewriteValueLOONG64_OpLeq16(v *Value) bool {
  7300  	v_1 := v.Args[1]
  7301  	v_0 := v.Args[0]
  7302  	b := v.Block
  7303  	typ := &b.Func.Config.Types
  7304  	// match: (Leq16 x y)
  7305  	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
  7306  	for {
  7307  		x := v_0
  7308  		y := v_1
  7309  		v.reset(OpLOONG64XOR)
  7310  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7311  		v0.AuxInt = int64ToAuxInt(1)
  7312  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7313  		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7314  		v2.AddArg(x)
  7315  		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7316  		v3.AddArg(y)
  7317  		v1.AddArg2(v2, v3)
  7318  		v.AddArg2(v0, v1)
  7319  		return true
  7320  	}
  7321  }
  7322  func rewriteValueLOONG64_OpLeq16U(v *Value) bool {
  7323  	v_1 := v.Args[1]
  7324  	v_0 := v.Args[0]
  7325  	b := v.Block
  7326  	typ := &b.Func.Config.Types
  7327  	// match: (Leq16U x y)
  7328  	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
  7329  	for {
  7330  		x := v_0
  7331  		y := v_1
  7332  		v.reset(OpLOONG64XOR)
  7333  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7334  		v0.AuxInt = int64ToAuxInt(1)
  7335  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7336  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7337  		v2.AddArg(x)
  7338  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7339  		v3.AddArg(y)
  7340  		v1.AddArg2(v2, v3)
  7341  		v.AddArg2(v0, v1)
  7342  		return true
  7343  	}
  7344  }
  7345  func rewriteValueLOONG64_OpLeq32(v *Value) bool {
  7346  	v_1 := v.Args[1]
  7347  	v_0 := v.Args[0]
  7348  	b := v.Block
  7349  	typ := &b.Func.Config.Types
  7350  	// match: (Leq32 x y)
  7351  	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
  7352  	for {
  7353  		x := v_0
  7354  		y := v_1
  7355  		v.reset(OpLOONG64XOR)
  7356  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7357  		v0.AuxInt = int64ToAuxInt(1)
  7358  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7359  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7360  		v2.AddArg(x)
  7361  		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7362  		v3.AddArg(y)
  7363  		v1.AddArg2(v2, v3)
  7364  		v.AddArg2(v0, v1)
  7365  		return true
  7366  	}
  7367  }
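// Leq32F and Leq64F are lowered by swapping the operands: x <= y becomes
// CMPGEF/CMPGED (y >= x), and the result is read back from the floating-point
// condition flag with FPFlagTrue. The Less*F rules further below use
// CMPGTF/CMPGTD in the same operand order.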
  7368  func rewriteValueLOONG64_OpLeq32F(v *Value) bool {
  7369  	v_1 := v.Args[1]
  7370  	v_0 := v.Args[0]
  7371  	b := v.Block
  7372  	// match: (Leq32F x y)
  7373  	// result: (FPFlagTrue (CMPGEF y x))
  7374  	for {
  7375  		x := v_0
  7376  		y := v_1
  7377  		v.reset(OpLOONG64FPFlagTrue)
  7378  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags)
  7379  		v0.AddArg2(y, x)
  7380  		v.AddArg(v0)
  7381  		return true
  7382  	}
  7383  }
  7384  func rewriteValueLOONG64_OpLeq32U(v *Value) bool {
  7385  	v_1 := v.Args[1]
  7386  	v_0 := v.Args[0]
  7387  	b := v.Block
  7388  	typ := &b.Func.Config.Types
  7389  	// match: (Leq32U x y)
  7390  	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
  7391  	for {
  7392  		x := v_0
  7393  		y := v_1
  7394  		v.reset(OpLOONG64XOR)
  7395  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7396  		v0.AuxInt = int64ToAuxInt(1)
  7397  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7398  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7399  		v2.AddArg(x)
  7400  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7401  		v3.AddArg(y)
  7402  		v1.AddArg2(v2, v3)
  7403  		v.AddArg2(v0, v1)
  7404  		return true
  7405  	}
  7406  }
  7407  func rewriteValueLOONG64_OpLeq64(v *Value) bool {
  7408  	v_1 := v.Args[1]
  7409  	v_0 := v.Args[0]
  7410  	b := v.Block
  7411  	typ := &b.Func.Config.Types
  7412  	// match: (Leq64 x y)
  7413  	// result: (XOR (MOVVconst [1]) (SGT x y))
  7414  	for {
  7415  		x := v_0
  7416  		y := v_1
  7417  		v.reset(OpLOONG64XOR)
  7418  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7419  		v0.AuxInt = int64ToAuxInt(1)
  7420  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7421  		v1.AddArg2(x, y)
  7422  		v.AddArg2(v0, v1)
  7423  		return true
  7424  	}
  7425  }
  7426  func rewriteValueLOONG64_OpLeq64F(v *Value) bool {
  7427  	v_1 := v.Args[1]
  7428  	v_0 := v.Args[0]
  7429  	b := v.Block
  7430  	// match: (Leq64F x y)
  7431  	// result: (FPFlagTrue (CMPGED y x))
  7432  	for {
  7433  		x := v_0
  7434  		y := v_1
  7435  		v.reset(OpLOONG64FPFlagTrue)
  7436  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags)
  7437  		v0.AddArg2(y, x)
  7438  		v.AddArg(v0)
  7439  		return true
  7440  	}
  7441  }
  7442  func rewriteValueLOONG64_OpLeq64U(v *Value) bool {
  7443  	v_1 := v.Args[1]
  7444  	v_0 := v.Args[0]
  7445  	b := v.Block
  7446  	typ := &b.Func.Config.Types
  7447  	// match: (Leq64U x y)
  7448  	// result: (XOR (MOVVconst [1]) (SGTU x y))
  7449  	for {
  7450  		x := v_0
  7451  		y := v_1
  7452  		v.reset(OpLOONG64XOR)
  7453  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7454  		v0.AuxInt = int64ToAuxInt(1)
  7455  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7456  		v1.AddArg2(x, y)
  7457  		v.AddArg2(v0, v1)
  7458  		return true
  7459  	}
  7460  }
  7461  func rewriteValueLOONG64_OpLeq8(v *Value) bool {
  7462  	v_1 := v.Args[1]
  7463  	v_0 := v.Args[0]
  7464  	b := v.Block
  7465  	typ := &b.Func.Config.Types
  7466  	// match: (Leq8 x y)
  7467  	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
  7468  	for {
  7469  		x := v_0
  7470  		y := v_1
  7471  		v.reset(OpLOONG64XOR)
  7472  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7473  		v0.AuxInt = int64ToAuxInt(1)
  7474  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7475  		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7476  		v2.AddArg(x)
  7477  		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7478  		v3.AddArg(y)
  7479  		v1.AddArg2(v2, v3)
  7480  		v.AddArg2(v0, v1)
  7481  		return true
  7482  	}
  7483  }
  7484  func rewriteValueLOONG64_OpLeq8U(v *Value) bool {
  7485  	v_1 := v.Args[1]
  7486  	v_0 := v.Args[0]
  7487  	b := v.Block
  7488  	typ := &b.Func.Config.Types
  7489  	// match: (Leq8U x y)
  7490  	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
  7491  	for {
  7492  		x := v_0
  7493  		y := v_1
  7494  		v.reset(OpLOONG64XOR)
  7495  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7496  		v0.AuxInt = int64ToAuxInt(1)
  7497  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7498  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7499  		v2.AddArg(x)
  7500  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7501  		v3.AddArg(y)
  7502  		v1.AddArg2(v2, v3)
  7503  		v.AddArg2(v0, v1)
  7504  		return true
  7505  	}
  7506  }
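// Less* needs no inversion: x < y is the same as y > x, so the operands are
// simply swapped into SGT (signed) or SGTU (unsigned), after widening
// sub-word values to 64 bits.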
  7507  func rewriteValueLOONG64_OpLess16(v *Value) bool {
  7508  	v_1 := v.Args[1]
  7509  	v_0 := v.Args[0]
  7510  	b := v.Block
  7511  	typ := &b.Func.Config.Types
  7512  	// match: (Less16 x y)
  7513  	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
  7514  	for {
  7515  		x := v_0
  7516  		y := v_1
  7517  		v.reset(OpLOONG64SGT)
  7518  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7519  		v0.AddArg(y)
  7520  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7521  		v1.AddArg(x)
  7522  		v.AddArg2(v0, v1)
  7523  		return true
  7524  	}
  7525  }
  7526  func rewriteValueLOONG64_OpLess16U(v *Value) bool {
  7527  	v_1 := v.Args[1]
  7528  	v_0 := v.Args[0]
  7529  	b := v.Block
  7530  	typ := &b.Func.Config.Types
  7531  	// match: (Less16U x y)
  7532  	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
  7533  	for {
  7534  		x := v_0
  7535  		y := v_1
  7536  		v.reset(OpLOONG64SGTU)
  7537  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7538  		v0.AddArg(y)
  7539  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7540  		v1.AddArg(x)
  7541  		v.AddArg2(v0, v1)
  7542  		return true
  7543  	}
  7544  }
  7545  func rewriteValueLOONG64_OpLess32(v *Value) bool {
  7546  	v_1 := v.Args[1]
  7547  	v_0 := v.Args[0]
  7548  	b := v.Block
  7549  	typ := &b.Func.Config.Types
  7550  	// match: (Less32 x y)
  7551  	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
  7552  	for {
  7553  		x := v_0
  7554  		y := v_1
  7555  		v.reset(OpLOONG64SGT)
  7556  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7557  		v0.AddArg(y)
  7558  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7559  		v1.AddArg(x)
  7560  		v.AddArg2(v0, v1)
  7561  		return true
  7562  	}
  7563  }
  7564  func rewriteValueLOONG64_OpLess32F(v *Value) bool {
  7565  	v_1 := v.Args[1]
  7566  	v_0 := v.Args[0]
  7567  	b := v.Block
  7568  	// match: (Less32F x y)
  7569  	// result: (FPFlagTrue (CMPGTF y x))
  7570  	for {
  7571  		x := v_0
  7572  		y := v_1
  7573  		v.reset(OpLOONG64FPFlagTrue)
  7574  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags)
  7575  		v0.AddArg2(y, x)
  7576  		v.AddArg(v0)
  7577  		return true
  7578  	}
  7579  }
  7580  func rewriteValueLOONG64_OpLess32U(v *Value) bool {
  7581  	v_1 := v.Args[1]
  7582  	v_0 := v.Args[0]
  7583  	b := v.Block
  7584  	typ := &b.Func.Config.Types
  7585  	// match: (Less32U x y)
  7586  	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
  7587  	for {
  7588  		x := v_0
  7589  		y := v_1
  7590  		v.reset(OpLOONG64SGTU)
  7591  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7592  		v0.AddArg(y)
  7593  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7594  		v1.AddArg(x)
  7595  		v.AddArg2(v0, v1)
  7596  		return true
  7597  	}
  7598  }
  7599  func rewriteValueLOONG64_OpLess64(v *Value) bool {
  7600  	v_1 := v.Args[1]
  7601  	v_0 := v.Args[0]
  7602  	// match: (Less64 x y)
  7603  	// result: (SGT y x)
  7604  	for {
  7605  		x := v_0
  7606  		y := v_1
  7607  		v.reset(OpLOONG64SGT)
  7608  		v.AddArg2(y, x)
  7609  		return true
  7610  	}
  7611  }
  7612  func rewriteValueLOONG64_OpLess64F(v *Value) bool {
  7613  	v_1 := v.Args[1]
  7614  	v_0 := v.Args[0]
  7615  	b := v.Block
  7616  	// match: (Less64F x y)
  7617  	// result: (FPFlagTrue (CMPGTD y x))
  7618  	for {
  7619  		x := v_0
  7620  		y := v_1
  7621  		v.reset(OpLOONG64FPFlagTrue)
  7622  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags)
  7623  		v0.AddArg2(y, x)
  7624  		v.AddArg(v0)
  7625  		return true
  7626  	}
  7627  }
  7628  func rewriteValueLOONG64_OpLess64U(v *Value) bool {
  7629  	v_1 := v.Args[1]
  7630  	v_0 := v.Args[0]
  7631  	// match: (Less64U x y)
  7632  	// result: (SGTU y x)
  7633  	for {
  7634  		x := v_0
  7635  		y := v_1
  7636  		v.reset(OpLOONG64SGTU)
  7637  		v.AddArg2(y, x)
  7638  		return true
  7639  	}
  7640  }
  7641  func rewriteValueLOONG64_OpLess8(v *Value) bool {
  7642  	v_1 := v.Args[1]
  7643  	v_0 := v.Args[0]
  7644  	b := v.Block
  7645  	typ := &b.Func.Config.Types
  7646  	// match: (Less8 x y)
  7647  	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
  7648  	for {
  7649  		x := v_0
  7650  		y := v_1
  7651  		v.reset(OpLOONG64SGT)
  7652  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7653  		v0.AddArg(y)
  7654  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7655  		v1.AddArg(x)
  7656  		v.AddArg2(v0, v1)
  7657  		return true
  7658  	}
  7659  }
  7660  func rewriteValueLOONG64_OpLess8U(v *Value) bool {
  7661  	v_1 := v.Args[1]
  7662  	v_0 := v.Args[0]
  7663  	b := v.Block
  7664  	typ := &b.Func.Config.Types
  7665  	// match: (Less8U x y)
  7666  	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
  7667  	for {
  7668  		x := v_0
  7669  		y := v_1
  7670  		v.reset(OpLOONG64SGTU)
  7671  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7672  		v0.AddArg(y)
  7673  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7674  		v1.AddArg(x)
  7675  		v.AddArg2(v0, v1)
  7676  		return true
  7677  	}
  7678  }
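// Load is dispatched purely on the static type: width and signedness select
// among MOVBload/MOVBUload/MOVHload/MOVHUload/MOVWload/MOVWUload/MOVVload.
// Booleans load as unsigned bytes, pointers as 64-bit words, and floats use
// MOVFload/MOVDload.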
  7679  func rewriteValueLOONG64_OpLoad(v *Value) bool {
  7680  	v_1 := v.Args[1]
  7681  	v_0 := v.Args[0]
  7682  	// match: (Load <t> ptr mem)
  7683  	// cond: t.IsBoolean()
  7684  	// result: (MOVBUload ptr mem)
  7685  	for {
  7686  		t := v.Type
  7687  		ptr := v_0
  7688  		mem := v_1
  7689  		if !(t.IsBoolean()) {
  7690  			break
  7691  		}
  7692  		v.reset(OpLOONG64MOVBUload)
  7693  		v.AddArg2(ptr, mem)
  7694  		return true
  7695  	}
  7696  	// match: (Load <t> ptr mem)
  7697  	// cond: (is8BitInt(t) && t.IsSigned())
  7698  	// result: (MOVBload ptr mem)
  7699  	for {
  7700  		t := v.Type
  7701  		ptr := v_0
  7702  		mem := v_1
  7703  		if !(is8BitInt(t) && t.IsSigned()) {
  7704  			break
  7705  		}
  7706  		v.reset(OpLOONG64MOVBload)
  7707  		v.AddArg2(ptr, mem)
  7708  		return true
  7709  	}
  7710  	// match: (Load <t> ptr mem)
  7711  	// cond: (is8BitInt(t) && !t.IsSigned())
  7712  	// result: (MOVBUload ptr mem)
  7713  	for {
  7714  		t := v.Type
  7715  		ptr := v_0
  7716  		mem := v_1
  7717  		if !(is8BitInt(t) && !t.IsSigned()) {
  7718  			break
  7719  		}
  7720  		v.reset(OpLOONG64MOVBUload)
  7721  		v.AddArg2(ptr, mem)
  7722  		return true
  7723  	}
  7724  	// match: (Load <t> ptr mem)
  7725  	// cond: (is16BitInt(t) && t.IsSigned())
  7726  	// result: (MOVHload ptr mem)
  7727  	for {
  7728  		t := v.Type
  7729  		ptr := v_0
  7730  		mem := v_1
  7731  		if !(is16BitInt(t) && t.IsSigned()) {
  7732  			break
  7733  		}
  7734  		v.reset(OpLOONG64MOVHload)
  7735  		v.AddArg2(ptr, mem)
  7736  		return true
  7737  	}
  7738  	// match: (Load <t> ptr mem)
  7739  	// cond: (is16BitInt(t) && !t.IsSigned())
  7740  	// result: (MOVHUload ptr mem)
  7741  	for {
  7742  		t := v.Type
  7743  		ptr := v_0
  7744  		mem := v_1
  7745  		if !(is16BitInt(t) && !t.IsSigned()) {
  7746  			break
  7747  		}
  7748  		v.reset(OpLOONG64MOVHUload)
  7749  		v.AddArg2(ptr, mem)
  7750  		return true
  7751  	}
  7752  	// match: (Load <t> ptr mem)
  7753  	// cond: (is32BitInt(t) && t.IsSigned())
  7754  	// result: (MOVWload ptr mem)
  7755  	for {
  7756  		t := v.Type
  7757  		ptr := v_0
  7758  		mem := v_1
  7759  		if !(is32BitInt(t) && t.IsSigned()) {
  7760  			break
  7761  		}
  7762  		v.reset(OpLOONG64MOVWload)
  7763  		v.AddArg2(ptr, mem)
  7764  		return true
  7765  	}
  7766  	// match: (Load <t> ptr mem)
  7767  	// cond: (is32BitInt(t) && !t.IsSigned())
  7768  	// result: (MOVWUload ptr mem)
  7769  	for {
  7770  		t := v.Type
  7771  		ptr := v_0
  7772  		mem := v_1
  7773  		if !(is32BitInt(t) && !t.IsSigned()) {
  7774  			break
  7775  		}
  7776  		v.reset(OpLOONG64MOVWUload)
  7777  		v.AddArg2(ptr, mem)
  7778  		return true
  7779  	}
  7780  	// match: (Load <t> ptr mem)
  7781  	// cond: (is64BitInt(t) || isPtr(t))
  7782  	// result: (MOVVload ptr mem)
  7783  	for {
  7784  		t := v.Type
  7785  		ptr := v_0
  7786  		mem := v_1
  7787  		if !(is64BitInt(t) || isPtr(t)) {
  7788  			break
  7789  		}
  7790  		v.reset(OpLOONG64MOVVload)
  7791  		v.AddArg2(ptr, mem)
  7792  		return true
  7793  	}
  7794  	// match: (Load <t> ptr mem)
  7795  	// cond: is32BitFloat(t)
  7796  	// result: (MOVFload ptr mem)
  7797  	for {
  7798  		t := v.Type
  7799  		ptr := v_0
  7800  		mem := v_1
  7801  		if !(is32BitFloat(t)) {
  7802  			break
  7803  		}
  7804  		v.reset(OpLOONG64MOVFload)
  7805  		v.AddArg2(ptr, mem)
  7806  		return true
  7807  	}
  7808  	// match: (Load <t> ptr mem)
  7809  	// cond: is64BitFloat(t)
  7810  	// result: (MOVDload ptr mem)
  7811  	for {
  7812  		t := v.Type
  7813  		ptr := v_0
  7814  		mem := v_1
  7815  		if !(is64BitFloat(t)) {
  7816  			break
  7817  		}
  7818  		v.reset(OpLOONG64MOVDload)
  7819  		v.AddArg2(ptr, mem)
  7820  		return true
  7821  	}
  7822  	return false
  7823  }
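// LocalAddr of a type containing pointers keeps its memory argument, wrapped
// in SPanchored, so the address value stays ordered relative to the memory
// state; pointer-free locals drop the memory argument and become a plain
// MOVVaddr of the base.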
  7824  func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
  7825  	v_1 := v.Args[1]
  7826  	v_0 := v.Args[0]
  7827  	b := v.Block
  7828  	typ := &b.Func.Config.Types
  7829  	// match: (LocalAddr <t> {sym} base mem)
  7830  	// cond: t.Elem().HasPointers()
  7831  	// result: (MOVVaddr {sym} (SPanchored base mem))
  7832  	for {
  7833  		t := v.Type
  7834  		sym := auxToSym(v.Aux)
  7835  		base := v_0
  7836  		mem := v_1
  7837  		if !(t.Elem().HasPointers()) {
  7838  			break
  7839  		}
  7840  		v.reset(OpLOONG64MOVVaddr)
  7841  		v.Aux = symToAux(sym)
  7842  		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
  7843  		v0.AddArg2(base, mem)
  7844  		v.AddArg(v0)
  7845  		return true
  7846  	}
  7847  	// match: (LocalAddr <t> {sym} base _)
  7848  	// cond: !t.Elem().HasPointers()
  7849  	// result: (MOVVaddr {sym} base)
  7850  	for {
  7851  		t := v.Type
  7852  		sym := auxToSym(v.Aux)
  7853  		base := v_0
  7854  		if !(!t.Elem().HasPointers()) {
  7855  			break
  7856  		}
  7857  		v.reset(OpLOONG64MOVVaddr)
  7858  		v.Aux = symToAux(sym)
  7859  		v.AddArg(base)
  7860  		return true
  7861  	}
  7862  	return false
  7863  }
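// The shift lowerings below come in two flavors. When the compiler can prove
// the count is in range (shiftIsBounded), the shift maps directly onto the
// hardware instruction. Otherwise Go semantics require a zero result for
// counts >= the operand width, so the shift is wrapped in MASKEQZ: SGTU
// compares the (zero-extended) count against the width, and MASKEQZ zeroes
// the shifted value when that compare yields 0. Roughly, for Lsh16x16:
//
//	shifted := x << (y & 63)     // what SLLV computes (low 6 bits of count)
//	if uint64(uint16(y)) >= 64 { // !(SGTU 64 (ZeroExt16to64 y))
//		shifted = 0          // MASKEQZ zeroes the result
//	}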
  7864  func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
  7865  	v_1 := v.Args[1]
  7866  	v_0 := v.Args[0]
  7867  	b := v.Block
  7868  	typ := &b.Func.Config.Types
  7869  	// match: (Lsh16x16 x y)
  7870  	// cond: shiftIsBounded(v)
  7871  	// result: (SLLV x y)
  7872  	for {
  7873  		x := v_0
  7874  		y := v_1
  7875  		if !(shiftIsBounded(v)) {
  7876  			break
  7877  		}
  7878  		v.reset(OpLOONG64SLLV)
  7879  		v.AddArg2(x, y)
  7880  		return true
  7881  	}
  7882  	// match: (Lsh16x16 <t> x y)
  7883  	// cond: !shiftIsBounded(v)
  7884  	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  7885  	for {
  7886  		t := v.Type
  7887  		x := v_0
  7888  		y := v_1
  7889  		if !(!shiftIsBounded(v)) {
  7890  			break
  7891  		}
  7892  		v.reset(OpLOONG64MASKEQZ)
  7893  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7894  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7895  		v1.AddArg(y)
  7896  		v0.AddArg2(x, v1)
  7897  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7898  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7899  		v3.AuxInt = int64ToAuxInt(64)
  7900  		v2.AddArg2(v3, v1)
  7901  		v.AddArg2(v0, v2)
  7902  		return true
  7903  	}
  7904  	return false
  7905  }
  7906  func rewriteValueLOONG64_OpLsh16x32(v *Value) bool {
  7907  	v_1 := v.Args[1]
  7908  	v_0 := v.Args[0]
  7909  	b := v.Block
  7910  	typ := &b.Func.Config.Types
  7911  	// match: (Lsh16x32 x y)
  7912  	// cond: shiftIsBounded(v)
  7913  	// result: (SLLV x y)
  7914  	for {
  7915  		x := v_0
  7916  		y := v_1
  7917  		if !(shiftIsBounded(v)) {
  7918  			break
  7919  		}
  7920  		v.reset(OpLOONG64SLLV)
  7921  		v.AddArg2(x, y)
  7922  		return true
  7923  	}
  7924  	// match: (Lsh16x32 <t> x y)
  7925  	// cond: !shiftIsBounded(v)
  7926  	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  7927  	for {
  7928  		t := v.Type
  7929  		x := v_0
  7930  		y := v_1
  7931  		if !(!shiftIsBounded(v)) {
  7932  			break
  7933  		}
  7934  		v.reset(OpLOONG64MASKEQZ)
  7935  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7936  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7937  		v1.AddArg(y)
  7938  		v0.AddArg2(x, v1)
  7939  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7940  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7941  		v3.AuxInt = int64ToAuxInt(64)
  7942  		v2.AddArg2(v3, v1)
  7943  		v.AddArg2(v0, v2)
  7944  		return true
  7945  	}
  7946  	return false
  7947  }
  7948  func rewriteValueLOONG64_OpLsh16x64(v *Value) bool {
  7949  	v_1 := v.Args[1]
  7950  	v_0 := v.Args[0]
  7951  	b := v.Block
  7952  	typ := &b.Func.Config.Types
  7953  	// match: (Lsh16x64 x y)
  7954  	// cond: shiftIsBounded(v)
  7955  	// result: (SLLV x y)
  7956  	for {
  7957  		x := v_0
  7958  		y := v_1
  7959  		if !(shiftIsBounded(v)) {
  7960  			break
  7961  		}
  7962  		v.reset(OpLOONG64SLLV)
  7963  		v.AddArg2(x, y)
  7964  		return true
  7965  	}
  7966  	// match: (Lsh16x64 <t> x y)
  7967  	// cond: !shiftIsBounded(v)
  7968  	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  7969  	for {
  7970  		t := v.Type
  7971  		x := v_0
  7972  		y := v_1
  7973  		if !(!shiftIsBounded(v)) {
  7974  			break
  7975  		}
  7976  		v.reset(OpLOONG64MASKEQZ)
  7977  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7978  		v0.AddArg2(x, y)
  7979  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7980  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7981  		v2.AuxInt = int64ToAuxInt(64)
  7982  		v1.AddArg2(v2, y)
  7983  		v.AddArg2(v0, v1)
  7984  		return true
  7985  	}
  7986  	return false
  7987  }
  7988  func rewriteValueLOONG64_OpLsh16x8(v *Value) bool {
  7989  	v_1 := v.Args[1]
  7990  	v_0 := v.Args[0]
  7991  	b := v.Block
  7992  	typ := &b.Func.Config.Types
  7993  	// match: (Lsh16x8 x y)
  7994  	// cond: shiftIsBounded(v)
  7995  	// result: (SLLV x y)
  7996  	for {
  7997  		x := v_0
  7998  		y := v_1
  7999  		if !(shiftIsBounded(v)) {
  8000  			break
  8001  		}
  8002  		v.reset(OpLOONG64SLLV)
  8003  		v.AddArg2(x, y)
  8004  		return true
  8005  	}
  8006  	// match: (Lsh16x8 <t> x y)
  8007  	// cond: !shiftIsBounded(v)
  8008  	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  8009  	for {
  8010  		t := v.Type
  8011  		x := v_0
  8012  		y := v_1
  8013  		if !(!shiftIsBounded(v)) {
  8014  			break
  8015  		}
  8016  		v.reset(OpLOONG64MASKEQZ)
  8017  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8018  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8019  		v1.AddArg(y)
  8020  		v0.AddArg2(x, v1)
  8021  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8022  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8023  		v3.AuxInt = int64ToAuxInt(64)
  8024  		v2.AddArg2(v3, v1)
  8025  		v.AddArg2(v0, v2)
  8026  		return true
  8027  	}
  8028  	return false
  8029  }
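// The 32-bit shifts below follow the same pattern, but use the word-sized
// shift (SLL) and compare the count against 32 instead of 64.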
  8030  func rewriteValueLOONG64_OpLsh32x16(v *Value) bool {
  8031  	v_1 := v.Args[1]
  8032  	v_0 := v.Args[0]
  8033  	b := v.Block
  8034  	typ := &b.Func.Config.Types
  8035  	// match: (Lsh32x16 x y)
  8036  	// cond: shiftIsBounded(v)
  8037  	// result: (SLL x y)
  8038  	for {
  8039  		x := v_0
  8040  		y := v_1
  8041  		if !(shiftIsBounded(v)) {
  8042  			break
  8043  		}
  8044  		v.reset(OpLOONG64SLL)
  8045  		v.AddArg2(x, y)
  8046  		return true
  8047  	}
  8048  	// match: (Lsh32x16 <t> x y)
  8049  	// cond: !shiftIsBounded(v)
  8050  	// result: (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
  8051  	for {
  8052  		t := v.Type
  8053  		x := v_0
  8054  		y := v_1
  8055  		if !(!shiftIsBounded(v)) {
  8056  			break
  8057  		}
  8058  		v.reset(OpLOONG64MASKEQZ)
  8059  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  8060  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8061  		v1.AddArg(y)
  8062  		v0.AddArg2(x, v1)
  8063  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8064  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8065  		v3.AuxInt = int64ToAuxInt(32)
  8066  		v2.AddArg2(v3, v1)
  8067  		v.AddArg2(v0, v2)
  8068  		return true
  8069  	}
  8070  	return false
  8071  }
  8072  func rewriteValueLOONG64_OpLsh32x32(v *Value) bool {
  8073  	v_1 := v.Args[1]
  8074  	v_0 := v.Args[0]
  8075  	b := v.Block
  8076  	typ := &b.Func.Config.Types
  8077  	// match: (Lsh32x32 x y)
  8078  	// cond: shiftIsBounded(v)
  8079  	// result: (SLL x y)
  8080  	for {
  8081  		x := v_0
  8082  		y := v_1
  8083  		if !(shiftIsBounded(v)) {
  8084  			break
  8085  		}
  8086  		v.reset(OpLOONG64SLL)
  8087  		v.AddArg2(x, y)
  8088  		return true
  8089  	}
  8090  	// match: (Lsh32x32 <t> x y)
  8091  	// cond: !shiftIsBounded(v)
  8092  	// result: (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
  8093  	for {
  8094  		t := v.Type
  8095  		x := v_0
  8096  		y := v_1
  8097  		if !(!shiftIsBounded(v)) {
  8098  			break
  8099  		}
  8100  		v.reset(OpLOONG64MASKEQZ)
  8101  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  8102  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8103  		v1.AddArg(y)
  8104  		v0.AddArg2(x, v1)
  8105  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8106  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8107  		v3.AuxInt = int64ToAuxInt(32)
  8108  		v2.AddArg2(v3, v1)
  8109  		v.AddArg2(v0, v2)
  8110  		return true
  8111  	}
  8112  	return false
  8113  }
  8114  func rewriteValueLOONG64_OpLsh32x64(v *Value) bool {
  8115  	v_1 := v.Args[1]
  8116  	v_0 := v.Args[0]
  8117  	b := v.Block
  8118  	typ := &b.Func.Config.Types
  8119  	// match: (Lsh32x64 x y)
  8120  	// cond: shiftIsBounded(v)
  8121  	// result: (SLL x y)
  8122  	for {
  8123  		x := v_0
  8124  		y := v_1
  8125  		if !(shiftIsBounded(v)) {
  8126  			break
  8127  		}
  8128  		v.reset(OpLOONG64SLL)
  8129  		v.AddArg2(x, y)
  8130  		return true
  8131  	}
  8132  	// match: (Lsh32x64 <t> x y)
  8133  	// cond: !shiftIsBounded(v)
  8134  	// result: (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
  8135  	for {
  8136  		t := v.Type
  8137  		x := v_0
  8138  		y := v_1
  8139  		if !(!shiftIsBounded(v)) {
  8140  			break
  8141  		}
  8142  		v.reset(OpLOONG64MASKEQZ)
  8143  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  8144  		v0.AddArg2(x, y)
  8145  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8146  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8147  		v2.AuxInt = int64ToAuxInt(32)
  8148  		v1.AddArg2(v2, y)
  8149  		v.AddArg2(v0, v1)
  8150  		return true
  8151  	}
  8152  	return false
  8153  }
  8154  func rewriteValueLOONG64_OpLsh32x8(v *Value) bool {
  8155  	v_1 := v.Args[1]
  8156  	v_0 := v.Args[0]
  8157  	b := v.Block
  8158  	typ := &b.Func.Config.Types
  8159  	// match: (Lsh32x8 x y)
  8160  	// cond: shiftIsBounded(v)
  8161  	// result: (SLL x y)
  8162  	for {
  8163  		x := v_0
  8164  		y := v_1
  8165  		if !(shiftIsBounded(v)) {
  8166  			break
  8167  		}
  8168  		v.reset(OpLOONG64SLL)
  8169  		v.AddArg2(x, y)
  8170  		return true
  8171  	}
  8172  	// match: (Lsh32x8 <t> x y)
  8173  	// cond: !shiftIsBounded(v)
  8174  	// result: (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
  8175  	for {
  8176  		t := v.Type
  8177  		x := v_0
  8178  		y := v_1
  8179  		if !(!shiftIsBounded(v)) {
  8180  			break
  8181  		}
  8182  		v.reset(OpLOONG64MASKEQZ)
  8183  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  8184  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8185  		v1.AddArg(y)
  8186  		v0.AddArg2(x, v1)
  8187  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8188  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8189  		v3.AuxInt = int64ToAuxInt(32)
  8190  		v2.AddArg2(v3, v1)
  8191  		v.AddArg2(v0, v2)
  8192  		return true
  8193  	}
  8194  	return false
  8195  }
  8196  func rewriteValueLOONG64_OpLsh64x16(v *Value) bool {
  8197  	v_1 := v.Args[1]
  8198  	v_0 := v.Args[0]
  8199  	b := v.Block
  8200  	typ := &b.Func.Config.Types
  8201  	// match: (Lsh64x16 x y)
  8202  	// cond: shiftIsBounded(v)
  8203  	// result: (SLLV x y)
  8204  	for {
  8205  		x := v_0
  8206  		y := v_1
  8207  		if !(shiftIsBounded(v)) {
  8208  			break
  8209  		}
  8210  		v.reset(OpLOONG64SLLV)
  8211  		v.AddArg2(x, y)
  8212  		return true
  8213  	}
  8214  	// match: (Lsh64x16 <t> x y)
  8215  	// cond: !shiftIsBounded(v)
  8216  	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  8217  	for {
  8218  		t := v.Type
  8219  		x := v_0
  8220  		y := v_1
  8221  		if !(!shiftIsBounded(v)) {
  8222  			break
  8223  		}
  8224  		v.reset(OpLOONG64MASKEQZ)
  8225  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8226  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8227  		v1.AddArg(y)
  8228  		v0.AddArg2(x, v1)
  8229  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8230  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8231  		v3.AuxInt = int64ToAuxInt(64)
  8232  		v2.AddArg2(v3, v1)
  8233  		v.AddArg2(v0, v2)
  8234  		return true
  8235  	}
  8236  	return false
  8237  }
  8238  func rewriteValueLOONG64_OpLsh64x32(v *Value) bool {
  8239  	v_1 := v.Args[1]
  8240  	v_0 := v.Args[0]
  8241  	b := v.Block
  8242  	typ := &b.Func.Config.Types
  8243  	// match: (Lsh64x32 x y)
  8244  	// cond: shiftIsBounded(v)
  8245  	// result: (SLLV x y)
  8246  	for {
  8247  		x := v_0
  8248  		y := v_1
  8249  		if !(shiftIsBounded(v)) {
  8250  			break
  8251  		}
  8252  		v.reset(OpLOONG64SLLV)
  8253  		v.AddArg2(x, y)
  8254  		return true
  8255  	}
  8256  	// match: (Lsh64x32 <t> x y)
  8257  	// cond: !shiftIsBounded(v)
  8258  	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  8259  	for {
  8260  		t := v.Type
  8261  		x := v_0
  8262  		y := v_1
  8263  		if !(!shiftIsBounded(v)) {
  8264  			break
  8265  		}
  8266  		v.reset(OpLOONG64MASKEQZ)
  8267  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8268  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8269  		v1.AddArg(y)
  8270  		v0.AddArg2(x, v1)
  8271  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8272  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8273  		v3.AuxInt = int64ToAuxInt(64)
  8274  		v2.AddArg2(v3, v1)
  8275  		v.AddArg2(v0, v2)
  8276  		return true
  8277  	}
  8278  	return false
  8279  }
  8280  func rewriteValueLOONG64_OpLsh64x64(v *Value) bool {
  8281  	v_1 := v.Args[1]
  8282  	v_0 := v.Args[0]
  8283  	b := v.Block
  8284  	typ := &b.Func.Config.Types
  8285  	// match: (Lsh64x64 x y)
  8286  	// cond: shiftIsBounded(v)
  8287  	// result: (SLLV x y)
  8288  	for {
  8289  		x := v_0
  8290  		y := v_1
  8291  		if !(shiftIsBounded(v)) {
  8292  			break
  8293  		}
  8294  		v.reset(OpLOONG64SLLV)
  8295  		v.AddArg2(x, y)
  8296  		return true
  8297  	}
  8298  	// match: (Lsh64x64 <t> x y)
  8299  	// cond: !shiftIsBounded(v)
  8300  	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  8301  	for {
  8302  		t := v.Type
  8303  		x := v_0
  8304  		y := v_1
  8305  		if !(!shiftIsBounded(v)) {
  8306  			break
  8307  		}
  8308  		v.reset(OpLOONG64MASKEQZ)
  8309  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8310  		v0.AddArg2(x, y)
  8311  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8312  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8313  		v2.AuxInt = int64ToAuxInt(64)
  8314  		v1.AddArg2(v2, y)
  8315  		v.AddArg2(v0, v1)
  8316  		return true
  8317  	}
  8318  	return false
  8319  }
  8320  func rewriteValueLOONG64_OpLsh64x8(v *Value) bool {
  8321  	v_1 := v.Args[1]
  8322  	v_0 := v.Args[0]
  8323  	b := v.Block
  8324  	typ := &b.Func.Config.Types
  8325  	// match: (Lsh64x8 x y)
  8326  	// cond: shiftIsBounded(v)
  8327  	// result: (SLLV x y)
  8328  	for {
  8329  		x := v_0
  8330  		y := v_1
  8331  		if !(shiftIsBounded(v)) {
  8332  			break
  8333  		}
  8334  		v.reset(OpLOONG64SLLV)
  8335  		v.AddArg2(x, y)
  8336  		return true
  8337  	}
  8338  	// match: (Lsh64x8 <t> x y)
  8339  	// cond: !shiftIsBounded(v)
  8340  	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  8341  	for {
  8342  		t := v.Type
  8343  		x := v_0
  8344  		y := v_1
  8345  		if !(!shiftIsBounded(v)) {
  8346  			break
  8347  		}
  8348  		v.reset(OpLOONG64MASKEQZ)
  8349  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8350  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8351  		v1.AddArg(y)
  8352  		v0.AddArg2(x, v1)
  8353  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8354  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8355  		v3.AuxInt = int64ToAuxInt(64)
  8356  		v2.AddArg2(v3, v1)
  8357  		v.AddArg2(v0, v2)
  8358  		return true
  8359  	}
  8360  	return false
  8361  }
  8362  func rewriteValueLOONG64_OpLsh8x16(v *Value) bool {
  8363  	v_1 := v.Args[1]
  8364  	v_0 := v.Args[0]
  8365  	b := v.Block
  8366  	typ := &b.Func.Config.Types
  8367  	// match: (Lsh8x16 x y)
  8368  	// cond: shiftIsBounded(v)
  8369  	// result: (SLLV x y)
  8370  	for {
  8371  		x := v_0
  8372  		y := v_1
  8373  		if !(shiftIsBounded(v)) {
  8374  			break
  8375  		}
  8376  		v.reset(OpLOONG64SLLV)
  8377  		v.AddArg2(x, y)
  8378  		return true
  8379  	}
  8380  	// match: (Lsh8x16 <t> x y)
  8381  	// cond: !shiftIsBounded(v)
  8382  	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  8383  	for {
  8384  		t := v.Type
  8385  		x := v_0
  8386  		y := v_1
  8387  		if !(!shiftIsBounded(v)) {
  8388  			break
  8389  		}
  8390  		v.reset(OpLOONG64MASKEQZ)
  8391  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8392  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8393  		v1.AddArg(y)
  8394  		v0.AddArg2(x, v1)
  8395  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8396  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8397  		v3.AuxInt = int64ToAuxInt(64)
  8398  		v2.AddArg2(v3, v1)
  8399  		v.AddArg2(v0, v2)
  8400  		return true
  8401  	}
  8402  	return false
  8403  }
  8404  func rewriteValueLOONG64_OpLsh8x32(v *Value) bool {
  8405  	v_1 := v.Args[1]
  8406  	v_0 := v.Args[0]
  8407  	b := v.Block
  8408  	typ := &b.Func.Config.Types
  8409  	// match: (Lsh8x32 x y)
  8410  	// cond: shiftIsBounded(v)
  8411  	// result: (SLLV x y)
  8412  	for {
  8413  		x := v_0
  8414  		y := v_1
  8415  		if !(shiftIsBounded(v)) {
  8416  			break
  8417  		}
  8418  		v.reset(OpLOONG64SLLV)
  8419  		v.AddArg2(x, y)
  8420  		return true
  8421  	}
  8422  	// match: (Lsh8x32 <t> x y)
  8423  	// cond: !shiftIsBounded(v)
  8424  	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  8425  	for {
  8426  		t := v.Type
  8427  		x := v_0
  8428  		y := v_1
  8429  		if !(!shiftIsBounded(v)) {
  8430  			break
  8431  		}
  8432  		v.reset(OpLOONG64MASKEQZ)
  8433  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8434  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8435  		v1.AddArg(y)
  8436  		v0.AddArg2(x, v1)
  8437  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8438  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8439  		v3.AuxInt = int64ToAuxInt(64)
  8440  		v2.AddArg2(v3, v1)
  8441  		v.AddArg2(v0, v2)
  8442  		return true
  8443  	}
  8444  	return false
  8445  }
  8446  func rewriteValueLOONG64_OpLsh8x64(v *Value) bool {
  8447  	v_1 := v.Args[1]
  8448  	v_0 := v.Args[0]
  8449  	b := v.Block
  8450  	typ := &b.Func.Config.Types
  8451  	// match: (Lsh8x64 x y)
  8452  	// cond: shiftIsBounded(v)
  8453  	// result: (SLLV x y)
  8454  	for {
  8455  		x := v_0
  8456  		y := v_1
  8457  		if !(shiftIsBounded(v)) {
  8458  			break
  8459  		}
  8460  		v.reset(OpLOONG64SLLV)
  8461  		v.AddArg2(x, y)
  8462  		return true
  8463  	}
  8464  	// match: (Lsh8x64 <t> x y)
  8465  	// cond: !shiftIsBounded(v)
  8466  	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  8467  	for {
  8468  		t := v.Type
  8469  		x := v_0
  8470  		y := v_1
  8471  		if !(!shiftIsBounded(v)) {
  8472  			break
  8473  		}
  8474  		v.reset(OpLOONG64MASKEQZ)
  8475  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8476  		v0.AddArg2(x, y)
  8477  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8478  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8479  		v2.AuxInt = int64ToAuxInt(64)
  8480  		v1.AddArg2(v2, y)
  8481  		v.AddArg2(v0, v1)
  8482  		return true
  8483  	}
  8484  	return false
  8485  }
  8486  func rewriteValueLOONG64_OpLsh8x8(v *Value) bool {
  8487  	v_1 := v.Args[1]
  8488  	v_0 := v.Args[0]
  8489  	b := v.Block
  8490  	typ := &b.Func.Config.Types
  8491  	// match: (Lsh8x8 x y)
  8492  	// cond: shiftIsBounded(v)
  8493  	// result: (SLLV x y)
  8494  	for {
  8495  		x := v_0
  8496  		y := v_1
  8497  		if !(shiftIsBounded(v)) {
  8498  			break
  8499  		}
  8500  		v.reset(OpLOONG64SLLV)
  8501  		v.AddArg2(x, y)
  8502  		return true
  8503  	}
  8504  	// match: (Lsh8x8 <t> x y)
  8505  	// cond: !shiftIsBounded(v)
  8506  	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  8507  	for {
  8508  		t := v.Type
  8509  		x := v_0
  8510  		y := v_1
  8511  		if !(!shiftIsBounded(v)) {
  8512  			break
  8513  		}
  8514  		v.reset(OpLOONG64MASKEQZ)
  8515  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8516  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8517  		v1.AddArg(y)
  8518  		v0.AddArg2(x, v1)
  8519  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8520  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8521  		v3.AuxInt = int64ToAuxInt(64)
  8522  		v2.AddArg2(v3, v1)
  8523  		v.AddArg2(v0, v2)
  8524  		return true
  8525  	}
  8526  	return false
  8527  }
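// Mod* is lowered to the hardware remainder: REMV for signed and REMVU for
// unsigned operands, with 8/16/32-bit inputs first widened to 64 bits by
// sign or zero extension.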
  8528  func rewriteValueLOONG64_OpMod16(v *Value) bool {
  8529  	v_1 := v.Args[1]
  8530  	v_0 := v.Args[0]
  8531  	b := v.Block
  8532  	typ := &b.Func.Config.Types
  8533  	// match: (Mod16 x y)
  8534  	// result: (REMV (SignExt16to64 x) (SignExt16to64 y))
  8535  	for {
  8536  		x := v_0
  8537  		y := v_1
  8538  		v.reset(OpLOONG64REMV)
  8539  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8540  		v0.AddArg(x)
  8541  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8542  		v1.AddArg(y)
  8543  		v.AddArg2(v0, v1)
  8544  		return true
  8545  	}
  8546  }
  8547  func rewriteValueLOONG64_OpMod16u(v *Value) bool {
  8548  	v_1 := v.Args[1]
  8549  	v_0 := v.Args[0]
  8550  	b := v.Block
  8551  	typ := &b.Func.Config.Types
  8552  	// match: (Mod16u x y)
  8553  	// result: (REMVU (ZeroExt16to64 x) (ZeroExt16to64 y))
  8554  	for {
  8555  		x := v_0
  8556  		y := v_1
  8557  		v.reset(OpLOONG64REMVU)
  8558  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8559  		v0.AddArg(x)
  8560  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8561  		v1.AddArg(y)
  8562  		v.AddArg2(v0, v1)
  8563  		return true
  8564  	}
  8565  }
  8566  func rewriteValueLOONG64_OpMod32(v *Value) bool {
  8567  	v_1 := v.Args[1]
  8568  	v_0 := v.Args[0]
  8569  	b := v.Block
  8570  	typ := &b.Func.Config.Types
  8571  	// match: (Mod32 x y)
  8572  	// result: (REMV (SignExt32to64 x) (SignExt32to64 y))
  8573  	for {
  8574  		x := v_0
  8575  		y := v_1
  8576  		v.reset(OpLOONG64REMV)
  8577  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  8578  		v0.AddArg(x)
  8579  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  8580  		v1.AddArg(y)
  8581  		v.AddArg2(v0, v1)
  8582  		return true
  8583  	}
  8584  }
  8585  func rewriteValueLOONG64_OpMod32u(v *Value) bool {
  8586  	v_1 := v.Args[1]
  8587  	v_0 := v.Args[0]
  8588  	b := v.Block
  8589  	typ := &b.Func.Config.Types
  8590  	// match: (Mod32u x y)
  8591  	// result: (REMVU (ZeroExt32to64 x) (ZeroExt32to64 y))
  8592  	for {
  8593  		x := v_0
  8594  		y := v_1
  8595  		v.reset(OpLOONG64REMVU)
  8596  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8597  		v0.AddArg(x)
  8598  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8599  		v1.AddArg(y)
  8600  		v.AddArg2(v0, v1)
  8601  		return true
  8602  	}
  8603  }
  8604  func rewriteValueLOONG64_OpMod64(v *Value) bool {
  8605  	v_1 := v.Args[1]
  8606  	v_0 := v.Args[0]
  8607  	// match: (Mod64 x y)
  8608  	// result: (REMV x y)
  8609  	for {
  8610  		x := v_0
  8611  		y := v_1
  8612  		v.reset(OpLOONG64REMV)
  8613  		v.AddArg2(x, y)
  8614  		return true
  8615  	}
  8616  }
  8617  func rewriteValueLOONG64_OpMod8(v *Value) bool {
  8618  	v_1 := v.Args[1]
  8619  	v_0 := v.Args[0]
  8620  	b := v.Block
  8621  	typ := &b.Func.Config.Types
  8622  	// match: (Mod8 x y)
  8623  	// result: (REMV (SignExt8to64 x) (SignExt8to64 y))
  8624  	for {
  8625  		x := v_0
  8626  		y := v_1
  8627  		v.reset(OpLOONG64REMV)
  8628  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8629  		v0.AddArg(x)
  8630  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8631  		v1.AddArg(y)
  8632  		v.AddArg2(v0, v1)
  8633  		return true
  8634  	}
  8635  }
  8636  func rewriteValueLOONG64_OpMod8u(v *Value) bool {
  8637  	v_1 := v.Args[1]
  8638  	v_0 := v.Args[0]
  8639  	b := v.Block
  8640  	typ := &b.Func.Config.Types
  8641  	// match: (Mod8u x y)
  8642  	// result: (REMVU (ZeroExt8to64 x) (ZeroExt8to64 y))
  8643  	for {
  8644  		x := v_0
  8645  		y := v_1
  8646  		v.reset(OpLOONG64REMVU)
  8647  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8648  		v0.AddArg(x)
  8649  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8650  		v1.AddArg(y)
  8651  		v.AddArg2(v0, v1)
  8652  		return true
  8653  	}
  8654  }
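// Move is lowered by size. Sizes up to 16 bytes become explicit load/store
// pairs; sizes that do not decompose into aligned pieces (7, 11, 13, 14, 15)
// issue a second access that overlaps the first, e.g. Move [7] stores 4 bytes
// at offset 3 and 4 bytes at offset 0. When the size is a multiple of 8 and
// at most 8*128 bytes, the copy jumps into the Duff's-device body: judging by
// the formula 16*(128-s/8), each 8-byte block occupies 16 bytes of code, so
// that AuxInt is the entry offset leaving exactly s/8 blocks to run. Sizes
// over 1024 use the LoweredMove runtime loop, and a non-multiple-of-8 size
// over 16 first peels off the s%8 tail with a recursive Move.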
  8655  func rewriteValueLOONG64_OpMove(v *Value) bool {
  8656  	v_2 := v.Args[2]
  8657  	v_1 := v.Args[1]
  8658  	v_0 := v.Args[0]
  8659  	b := v.Block
  8660  	typ := &b.Func.Config.Types
  8661  	// match: (Move [0] _ _ mem)
  8662  	// result: mem
  8663  	for {
  8664  		if auxIntToInt64(v.AuxInt) != 0 {
  8665  			break
  8666  		}
  8667  		mem := v_2
  8668  		v.copyOf(mem)
  8669  		return true
  8670  	}
  8671  	// match: (Move [1] dst src mem)
  8672  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  8673  	for {
  8674  		if auxIntToInt64(v.AuxInt) != 1 {
  8675  			break
  8676  		}
  8677  		dst := v_0
  8678  		src := v_1
  8679  		mem := v_2
  8680  		v.reset(OpLOONG64MOVBstore)
  8681  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8682  		v0.AddArg2(src, mem)
  8683  		v.AddArg3(dst, v0, mem)
  8684  		return true
  8685  	}
  8686  	// match: (Move [2] dst src mem)
  8687  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  8688  	for {
  8689  		if auxIntToInt64(v.AuxInt) != 2 {
  8690  			break
  8691  		}
  8692  		dst := v_0
  8693  		src := v_1
  8694  		mem := v_2
  8695  		v.reset(OpLOONG64MOVHstore)
  8696  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8697  		v0.AddArg2(src, mem)
  8698  		v.AddArg3(dst, v0, mem)
  8699  		return true
  8700  	}
  8701  	// match: (Move [3] dst src mem)
  8702  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
  8703  	for {
  8704  		if auxIntToInt64(v.AuxInt) != 3 {
  8705  			break
  8706  		}
  8707  		dst := v_0
  8708  		src := v_1
  8709  		mem := v_2
  8710  		v.reset(OpLOONG64MOVBstore)
  8711  		v.AuxInt = int32ToAuxInt(2)
  8712  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8713  		v0.AuxInt = int32ToAuxInt(2)
  8714  		v0.AddArg2(src, mem)
  8715  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
  8716  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8717  		v2.AddArg2(src, mem)
  8718  		v1.AddArg3(dst, v2, mem)
  8719  		v.AddArg3(dst, v0, v1)
  8720  		return true
  8721  	}
  8722  	// match: (Move [4] dst src mem)
  8723  	// result: (MOVWstore dst (MOVWUload src mem) mem)
  8724  	for {
  8725  		if auxIntToInt64(v.AuxInt) != 4 {
  8726  			break
  8727  		}
  8728  		dst := v_0
  8729  		src := v_1
  8730  		mem := v_2
  8731  		v.reset(OpLOONG64MOVWstore)
  8732  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8733  		v0.AddArg2(src, mem)
  8734  		v.AddArg3(dst, v0, mem)
  8735  		return true
  8736  	}
  8737  	// match: (Move [5] dst src mem)
  8738  	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
  8739  	for {
  8740  		if auxIntToInt64(v.AuxInt) != 5 {
  8741  			break
  8742  		}
  8743  		dst := v_0
  8744  		src := v_1
  8745  		mem := v_2
  8746  		v.reset(OpLOONG64MOVBstore)
  8747  		v.AuxInt = int32ToAuxInt(4)
  8748  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8749  		v0.AuxInt = int32ToAuxInt(4)
  8750  		v0.AddArg2(src, mem)
  8751  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
  8752  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8753  		v2.AddArg2(src, mem)
  8754  		v1.AddArg3(dst, v2, mem)
  8755  		v.AddArg3(dst, v0, v1)
  8756  		return true
  8757  	}
  8758  	// match: (Move [6] dst src mem)
  8759  	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
  8760  	for {
  8761  		if auxIntToInt64(v.AuxInt) != 6 {
  8762  			break
  8763  		}
  8764  		dst := v_0
  8765  		src := v_1
  8766  		mem := v_2
  8767  		v.reset(OpLOONG64MOVHstore)
  8768  		v.AuxInt = int32ToAuxInt(4)
  8769  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8770  		v0.AuxInt = int32ToAuxInt(4)
  8771  		v0.AddArg2(src, mem)
  8772  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
  8773  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8774  		v2.AddArg2(src, mem)
  8775  		v1.AddArg3(dst, v2, mem)
  8776  		v.AddArg3(dst, v0, v1)
  8777  		return true
  8778  	}
  8779  	// match: (Move [7] dst src mem)
  8780  	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
  8781  	for {
  8782  		if auxIntToInt64(v.AuxInt) != 7 {
  8783  			break
  8784  		}
  8785  		dst := v_0
  8786  		src := v_1
  8787  		mem := v_2
  8788  		v.reset(OpLOONG64MOVWstore)
  8789  		v.AuxInt = int32ToAuxInt(3)
  8790  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8791  		v0.AuxInt = int32ToAuxInt(3)
  8792  		v0.AddArg2(src, mem)
  8793  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
  8794  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8795  		v2.AddArg2(src, mem)
  8796  		v1.AddArg3(dst, v2, mem)
  8797  		v.AddArg3(dst, v0, v1)
  8798  		return true
  8799  	}
  8800  	// match: (Move [8] dst src mem)
  8801  	// result: (MOVVstore dst (MOVVload src mem) mem)
  8802  	for {
  8803  		if auxIntToInt64(v.AuxInt) != 8 {
  8804  			break
  8805  		}
  8806  		dst := v_0
  8807  		src := v_1
  8808  		mem := v_2
  8809  		v.reset(OpLOONG64MOVVstore)
  8810  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8811  		v0.AddArg2(src, mem)
  8812  		v.AddArg3(dst, v0, mem)
  8813  		return true
  8814  	}
  8815  	// match: (Move [9] dst src mem)
  8816  	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8817  	for {
  8818  		if auxIntToInt64(v.AuxInt) != 9 {
  8819  			break
  8820  		}
  8821  		dst := v_0
  8822  		src := v_1
  8823  		mem := v_2
  8824  		v.reset(OpLOONG64MOVBstore)
  8825  		v.AuxInt = int32ToAuxInt(8)
  8826  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8827  		v0.AuxInt = int32ToAuxInt(8)
  8828  		v0.AddArg2(src, mem)
  8829  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8830  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8831  		v2.AddArg2(src, mem)
  8832  		v1.AddArg3(dst, v2, mem)
  8833  		v.AddArg3(dst, v0, v1)
  8834  		return true
  8835  	}
  8836  	// match: (Move [10] dst src mem)
  8837  	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8838  	for {
  8839  		if auxIntToInt64(v.AuxInt) != 10 {
  8840  			break
  8841  		}
  8842  		dst := v_0
  8843  		src := v_1
  8844  		mem := v_2
  8845  		v.reset(OpLOONG64MOVHstore)
  8846  		v.AuxInt = int32ToAuxInt(8)
  8847  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8848  		v0.AuxInt = int32ToAuxInt(8)
  8849  		v0.AddArg2(src, mem)
  8850  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8851  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8852  		v2.AddArg2(src, mem)
  8853  		v1.AddArg3(dst, v2, mem)
  8854  		v.AddArg3(dst, v0, v1)
  8855  		return true
  8856  	}
  8857  	// match: (Move [11] dst src mem)
  8858  	// result: (MOVWstore [7] dst (MOVWload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8859  	for {
  8860  		if auxIntToInt64(v.AuxInt) != 11 {
  8861  			break
  8862  		}
  8863  		dst := v_0
  8864  		src := v_1
  8865  		mem := v_2
  8866  		v.reset(OpLOONG64MOVWstore)
  8867  		v.AuxInt = int32ToAuxInt(7)
  8868  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
  8869  		v0.AuxInt = int32ToAuxInt(7)
  8870  		v0.AddArg2(src, mem)
  8871  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8872  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8873  		v2.AddArg2(src, mem)
  8874  		v1.AddArg3(dst, v2, mem)
  8875  		v.AddArg3(dst, v0, v1)
  8876  		return true
  8877  	}
  8878  	// match: (Move [12] dst src mem)
  8879  	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8880  	for {
  8881  		if auxIntToInt64(v.AuxInt) != 12 {
  8882  			break
  8883  		}
  8884  		dst := v_0
  8885  		src := v_1
  8886  		mem := v_2
  8887  		v.reset(OpLOONG64MOVWstore)
  8888  		v.AuxInt = int32ToAuxInt(8)
  8889  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8890  		v0.AuxInt = int32ToAuxInt(8)
  8891  		v0.AddArg2(src, mem)
  8892  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8893  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8894  		v2.AddArg2(src, mem)
  8895  		v1.AddArg3(dst, v2, mem)
  8896  		v.AddArg3(dst, v0, v1)
  8897  		return true
  8898  	}
  8899  	// match: (Move [13] dst src mem)
  8900  	// result: (MOVVstore [5] dst (MOVVload [5] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8901  	for {
  8902  		if auxIntToInt64(v.AuxInt) != 13 {
  8903  			break
  8904  		}
  8905  		dst := v_0
  8906  		src := v_1
  8907  		mem := v_2
  8908  		v.reset(OpLOONG64MOVVstore)
  8909  		v.AuxInt = int32ToAuxInt(5)
  8910  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8911  		v0.AuxInt = int32ToAuxInt(5)
  8912  		v0.AddArg2(src, mem)
  8913  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8914  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8915  		v2.AddArg2(src, mem)
  8916  		v1.AddArg3(dst, v2, mem)
  8917  		v.AddArg3(dst, v0, v1)
  8918  		return true
  8919  	}
  8920  	// match: (Move [14] dst src mem)
  8921  	// result: (MOVVstore [6] dst (MOVVload [6] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8922  	for {
  8923  		if auxIntToInt64(v.AuxInt) != 14 {
  8924  			break
  8925  		}
  8926  		dst := v_0
  8927  		src := v_1
  8928  		mem := v_2
  8929  		v.reset(OpLOONG64MOVVstore)
  8930  		v.AuxInt = int32ToAuxInt(6)
  8931  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8932  		v0.AuxInt = int32ToAuxInt(6)
  8933  		v0.AddArg2(src, mem)
  8934  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8935  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8936  		v2.AddArg2(src, mem)
  8937  		v1.AddArg3(dst, v2, mem)
  8938  		v.AddArg3(dst, v0, v1)
  8939  		return true
  8940  	}
  8941  	// match: (Move [15] dst src mem)
  8942  	// result: (MOVVstore [7] dst (MOVVload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8943  	for {
  8944  		if auxIntToInt64(v.AuxInt) != 15 {
  8945  			break
  8946  		}
  8947  		dst := v_0
  8948  		src := v_1
  8949  		mem := v_2
  8950  		v.reset(OpLOONG64MOVVstore)
  8951  		v.AuxInt = int32ToAuxInt(7)
  8952  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8953  		v0.AuxInt = int32ToAuxInt(7)
  8954  		v0.AddArg2(src, mem)
  8955  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8956  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8957  		v2.AddArg2(src, mem)
  8958  		v1.AddArg3(dst, v2, mem)
  8959  		v.AddArg3(dst, v0, v1)
  8960  		return true
  8961  	}
  8962  	// match: (Move [16] dst src mem)
  8963  	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8964  	for {
  8965  		if auxIntToInt64(v.AuxInt) != 16 {
  8966  			break
  8967  		}
  8968  		dst := v_0
  8969  		src := v_1
  8970  		mem := v_2
  8971  		v.reset(OpLOONG64MOVVstore)
  8972  		v.AuxInt = int32ToAuxInt(8)
  8973  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8974  		v0.AuxInt = int32ToAuxInt(8)
  8975  		v0.AddArg2(src, mem)
  8976  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8977  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8978  		v2.AddArg2(src, mem)
  8979  		v1.AddArg3(dst, v2, mem)
  8980  		v.AddArg3(dst, v0, v1)
  8981  		return true
  8982  	}
  8983  	// match: (Move [s] dst src mem)
  8984  	// cond: s%8 != 0 && s > 16
  8985  	// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
  8986  	for {
  8987  		s := auxIntToInt64(v.AuxInt)
  8988  		dst := v_0
  8989  		src := v_1
  8990  		mem := v_2
  8991  		if !(s%8 != 0 && s > 16) {
  8992  			break
  8993  		}
  8994  		v.reset(OpMove)
  8995  		v.AuxInt = int64ToAuxInt(s % 8)
  8996  		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
  8997  		v0.AuxInt = int64ToAuxInt(s - s%8)
  8998  		v0.AddArg(dst)
  8999  		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
  9000  		v1.AuxInt = int64ToAuxInt(s - s%8)
  9001  		v1.AddArg(src)
  9002  		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
  9003  		v2.AuxInt = int64ToAuxInt(s - s%8)
  9004  		v2.AddArg3(dst, src, mem)
  9005  		v.AddArg3(v0, v1, v2)
  9006  		return true
  9007  	}
  9008  	// match: (Move [s] dst src mem)
  9009  	// cond: s%8 == 0 && s > 16 && s <= 8*128 && logLargeCopy(v, s)
  9010  	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
  9011  	for {
  9012  		s := auxIntToInt64(v.AuxInt)
  9013  		dst := v_0
  9014  		src := v_1
  9015  		mem := v_2
  9016  		if !(s%8 == 0 && s > 16 && s <= 8*128 && logLargeCopy(v, s)) {
  9017  			break
  9018  		}
  9019  		v.reset(OpLOONG64DUFFCOPY)
  9020  		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
  9021  		v.AddArg3(dst, src, mem)
  9022  		return true
  9023  	}
  9024  	// match: (Move [s] dst src mem)
  9025  	// cond: s%8 == 0 && s > 1024 && logLargeCopy(v, s)
  9026  	// result: (LoweredMove dst src (ADDVconst <src.Type> src [s-8]) mem)
  9027  	for {
  9028  		s := auxIntToInt64(v.AuxInt)
  9029  		dst := v_0
  9030  		src := v_1
  9031  		mem := v_2
  9032  		if !(s%8 == 0 && s > 1024 && logLargeCopy(v, s)) {
  9033  			break
  9034  		}
  9035  		v.reset(OpLOONG64LoweredMove)
  9036  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, src.Type)
  9037  		v0.AuxInt = int64ToAuxInt(s - 8)
  9038  		v0.AddArg(src)
  9039  		v.AddArg4(dst, src, v0, mem)
  9040  		return true
  9041  	}
  9042  	return false
  9043  }
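// Neq* is lowered as (x XOR y) != 0, computed with SGTU against the constant
// 0 (any nonzero difference is unsigned-greater than zero), with sub-word
// operands zero-extended first. The float variants instead test
// CMPEQF/CMPEQD and read the flag with FPFlagFalse.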
  9044  func rewriteValueLOONG64_OpNeq16(v *Value) bool {
  9045  	v_1 := v.Args[1]
  9046  	v_0 := v.Args[0]
  9047  	b := v.Block
  9048  	typ := &b.Func.Config.Types
  9049  	// match: (Neq16 x y)
  9050  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
  9051  	for {
  9052  		x := v_0
  9053  		y := v_1
  9054  		v.reset(OpLOONG64SGTU)
  9055  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  9056  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  9057  		v1.AddArg(x)
  9058  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9059  		v2.AddArg(y)
  9060  		v0.AddArg2(v1, v2)
  9061  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9062  		v3.AuxInt = int64ToAuxInt(0)
  9063  		v.AddArg2(v0, v3)
  9064  		return true
  9065  	}
  9066  }
  9067  func rewriteValueLOONG64_OpNeq32(v *Value) bool {
  9068  	v_1 := v.Args[1]
  9069  	v_0 := v.Args[0]
  9070  	b := v.Block
  9071  	typ := &b.Func.Config.Types
  9072  	// match: (Neq32 x y)
  9073  	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
  9074  	for {
  9075  		x := v_0
  9076  		y := v_1
  9077  		v.reset(OpLOONG64SGTU)
  9078  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  9079  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9080  		v1.AddArg(x)
  9081  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9082  		v2.AddArg(y)
  9083  		v0.AddArg2(v1, v2)
  9084  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9085  		v3.AuxInt = int64ToAuxInt(0)
  9086  		v.AddArg2(v0, v3)
  9087  		return true
  9088  	}
  9089  }
  9090  func rewriteValueLOONG64_OpNeq32F(v *Value) bool {
  9091  	v_1 := v.Args[1]
  9092  	v_0 := v.Args[0]
  9093  	b := v.Block
  9094  	// match: (Neq32F x y)
  9095  	// result: (FPFlagFalse (CMPEQF x y))
  9096  	for {
  9097  		x := v_0
  9098  		y := v_1
  9099  		v.reset(OpLOONG64FPFlagFalse)
  9100  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
  9101  		v0.AddArg2(x, y)
  9102  		v.AddArg(v0)
  9103  		return true
  9104  	}
  9105  }
  9106  func rewriteValueLOONG64_OpNeq64(v *Value) bool {
  9107  	v_1 := v.Args[1]
  9108  	v_0 := v.Args[0]
  9109  	b := v.Block
  9110  	typ := &b.Func.Config.Types
  9111  	// match: (Neq64 x y)
  9112  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  9113  	for {
  9114  		x := v_0
  9115  		y := v_1
  9116  		v.reset(OpLOONG64SGTU)
  9117  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  9118  		v0.AddArg2(x, y)
  9119  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9120  		v1.AuxInt = int64ToAuxInt(0)
  9121  		v.AddArg2(v0, v1)
  9122  		return true
  9123  	}
  9124  }
  9125  func rewriteValueLOONG64_OpNeq64F(v *Value) bool {
  9126  	v_1 := v.Args[1]
  9127  	v_0 := v.Args[0]
  9128  	b := v.Block
  9129  	// match: (Neq64F x y)
  9130  	// result: (FPFlagFalse (CMPEQD x y))
  9131  	for {
  9132  		x := v_0
  9133  		y := v_1
  9134  		v.reset(OpLOONG64FPFlagFalse)
  9135  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
  9136  		v0.AddArg2(x, y)
  9137  		v.AddArg(v0)
  9138  		return true
  9139  	}
  9140  }
  9141  func rewriteValueLOONG64_OpNeq8(v *Value) bool {
  9142  	v_1 := v.Args[1]
  9143  	v_0 := v.Args[0]
  9144  	b := v.Block
  9145  	typ := &b.Func.Config.Types
  9146  	// match: (Neq8 x y)
  9147  	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
  9148  	for {
  9149  		x := v_0
  9150  		y := v_1
  9151  		v.reset(OpLOONG64SGTU)
  9152  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  9153  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9154  		v1.AddArg(x)
  9155  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9156  		v2.AddArg(y)
  9157  		v0.AddArg2(v1, v2)
  9158  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9159  		v3.AuxInt = int64ToAuxInt(0)
  9160  		v.AddArg2(v0, v3)
  9161  		return true
  9162  	}
  9163  }
  9164  func rewriteValueLOONG64_OpNeqPtr(v *Value) bool {
  9165  	v_1 := v.Args[1]
  9166  	v_0 := v.Args[0]
  9167  	b := v.Block
  9168  	typ := &b.Func.Config.Types
  9169  	// match: (NeqPtr x y)
  9170  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  9171  	for {
  9172  		x := v_0
  9173  		y := v_1
  9174  		v.reset(OpLOONG64SGTU)
  9175  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  9176  		v0.AddArg2(x, y)
  9177  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9178  		v1.AuxInt = int64ToAuxInt(0)
  9179  		v.AddArg2(v0, v1)
  9180  		return true
  9181  	}
  9182  }
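         // Booleans are materialized as 0 or 1, so Not reduces to flipping the
         // low bit with XORconst [1].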
  9183  func rewriteValueLOONG64_OpNot(v *Value) bool {
  9184  	v_0 := v.Args[0]
  9185  	// match: (Not x)
  9186  	// result: (XORconst [1] x)
  9187  	for {
  9188  		x := v_0
  9189  		v.reset(OpLOONG64XORconst)
  9190  		v.AuxInt = int64ToAuxInt(1)
  9191  		v.AddArg(x)
  9192  		return true
  9193  	}
  9194  }
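         // An OffPtr whose base is SP folds into a single MOVVaddr
         // (address generation relative to SP); any other base just gets an
         // ADDVconst added to it.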
  9195  func rewriteValueLOONG64_OpOffPtr(v *Value) bool {
  9196  	v_0 := v.Args[0]
  9197  	// match: (OffPtr [off] ptr:(SP))
  9198  	// result: (MOVVaddr [int32(off)] ptr)
  9199  	for {
  9200  		off := auxIntToInt64(v.AuxInt)
  9201  		ptr := v_0
  9202  		if ptr.Op != OpSP {
  9203  			break
  9204  		}
  9205  		v.reset(OpLOONG64MOVVaddr)
  9206  		v.AuxInt = int32ToAuxInt(int32(off))
  9207  		v.AddArg(ptr)
  9208  		return true
  9209  	}
  9210  	// match: (OffPtr [off] ptr)
  9211  	// result: (ADDVconst [off] ptr)
  9212  	for {
  9213  		off := auxIntToInt64(v.AuxInt)
  9214  		ptr := v_0
  9215  		v.reset(OpLOONG64ADDVconst)
  9216  		v.AuxInt = int64ToAuxInt(off)
  9217  		v.AddArg(ptr)
  9218  		return true
  9219  	}
  9220  }
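         // The PopCount* rules route the operand through the floating-point /
         // vector register file: move it over with MOVWgpfp/MOVVgpfp, count the
         // bits with VPCNT{16,32,64}, and move the result back with
         // MOVWfpgp/MOVVfpgp.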
  9221  func rewriteValueLOONG64_OpPopCount16(v *Value) bool {
  9222  	v_0 := v.Args[0]
  9223  	b := v.Block
  9224  	typ := &b.Func.Config.Types
  9225  	// match: (PopCount16 <t> x)
  9226  	// result: (MOVWfpgp <t> (VPCNT16 <typ.Float32> (MOVWgpfp <typ.Float32> (ZeroExt16to32 x))))
  9227  	for {
  9228  		t := v.Type
  9229  		x := v_0
  9230  		v.reset(OpLOONG64MOVWfpgp)
  9231  		v.Type = t
  9232  		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT16, typ.Float32)
  9233  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
  9234  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  9235  		v2.AddArg(x)
  9236  		v1.AddArg(v2)
  9237  		v0.AddArg(v1)
  9238  		v.AddArg(v0)
  9239  		return true
  9240  	}
  9241  }
  9242  func rewriteValueLOONG64_OpPopCount32(v *Value) bool {
  9243  	v_0 := v.Args[0]
  9244  	b := v.Block
  9245  	typ := &b.Func.Config.Types
  9246  	// match: (PopCount32 <t> x)
  9247  	// result: (MOVWfpgp <t> (VPCNT32 <typ.Float32> (MOVWgpfp <typ.Float32> x)))
  9248  	for {
  9249  		t := v.Type
  9250  		x := v_0
  9251  		v.reset(OpLOONG64MOVWfpgp)
  9252  		v.Type = t
  9253  		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT32, typ.Float32)
  9254  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
  9255  		v1.AddArg(x)
  9256  		v0.AddArg(v1)
  9257  		v.AddArg(v0)
  9258  		return true
  9259  	}
  9260  }
  9261  func rewriteValueLOONG64_OpPopCount64(v *Value) bool {
  9262  	v_0 := v.Args[0]
  9263  	b := v.Block
  9264  	typ := &b.Func.Config.Types
  9265  	// match: (PopCount64 <t> x)
  9266  	// result: (MOVVfpgp <t> (VPCNT64 <typ.Float64> (MOVVgpfp <typ.Float64> x)))
  9267  	for {
  9268  		t := v.Type
  9269  		x := v_0
  9270  		v.reset(OpLOONG64MOVVfpgp)
  9271  		v.Type = t
  9272  		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT64, typ.Float64)
  9273  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVgpfp, typ.Float64)
  9274  		v1.AddArg(x)
  9275  		v0.AddArg(v1)
  9276  		v.AddArg(v0)
  9277  		return true
  9278  	}
  9279  }
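         // PrefetchCache lowers to a plain PRELD with hint [0], while the
         // streamed variant lowers to PRELDX with an AuxInt that packs the
         // extended prefetch parameters into one constant; the exact bit layout
         // is whatever the PRELDX encoding expects, it is only assembled here.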
  9280  func rewriteValueLOONG64_OpPrefetchCache(v *Value) bool {
  9281  	v_1 := v.Args[1]
  9282  	v_0 := v.Args[0]
  9283  	// match: (PrefetchCache addr mem)
  9284  	// result: (PRELD addr mem [0])
  9285  	for {
  9286  		addr := v_0
  9287  		mem := v_1
  9288  		v.reset(OpLOONG64PRELD)
  9289  		v.AuxInt = int64ToAuxInt(0)
  9290  		v.AddArg2(addr, mem)
  9291  		return true
  9292  	}
  9293  }
  9294  func rewriteValueLOONG64_OpPrefetchCacheStreamed(v *Value) bool {
  9295  	v_1 := v.Args[1]
  9296  	v_0 := v.Args[0]
  9297  	// match: (PrefetchCacheStreamed addr mem)
  9298  	// result: (PRELDX addr mem [(((512 << 1) + (1 << 12)) << 5) + 2])
  9299  	for {
  9300  		addr := v_0
  9301  		mem := v_1
  9302  		v.reset(OpLOONG64PRELDX)
  9303  		v.AuxInt = int64ToAuxInt((((512 << 1) + (1 << 12)) << 5) + 2)
  9304  		v.AddArg2(addr, mem)
  9305  		return true
  9306  	}
  9307  }
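         // RotateLeft lowering: 32- and 64-bit rotates map directly onto
         // ROTR/ROTRV with a negated count (rotate left by y == rotate right by
         // -y). Constant 16- and 8-bit rotates decompose into a shift-or pair
         // with masked counts. A variable 16-bit rotate replicates the halfword
         // into both halves of a 32-bit value so that a 32-bit ROTR gives the
         // right answer; a variable 8-bit rotate falls back to an explicit
         // SLLV/SRLV/OR sequence with the counts masked to 3 bits.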
  9308  func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool {
  9309  	v_1 := v.Args[1]
  9310  	v_0 := v.Args[0]
  9311  	b := v.Block
  9312  	typ := &b.Func.Config.Types
  9313  	// match: (RotateLeft16 <t> x (MOVVconst [c]))
  9314  	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
  9315  	for {
  9316  		t := v.Type
  9317  		x := v_0
  9318  		if v_1.Op != OpLOONG64MOVVconst {
  9319  			break
  9320  		}
  9321  		c := auxIntToInt64(v_1.AuxInt)
  9322  		v.reset(OpOr16)
  9323  		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
  9324  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9325  		v1.AuxInt = int64ToAuxInt(c & 15)
  9326  		v0.AddArg2(x, v1)
  9327  		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
  9328  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9329  		v3.AuxInt = int64ToAuxInt(-c & 15)
  9330  		v2.AddArg2(x, v3)
  9331  		v.AddArg2(v0, v2)
  9332  		return true
  9333  	}
  9334  	// match: (RotateLeft16 <t> x y)
  9335  	// result: (ROTR <t> (OR <typ.UInt32> (ZeroExt16to32 x) (SLLVconst <t> (ZeroExt16to32 x) [16])) (NEGV <typ.Int64> y))
  9336  	for {
  9337  		t := v.Type
  9338  		x := v_0
  9339  		y := v_1
  9340  		v.reset(OpLOONG64ROTR)
  9341  		v.Type = t
  9342  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt32)
  9343  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  9344  		v1.AddArg(x)
  9345  		v2 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
  9346  		v2.AuxInt = int64ToAuxInt(16)
  9347  		v2.AddArg(v1)
  9348  		v0.AddArg2(v1, v2)
  9349  		v3 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
  9350  		v3.AddArg(y)
  9351  		v.AddArg2(v0, v3)
  9352  		return true
  9353  	}
  9354  }
  9355  func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool {
  9356  	v_1 := v.Args[1]
  9357  	v_0 := v.Args[0]
  9358  	b := v.Block
  9359  	// match: (RotateLeft32 x y)
  9360  	// result: (ROTR x (NEGV <y.Type> y))
  9361  	for {
  9362  		x := v_0
  9363  		y := v_1
  9364  		v.reset(OpLOONG64ROTR)
  9365  		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
  9366  		v0.AddArg(y)
  9367  		v.AddArg2(x, v0)
  9368  		return true
  9369  	}
  9370  }
  9371  func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool {
  9372  	v_1 := v.Args[1]
  9373  	v_0 := v.Args[0]
  9374  	b := v.Block
  9375  	// match: (RotateLeft64 x y)
  9376  	// result: (ROTRV x (NEGV <y.Type> y))
  9377  	for {
  9378  		x := v_0
  9379  		y := v_1
  9380  		v.reset(OpLOONG64ROTRV)
  9381  		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
  9382  		v0.AddArg(y)
  9383  		v.AddArg2(x, v0)
  9384  		return true
  9385  	}
  9386  }
  9387  func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool {
  9388  	v_1 := v.Args[1]
  9389  	v_0 := v.Args[0]
  9390  	b := v.Block
  9391  	typ := &b.Func.Config.Types
  9392  	// match: (RotateLeft8 <t> x (MOVVconst [c]))
  9393  	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
  9394  	for {
  9395  		t := v.Type
  9396  		x := v_0
  9397  		if v_1.Op != OpLOONG64MOVVconst {
  9398  			break
  9399  		}
  9400  		c := auxIntToInt64(v_1.AuxInt)
  9401  		v.reset(OpOr8)
  9402  		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
  9403  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9404  		v1.AuxInt = int64ToAuxInt(c & 7)
  9405  		v0.AddArg2(x, v1)
  9406  		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
  9407  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9408  		v3.AuxInt = int64ToAuxInt(-c & 7)
  9409  		v2.AddArg2(x, v3)
  9410  		v.AddArg2(v0, v2)
  9411  		return true
  9412  	}
  9413  	// match: (RotateLeft8 <t> x y)
  9414  	// result: (OR <t> (SLLV <t> x (ANDconst <typ.Int64> [7] y)) (SRLV <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEGV <typ.Int64> y))))
  9415  	for {
  9416  		t := v.Type
  9417  		x := v_0
  9418  		y := v_1
  9419  		v.reset(OpLOONG64OR)
  9420  		v.Type = t
  9421  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  9422  		v1 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
  9423  		v1.AuxInt = int64ToAuxInt(7)
  9424  		v1.AddArg(y)
  9425  		v0.AddArg2(x, v1)
  9426  		v2 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9427  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9428  		v3.AddArg(x)
  9429  		v4 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
  9430  		v4.AuxInt = int64ToAuxInt(7)
  9431  		v5 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
  9432  		v5.AddArg(y)
  9433  		v4.AddArg(v5)
  9434  		v2.AddArg2(v3, v4)
  9435  		v.AddArg2(v0, v2)
  9436  		return true
  9437  	}
  9438  }
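         // The unsigned right-shift (Rsh*Ux*) rules all follow one pattern: a
         // bounded shift becomes a plain SRLV (or SRL for 32-bit operands),
         // while an unbounded shift is wrapped in MASKEQZ guarded by
         // SGTU(limit, count), where limit is the width the hardware shift
         // honours (64 for SRLV, 32 for SRL). Go defines shifts by >= the
         // operand width to yield 0; counts between the operand width and the
         // hardware limit already yield 0 because sub-word operands are
         // zero-extended to 64 bits, and MASKEQZ forces the rest to 0.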
  9439  func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool {
  9440  	v_1 := v.Args[1]
  9441  	v_0 := v.Args[0]
  9442  	b := v.Block
  9443  	typ := &b.Func.Config.Types
  9444  	// match: (Rsh16Ux16 x y)
  9445  	// cond: shiftIsBounded(v)
  9446  	// result: (SRLV (ZeroExt16to64 x) y)
  9447  	for {
  9448  		x := v_0
  9449  		y := v_1
  9450  		if !(shiftIsBounded(v)) {
  9451  			break
  9452  		}
  9453  		v.reset(OpLOONG64SRLV)
  9454  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9455  		v0.AddArg(x)
  9456  		v.AddArg2(v0, y)
  9457  		return true
  9458  	}
  9459  	// match: (Rsh16Ux16 <t> x y)
  9460  	// cond: !shiftIsBounded(v)
  9461  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  9462  	for {
  9463  		t := v.Type
  9464  		x := v_0
  9465  		y := v_1
  9466  		if !(!shiftIsBounded(v)) {
  9467  			break
  9468  		}
  9469  		v.reset(OpLOONG64MASKEQZ)
  9470  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9471  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9472  		v1.AddArg(x)
  9473  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9474  		v2.AddArg(y)
  9475  		v0.AddArg2(v1, v2)
  9476  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9477  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9478  		v4.AuxInt = int64ToAuxInt(64)
  9479  		v3.AddArg2(v4, v2)
  9480  		v.AddArg2(v0, v3)
  9481  		return true
  9482  	}
  9483  	return false
  9484  }
  9485  func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool {
  9486  	v_1 := v.Args[1]
  9487  	v_0 := v.Args[0]
  9488  	b := v.Block
  9489  	typ := &b.Func.Config.Types
  9490  	// match: (Rsh16Ux32 x y)
  9491  	// cond: shiftIsBounded(v)
  9492  	// result: (SRLV (ZeroExt16to64 x) y)
  9493  	for {
  9494  		x := v_0
  9495  		y := v_1
  9496  		if !(shiftIsBounded(v)) {
  9497  			break
  9498  		}
  9499  		v.reset(OpLOONG64SRLV)
  9500  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9501  		v0.AddArg(x)
  9502  		v.AddArg2(v0, y)
  9503  		return true
  9504  	}
  9505  	// match: (Rsh16Ux32 <t> x y)
  9506  	// cond: !shiftIsBounded(v)
  9507  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  9508  	for {
  9509  		t := v.Type
  9510  		x := v_0
  9511  		y := v_1
  9512  		if !(!shiftIsBounded(v)) {
  9513  			break
  9514  		}
  9515  		v.reset(OpLOONG64MASKEQZ)
  9516  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9517  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9518  		v1.AddArg(x)
  9519  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9520  		v2.AddArg(y)
  9521  		v0.AddArg2(v1, v2)
  9522  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9523  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9524  		v4.AuxInt = int64ToAuxInt(64)
  9525  		v3.AddArg2(v4, v2)
  9526  		v.AddArg2(v0, v3)
  9527  		return true
  9528  	}
  9529  	return false
  9530  }
  9531  func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool {
  9532  	v_1 := v.Args[1]
  9533  	v_0 := v.Args[0]
  9534  	b := v.Block
  9535  	typ := &b.Func.Config.Types
  9536  	// match: (Rsh16Ux64 x y)
  9537  	// cond: shiftIsBounded(v)
  9538  	// result: (SRLV (ZeroExt16to64 x) y)
  9539  	for {
  9540  		x := v_0
  9541  		y := v_1
  9542  		if !(shiftIsBounded(v)) {
  9543  			break
  9544  		}
  9545  		v.reset(OpLOONG64SRLV)
  9546  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9547  		v0.AddArg(x)
  9548  		v.AddArg2(v0, y)
  9549  		return true
  9550  	}
  9551  	// match: (Rsh16Ux64 <t> x y)
  9552  	// cond: !shiftIsBounded(v)
  9553  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  9554  	for {
  9555  		t := v.Type
  9556  		x := v_0
  9557  		y := v_1
  9558  		if !(!shiftIsBounded(v)) {
  9559  			break
  9560  		}
  9561  		v.reset(OpLOONG64MASKEQZ)
  9562  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9563  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9564  		v1.AddArg(x)
  9565  		v0.AddArg2(v1, y)
  9566  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9567  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9568  		v3.AuxInt = int64ToAuxInt(64)
  9569  		v2.AddArg2(v3, y)
  9570  		v.AddArg2(v0, v2)
  9571  		return true
  9572  	}
  9573  	return false
  9574  }
  9575  func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool {
  9576  	v_1 := v.Args[1]
  9577  	v_0 := v.Args[0]
  9578  	b := v.Block
  9579  	typ := &b.Func.Config.Types
  9580  	// match: (Rsh16Ux8 x y)
  9581  	// cond: shiftIsBounded(v)
  9582  	// result: (SRLV (ZeroExt16to64 x) y)
  9583  	for {
  9584  		x := v_0
  9585  		y := v_1
  9586  		if !(shiftIsBounded(v)) {
  9587  			break
  9588  		}
  9589  		v.reset(OpLOONG64SRLV)
  9590  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9591  		v0.AddArg(x)
  9592  		v.AddArg2(v0, y)
  9593  		return true
  9594  	}
  9595  	// match: (Rsh16Ux8 <t> x y)
  9596  	// cond: !shiftIsBounded(v)
  9597  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  9598  	for {
  9599  		t := v.Type
  9600  		x := v_0
  9601  		y := v_1
  9602  		if !(!shiftIsBounded(v)) {
  9603  			break
  9604  		}
  9605  		v.reset(OpLOONG64MASKEQZ)
  9606  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9607  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9608  		v1.AddArg(x)
  9609  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9610  		v2.AddArg(y)
  9611  		v0.AddArg2(v1, v2)
  9612  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9613  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9614  		v4.AuxInt = int64ToAuxInt(64)
  9615  		v3.AddArg2(v4, v2)
  9616  		v.AddArg2(v0, v3)
  9617  		return true
  9618  	}
  9619  	return false
  9620  }
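         // The signed right-shift (Rsh*x*) rules are the arithmetic counterpart.
         // A bounded shift is a plain SRAV (or SRA for 32-bit operands) of the
         // sign-extended operand. For unbounded shifts Go requires counts >= the
         // operand width to act like a shift by width-1 (replicating the sign
         // bit), so the count is ORed with NEGV(SGTU(count, limit)), limit being
         // 63 for the SRAV forms and 31 for the SRA forms: an out-of-range count
         // turns the mask into all ones, and the hardware shift then sees a
         // count whose low bits are all set, i.e. a shift by limit.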
  9621  func rewriteValueLOONG64_OpRsh16x16(v *Value) bool {
  9622  	v_1 := v.Args[1]
  9623  	v_0 := v.Args[0]
  9624  	b := v.Block
  9625  	typ := &b.Func.Config.Types
  9626  	// match: (Rsh16x16 x y)
  9627  	// cond: shiftIsBounded(v)
  9628  	// result: (SRAV (SignExt16to64 x) y)
  9629  	for {
  9630  		x := v_0
  9631  		y := v_1
  9632  		if !(shiftIsBounded(v)) {
  9633  			break
  9634  		}
  9635  		v.reset(OpLOONG64SRAV)
  9636  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9637  		v0.AddArg(x)
  9638  		v.AddArg2(v0, y)
  9639  		return true
  9640  	}
  9641  	// match: (Rsh16x16 <t> x y)
  9642  	// cond: !shiftIsBounded(v)
  9643  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
  9644  	for {
  9645  		t := v.Type
  9646  		x := v_0
  9647  		y := v_1
  9648  		if !(!shiftIsBounded(v)) {
  9649  			break
  9650  		}
  9651  		v.reset(OpLOONG64SRAV)
  9652  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9653  		v0.AddArg(x)
  9654  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9655  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9656  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9657  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9658  		v4.AddArg(y)
  9659  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9660  		v5.AuxInt = int64ToAuxInt(63)
  9661  		v3.AddArg2(v4, v5)
  9662  		v2.AddArg(v3)
  9663  		v1.AddArg2(v2, v4)
  9664  		v.AddArg2(v0, v1)
  9665  		return true
  9666  	}
  9667  	return false
  9668  }
  9669  func rewriteValueLOONG64_OpRsh16x32(v *Value) bool {
  9670  	v_1 := v.Args[1]
  9671  	v_0 := v.Args[0]
  9672  	b := v.Block
  9673  	typ := &b.Func.Config.Types
  9674  	// match: (Rsh16x32 x y)
  9675  	// cond: shiftIsBounded(v)
  9676  	// result: (SRAV (SignExt16to64 x) y)
  9677  	for {
  9678  		x := v_0
  9679  		y := v_1
  9680  		if !(shiftIsBounded(v)) {
  9681  			break
  9682  		}
  9683  		v.reset(OpLOONG64SRAV)
  9684  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9685  		v0.AddArg(x)
  9686  		v.AddArg2(v0, y)
  9687  		return true
  9688  	}
  9689  	// match: (Rsh16x32 <t> x y)
  9690  	// cond: !shiftIsBounded(v)
  9691  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
  9692  	for {
  9693  		t := v.Type
  9694  		x := v_0
  9695  		y := v_1
  9696  		if !(!shiftIsBounded(v)) {
  9697  			break
  9698  		}
  9699  		v.reset(OpLOONG64SRAV)
  9700  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9701  		v0.AddArg(x)
  9702  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9703  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9704  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9705  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9706  		v4.AddArg(y)
  9707  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9708  		v5.AuxInt = int64ToAuxInt(63)
  9709  		v3.AddArg2(v4, v5)
  9710  		v2.AddArg(v3)
  9711  		v1.AddArg2(v2, v4)
  9712  		v.AddArg2(v0, v1)
  9713  		return true
  9714  	}
  9715  	return false
  9716  }
  9717  func rewriteValueLOONG64_OpRsh16x64(v *Value) bool {
  9718  	v_1 := v.Args[1]
  9719  	v_0 := v.Args[0]
  9720  	b := v.Block
  9721  	typ := &b.Func.Config.Types
  9722  	// match: (Rsh16x64 x y)
  9723  	// cond: shiftIsBounded(v)
  9724  	// result: (SRAV (SignExt16to64 x) y)
  9725  	for {
  9726  		x := v_0
  9727  		y := v_1
  9728  		if !(shiftIsBounded(v)) {
  9729  			break
  9730  		}
  9731  		v.reset(OpLOONG64SRAV)
  9732  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9733  		v0.AddArg(x)
  9734  		v.AddArg2(v0, y)
  9735  		return true
  9736  	}
  9737  	// match: (Rsh16x64 <t> x y)
  9738  	// cond: !shiftIsBounded(v)
  9739  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
  9740  	for {
  9741  		t := v.Type
  9742  		x := v_0
  9743  		y := v_1
  9744  		if !(!shiftIsBounded(v)) {
  9745  			break
  9746  		}
  9747  		v.reset(OpLOONG64SRAV)
  9748  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9749  		v0.AddArg(x)
  9750  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9751  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9752  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9753  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9754  		v4.AuxInt = int64ToAuxInt(63)
  9755  		v3.AddArg2(y, v4)
  9756  		v2.AddArg(v3)
  9757  		v1.AddArg2(v2, y)
  9758  		v.AddArg2(v0, v1)
  9759  		return true
  9760  	}
  9761  	return false
  9762  }
  9763  func rewriteValueLOONG64_OpRsh16x8(v *Value) bool {
  9764  	v_1 := v.Args[1]
  9765  	v_0 := v.Args[0]
  9766  	b := v.Block
  9767  	typ := &b.Func.Config.Types
  9768  	// match: (Rsh16x8 x y)
  9769  	// cond: shiftIsBounded(v)
  9770  	// result: (SRAV (SignExt16to64 x) y)
  9771  	for {
  9772  		x := v_0
  9773  		y := v_1
  9774  		if !(shiftIsBounded(v)) {
  9775  			break
  9776  		}
  9777  		v.reset(OpLOONG64SRAV)
  9778  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9779  		v0.AddArg(x)
  9780  		v.AddArg2(v0, y)
  9781  		return true
  9782  	}
  9783  	// match: (Rsh16x8 <t> x y)
  9784  	// cond: !shiftIsBounded(v)
  9785  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
  9786  	for {
  9787  		t := v.Type
  9788  		x := v_0
  9789  		y := v_1
  9790  		if !(!shiftIsBounded(v)) {
  9791  			break
  9792  		}
  9793  		v.reset(OpLOONG64SRAV)
  9794  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9795  		v0.AddArg(x)
  9796  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9797  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9798  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9799  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9800  		v4.AddArg(y)
  9801  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9802  		v5.AuxInt = int64ToAuxInt(63)
  9803  		v3.AddArg2(v4, v5)
  9804  		v2.AddArg(v3)
  9805  		v1.AddArg2(v2, v4)
  9806  		v.AddArg2(v0, v1)
  9807  		return true
  9808  	}
  9809  	return false
  9810  }
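         // The Rsh32* rules below use the word-sized SRL/SRA instead of
         // SRLV/SRAV, so the MASKEQZ range check compares the count against 32
         // and the signed saturation constant becomes 31.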
  9811  func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool {
  9812  	v_1 := v.Args[1]
  9813  	v_0 := v.Args[0]
  9814  	b := v.Block
  9815  	typ := &b.Func.Config.Types
  9816  	// match: (Rsh32Ux16 x y)
  9817  	// cond: shiftIsBounded(v)
  9818  	// result: (SRL x y)
  9819  	for {
  9820  		x := v_0
  9821  		y := v_1
  9822  		if !(shiftIsBounded(v)) {
  9823  			break
  9824  		}
  9825  		v.reset(OpLOONG64SRL)
  9826  		v.AddArg2(x, y)
  9827  		return true
  9828  	}
  9829  	// match: (Rsh32Ux16 <t> x y)
  9830  	// cond: !shiftIsBounded(v)
  9831  	// result: (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
  9832  	for {
  9833  		t := v.Type
  9834  		x := v_0
  9835  		y := v_1
  9836  		if !(!shiftIsBounded(v)) {
  9837  			break
  9838  		}
  9839  		v.reset(OpLOONG64MASKEQZ)
  9840  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9841  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9842  		v1.AddArg(y)
  9843  		v0.AddArg2(x, v1)
  9844  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9845  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9846  		v3.AuxInt = int64ToAuxInt(32)
  9847  		v2.AddArg2(v3, v1)
  9848  		v.AddArg2(v0, v2)
  9849  		return true
  9850  	}
  9851  	return false
  9852  }
  9853  func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool {
  9854  	v_1 := v.Args[1]
  9855  	v_0 := v.Args[0]
  9856  	b := v.Block
  9857  	typ := &b.Func.Config.Types
  9858  	// match: (Rsh32Ux32 x y)
  9859  	// cond: shiftIsBounded(v)
  9860  	// result: (SRL x y)
  9861  	for {
  9862  		x := v_0
  9863  		y := v_1
  9864  		if !(shiftIsBounded(v)) {
  9865  			break
  9866  		}
  9867  		v.reset(OpLOONG64SRL)
  9868  		v.AddArg2(x, y)
  9869  		return true
  9870  	}
  9871  	// match: (Rsh32Ux32 <t> x y)
  9872  	// cond: !shiftIsBounded(v)
  9873  	// result: (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
  9874  	for {
  9875  		t := v.Type
  9876  		x := v_0
  9877  		y := v_1
  9878  		if !(!shiftIsBounded(v)) {
  9879  			break
  9880  		}
  9881  		v.reset(OpLOONG64MASKEQZ)
  9882  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9883  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9884  		v1.AddArg(y)
  9885  		v0.AddArg2(x, v1)
  9886  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9887  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9888  		v3.AuxInt = int64ToAuxInt(32)
  9889  		v2.AddArg2(v3, v1)
  9890  		v.AddArg2(v0, v2)
  9891  		return true
  9892  	}
  9893  	return false
  9894  }
  9895  func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool {
  9896  	v_1 := v.Args[1]
  9897  	v_0 := v.Args[0]
  9898  	b := v.Block
  9899  	typ := &b.Func.Config.Types
  9900  	// match: (Rsh32Ux64 x y)
  9901  	// cond: shiftIsBounded(v)
  9902  	// result: (SRL x y)
  9903  	for {
  9904  		x := v_0
  9905  		y := v_1
  9906  		if !(shiftIsBounded(v)) {
  9907  			break
  9908  		}
  9909  		v.reset(OpLOONG64SRL)
  9910  		v.AddArg2(x, y)
  9911  		return true
  9912  	}
  9913  	// match: (Rsh32Ux64 <t> x y)
  9914  	// cond: !shiftIsBounded(v)
  9915  	// result: (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
  9916  	for {
  9917  		t := v.Type
  9918  		x := v_0
  9919  		y := v_1
  9920  		if !(!shiftIsBounded(v)) {
  9921  			break
  9922  		}
  9923  		v.reset(OpLOONG64MASKEQZ)
  9924  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9925  		v0.AddArg2(x, y)
  9926  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9927  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9928  		v2.AuxInt = int64ToAuxInt(32)
  9929  		v1.AddArg2(v2, y)
  9930  		v.AddArg2(v0, v1)
  9931  		return true
  9932  	}
  9933  	return false
  9934  }
  9935  func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool {
  9936  	v_1 := v.Args[1]
  9937  	v_0 := v.Args[0]
  9938  	b := v.Block
  9939  	typ := &b.Func.Config.Types
  9940  	// match: (Rsh32Ux8 x y)
  9941  	// cond: shiftIsBounded(v)
  9942  	// result: (SRL x y)
  9943  	for {
  9944  		x := v_0
  9945  		y := v_1
  9946  		if !(shiftIsBounded(v)) {
  9947  			break
  9948  		}
  9949  		v.reset(OpLOONG64SRL)
  9950  		v.AddArg2(x, y)
  9951  		return true
  9952  	}
  9953  	// match: (Rsh32Ux8 <t> x y)
  9954  	// cond: !shiftIsBounded(v)
  9955  	// result: (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
  9956  	for {
  9957  		t := v.Type
  9958  		x := v_0
  9959  		y := v_1
  9960  		if !(!shiftIsBounded(v)) {
  9961  			break
  9962  		}
  9963  		v.reset(OpLOONG64MASKEQZ)
  9964  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9965  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9966  		v1.AddArg(y)
  9967  		v0.AddArg2(x, v1)
  9968  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9969  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9970  		v3.AuxInt = int64ToAuxInt(32)
  9971  		v2.AddArg2(v3, v1)
  9972  		v.AddArg2(v0, v2)
  9973  		return true
  9974  	}
  9975  	return false
  9976  }
  9977  func rewriteValueLOONG64_OpRsh32x16(v *Value) bool {
  9978  	v_1 := v.Args[1]
  9979  	v_0 := v.Args[0]
  9980  	b := v.Block
  9981  	typ := &b.Func.Config.Types
  9982  	// match: (Rsh32x16 x y)
  9983  	// cond: shiftIsBounded(v)
  9984  	// result: (SRA x y)
  9985  	for {
  9986  		x := v_0
  9987  		y := v_1
  9988  		if !(shiftIsBounded(v)) {
  9989  			break
  9990  		}
  9991  		v.reset(OpLOONG64SRA)
  9992  		v.AddArg2(x, y)
  9993  		return true
  9994  	}
  9995  	// match: (Rsh32x16 <t> x y)
  9996  	// cond: !shiftIsBounded(v)
  9997  	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
  9998  	for {
  9999  		t := v.Type
 10000  		x := v_0
 10001  		y := v_1
 10002  		if !(!shiftIsBounded(v)) {
 10003  			break
 10004  		}
 10005  		v.reset(OpLOONG64SRA)
 10006  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10007  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10008  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10009  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10010  		v3.AddArg(y)
 10011  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10012  		v4.AuxInt = int64ToAuxInt(31)
 10013  		v2.AddArg2(v3, v4)
 10014  		v1.AddArg(v2)
 10015  		v0.AddArg2(v1, v3)
 10016  		v.AddArg2(x, v0)
 10017  		return true
 10018  	}
 10019  	return false
 10020  }
 10021  func rewriteValueLOONG64_OpRsh32x32(v *Value) bool {
 10022  	v_1 := v.Args[1]
 10023  	v_0 := v.Args[0]
 10024  	b := v.Block
 10025  	typ := &b.Func.Config.Types
 10026  	// match: (Rsh32x32 x y)
 10027  	// cond: shiftIsBounded(v)
 10028  	// result: (SRA x y)
 10029  	for {
 10030  		x := v_0
 10031  		y := v_1
 10032  		if !(shiftIsBounded(v)) {
 10033  			break
 10034  		}
 10035  		v.reset(OpLOONG64SRA)
 10036  		v.AddArg2(x, y)
 10037  		return true
 10038  	}
 10039  	// match: (Rsh32x32 <t> x y)
 10040  	// cond: !shiftIsBounded(v)
 10041  	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
 10042  	for {
 10043  		t := v.Type
 10044  		x := v_0
 10045  		y := v_1
 10046  		if !(!shiftIsBounded(v)) {
 10047  			break
 10048  		}
 10049  		v.reset(OpLOONG64SRA)
 10050  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10051  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10052  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10053  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10054  		v3.AddArg(y)
 10055  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10056  		v4.AuxInt = int64ToAuxInt(31)
 10057  		v2.AddArg2(v3, v4)
 10058  		v1.AddArg(v2)
 10059  		v0.AddArg2(v1, v3)
 10060  		v.AddArg2(x, v0)
 10061  		return true
 10062  	}
 10063  	return false
 10064  }
 10065  func rewriteValueLOONG64_OpRsh32x64(v *Value) bool {
 10066  	v_1 := v.Args[1]
 10067  	v_0 := v.Args[0]
 10068  	b := v.Block
 10069  	typ := &b.Func.Config.Types
 10070  	// match: (Rsh32x64 x y)
 10071  	// cond: shiftIsBounded(v)
 10072  	// result: (SRA x y)
 10073  	for {
 10074  		x := v_0
 10075  		y := v_1
 10076  		if !(shiftIsBounded(v)) {
 10077  			break
 10078  		}
 10079  		v.reset(OpLOONG64SRA)
 10080  		v.AddArg2(x, y)
 10081  		return true
 10082  	}
 10083  	// match: (Rsh32x64 <t> x y)
 10084  	// cond: !shiftIsBounded(v)
 10085  	// result: (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
 10086  	for {
 10087  		t := v.Type
 10088  		x := v_0
 10089  		y := v_1
 10090  		if !(!shiftIsBounded(v)) {
 10091  			break
 10092  		}
 10093  		v.reset(OpLOONG64SRA)
 10094  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10095  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10096  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10097  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10098  		v3.AuxInt = int64ToAuxInt(31)
 10099  		v2.AddArg2(y, v3)
 10100  		v1.AddArg(v2)
 10101  		v0.AddArg2(v1, y)
 10102  		v.AddArg2(x, v0)
 10103  		return true
 10104  	}
 10105  	return false
 10106  }
 10107  func rewriteValueLOONG64_OpRsh32x8(v *Value) bool {
 10108  	v_1 := v.Args[1]
 10109  	v_0 := v.Args[0]
 10110  	b := v.Block
 10111  	typ := &b.Func.Config.Types
 10112  	// match: (Rsh32x8 x y)
 10113  	// cond: shiftIsBounded(v)
 10114  	// result: (SRA x y)
 10115  	for {
 10116  		x := v_0
 10117  		y := v_1
 10118  		if !(shiftIsBounded(v)) {
 10119  			break
 10120  		}
 10121  		v.reset(OpLOONG64SRA)
 10122  		v.AddArg2(x, y)
 10123  		return true
 10124  	}
 10125  	// match: (Rsh32x8 <t> x y)
 10126  	// cond: !shiftIsBounded(v)
 10127  	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
 10128  	for {
 10129  		t := v.Type
 10130  		x := v_0
 10131  		y := v_1
 10132  		if !(!shiftIsBounded(v)) {
 10133  			break
 10134  		}
 10135  		v.reset(OpLOONG64SRA)
 10136  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10137  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10138  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10139  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10140  		v3.AddArg(y)
 10141  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10142  		v4.AuxInt = int64ToAuxInt(31)
 10143  		v2.AddArg2(v3, v4)
 10144  		v1.AddArg(v2)
 10145  		v0.AddArg2(v1, v3)
 10146  		v.AddArg2(x, v0)
 10147  		return true
 10148  	}
 10149  	return false
 10150  }
 10151  func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool {
 10152  	v_1 := v.Args[1]
 10153  	v_0 := v.Args[0]
 10154  	b := v.Block
 10155  	typ := &b.Func.Config.Types
 10156  	// match: (Rsh64Ux16 x y)
 10157  	// cond: shiftIsBounded(v)
 10158  	// result: (SRLV x y)
 10159  	for {
 10160  		x := v_0
 10161  		y := v_1
 10162  		if !(shiftIsBounded(v)) {
 10163  			break
 10164  		}
 10165  		v.reset(OpLOONG64SRLV)
 10166  		v.AddArg2(x, y)
 10167  		return true
 10168  	}
 10169  	// match: (Rsh64Ux16 <t> x y)
 10170  	// cond: !shiftIsBounded(v)
 10171  	// result: (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
 10172  	for {
 10173  		t := v.Type
 10174  		x := v_0
 10175  		y := v_1
 10176  		if !(!shiftIsBounded(v)) {
 10177  			break
 10178  		}
 10179  		v.reset(OpLOONG64MASKEQZ)
 10180  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10181  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10182  		v1.AddArg(y)
 10183  		v0.AddArg2(x, v1)
 10184  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10185  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10186  		v3.AuxInt = int64ToAuxInt(64)
 10187  		v2.AddArg2(v3, v1)
 10188  		v.AddArg2(v0, v2)
 10189  		return true
 10190  	}
 10191  	return false
 10192  }
 10193  func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool {
 10194  	v_1 := v.Args[1]
 10195  	v_0 := v.Args[0]
 10196  	b := v.Block
 10197  	typ := &b.Func.Config.Types
 10198  	// match: (Rsh64Ux32 x y)
 10199  	// cond: shiftIsBounded(v)
 10200  	// result: (SRLV x y)
 10201  	for {
 10202  		x := v_0
 10203  		y := v_1
 10204  		if !(shiftIsBounded(v)) {
 10205  			break
 10206  		}
 10207  		v.reset(OpLOONG64SRLV)
 10208  		v.AddArg2(x, y)
 10209  		return true
 10210  	}
 10211  	// match: (Rsh64Ux32 <t> x y)
 10212  	// cond: !shiftIsBounded(v)
 10213  	// result: (MASKEQZ (SRLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
 10214  	for {
 10215  		t := v.Type
 10216  		x := v_0
 10217  		y := v_1
 10218  		if !(!shiftIsBounded(v)) {
 10219  			break
 10220  		}
 10221  		v.reset(OpLOONG64MASKEQZ)
 10222  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10223  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10224  		v1.AddArg(y)
 10225  		v0.AddArg2(x, v1)
 10226  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10227  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10228  		v3.AuxInt = int64ToAuxInt(64)
 10229  		v2.AddArg2(v3, v1)
 10230  		v.AddArg2(v0, v2)
 10231  		return true
 10232  	}
 10233  	return false
 10234  }
 10235  func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool {
 10236  	v_1 := v.Args[1]
 10237  	v_0 := v.Args[0]
 10238  	b := v.Block
 10239  	typ := &b.Func.Config.Types
 10240  	// match: (Rsh64Ux64 x y)
 10241  	// cond: shiftIsBounded(v)
 10242  	// result: (SRLV x y)
 10243  	for {
 10244  		x := v_0
 10245  		y := v_1
 10246  		if !(shiftIsBounded(v)) {
 10247  			break
 10248  		}
 10249  		v.reset(OpLOONG64SRLV)
 10250  		v.AddArg2(x, y)
 10251  		return true
 10252  	}
 10253  	// match: (Rsh64Ux64 <t> x y)
 10254  	// cond: !shiftIsBounded(v)
 10255  	// result: (MASKEQZ (SRLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
 10256  	for {
 10257  		t := v.Type
 10258  		x := v_0
 10259  		y := v_1
 10260  		if !(!shiftIsBounded(v)) {
 10261  			break
 10262  		}
 10263  		v.reset(OpLOONG64MASKEQZ)
 10264  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10265  		v0.AddArg2(x, y)
 10266  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10267  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10268  		v2.AuxInt = int64ToAuxInt(64)
 10269  		v1.AddArg2(v2, y)
 10270  		v.AddArg2(v0, v1)
 10271  		return true
 10272  	}
 10273  	return false
 10274  }
 10275  func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool {
 10276  	v_1 := v.Args[1]
 10277  	v_0 := v.Args[0]
 10278  	b := v.Block
 10279  	typ := &b.Func.Config.Types
 10280  	// match: (Rsh64Ux8 x y)
 10281  	// cond: shiftIsBounded(v)
 10282  	// result: (SRLV x y)
 10283  	for {
 10284  		x := v_0
 10285  		y := v_1
 10286  		if !(shiftIsBounded(v)) {
 10287  			break
 10288  		}
 10289  		v.reset(OpLOONG64SRLV)
 10290  		v.AddArg2(x, y)
 10291  		return true
 10292  	}
 10293  	// match: (Rsh64Ux8 <t> x y)
 10294  	// cond: !shiftIsBounded(v)
 10295  	// result: (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
 10296  	for {
 10297  		t := v.Type
 10298  		x := v_0
 10299  		y := v_1
 10300  		if !(!shiftIsBounded(v)) {
 10301  			break
 10302  		}
 10303  		v.reset(OpLOONG64MASKEQZ)
 10304  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10305  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10306  		v1.AddArg(y)
 10307  		v0.AddArg2(x, v1)
 10308  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10309  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10310  		v3.AuxInt = int64ToAuxInt(64)
 10311  		v2.AddArg2(v3, v1)
 10312  		v.AddArg2(v0, v2)
 10313  		return true
 10314  	}
 10315  	return false
 10316  }
 10317  func rewriteValueLOONG64_OpRsh64x16(v *Value) bool {
 10318  	v_1 := v.Args[1]
 10319  	v_0 := v.Args[0]
 10320  	b := v.Block
 10321  	typ := &b.Func.Config.Types
 10322  	// match: (Rsh64x16 x y)
 10323  	// cond: shiftIsBounded(v)
 10324  	// result: (SRAV x y)
 10325  	for {
 10326  		x := v_0
 10327  		y := v_1
 10328  		if !(shiftIsBounded(v)) {
 10329  			break
 10330  		}
 10331  		v.reset(OpLOONG64SRAV)
 10332  		v.AddArg2(x, y)
 10333  		return true
 10334  	}
 10335  	// match: (Rsh64x16 <t> x y)
 10336  	// cond: !shiftIsBounded(v)
 10337  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
 10338  	for {
 10339  		t := v.Type
 10340  		x := v_0
 10341  		y := v_1
 10342  		if !(!shiftIsBounded(v)) {
 10343  			break
 10344  		}
 10345  		v.reset(OpLOONG64SRAV)
 10346  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10347  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10348  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10349  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10350  		v3.AddArg(y)
 10351  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10352  		v4.AuxInt = int64ToAuxInt(63)
 10353  		v2.AddArg2(v3, v4)
 10354  		v1.AddArg(v2)
 10355  		v0.AddArg2(v1, v3)
 10356  		v.AddArg2(x, v0)
 10357  		return true
 10358  	}
 10359  	return false
 10360  }
 10361  func rewriteValueLOONG64_OpRsh64x32(v *Value) bool {
 10362  	v_1 := v.Args[1]
 10363  	v_0 := v.Args[0]
 10364  	b := v.Block
 10365  	typ := &b.Func.Config.Types
 10366  	// match: (Rsh64x32 x y)
 10367  	// cond: shiftIsBounded(v)
 10368  	// result: (SRAV x y)
 10369  	for {
 10370  		x := v_0
 10371  		y := v_1
 10372  		if !(shiftIsBounded(v)) {
 10373  			break
 10374  		}
 10375  		v.reset(OpLOONG64SRAV)
 10376  		v.AddArg2(x, y)
 10377  		return true
 10378  	}
 10379  	// match: (Rsh64x32 <t> x y)
 10380  	// cond: !shiftIsBounded(v)
 10381  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
 10382  	for {
 10383  		t := v.Type
 10384  		x := v_0
 10385  		y := v_1
 10386  		if !(!shiftIsBounded(v)) {
 10387  			break
 10388  		}
 10389  		v.reset(OpLOONG64SRAV)
 10390  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10391  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10392  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10393  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10394  		v3.AddArg(y)
 10395  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10396  		v4.AuxInt = int64ToAuxInt(63)
 10397  		v2.AddArg2(v3, v4)
 10398  		v1.AddArg(v2)
 10399  		v0.AddArg2(v1, v3)
 10400  		v.AddArg2(x, v0)
 10401  		return true
 10402  	}
 10403  	return false
 10404  }
 10405  func rewriteValueLOONG64_OpRsh64x64(v *Value) bool {
 10406  	v_1 := v.Args[1]
 10407  	v_0 := v.Args[0]
 10408  	b := v.Block
 10409  	typ := &b.Func.Config.Types
 10410  	// match: (Rsh64x64 x y)
 10411  	// cond: shiftIsBounded(v)
 10412  	// result: (SRAV x y)
 10413  	for {
 10414  		x := v_0
 10415  		y := v_1
 10416  		if !(shiftIsBounded(v)) {
 10417  			break
 10418  		}
 10419  		v.reset(OpLOONG64SRAV)
 10420  		v.AddArg2(x, y)
 10421  		return true
 10422  	}
 10423  	// match: (Rsh64x64 <t> x y)
 10424  	// cond: !shiftIsBounded(v)
 10425  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
 10426  	for {
 10427  		t := v.Type
 10428  		x := v_0
 10429  		y := v_1
 10430  		if !(!shiftIsBounded(v)) {
 10431  			break
 10432  		}
 10433  		v.reset(OpLOONG64SRAV)
 10434  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10435  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10436  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10437  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10438  		v3.AuxInt = int64ToAuxInt(63)
 10439  		v2.AddArg2(y, v3)
 10440  		v1.AddArg(v2)
 10441  		v0.AddArg2(v1, y)
 10442  		v.AddArg2(x, v0)
 10443  		return true
 10444  	}
 10445  	return false
 10446  }
 10447  func rewriteValueLOONG64_OpRsh64x8(v *Value) bool {
 10448  	v_1 := v.Args[1]
 10449  	v_0 := v.Args[0]
 10450  	b := v.Block
 10451  	typ := &b.Func.Config.Types
 10452  	// match: (Rsh64x8 x y)
 10453  	// cond: shiftIsBounded(v)
 10454  	// result: (SRAV x y)
 10455  	for {
 10456  		x := v_0
 10457  		y := v_1
 10458  		if !(shiftIsBounded(v)) {
 10459  			break
 10460  		}
 10461  		v.reset(OpLOONG64SRAV)
 10462  		v.AddArg2(x, y)
 10463  		return true
 10464  	}
 10465  	// match: (Rsh64x8 <t> x y)
 10466  	// cond: !shiftIsBounded(v)
 10467  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
 10468  	for {
 10469  		t := v.Type
 10470  		x := v_0
 10471  		y := v_1
 10472  		if !(!shiftIsBounded(v)) {
 10473  			break
 10474  		}
 10475  		v.reset(OpLOONG64SRAV)
 10476  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10477  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10478  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10479  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10480  		v3.AddArg(y)
 10481  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10482  		v4.AuxInt = int64ToAuxInt(63)
 10483  		v2.AddArg2(v3, v4)
 10484  		v1.AddArg(v2)
 10485  		v0.AddArg2(v1, v3)
 10486  		v.AddArg2(x, v0)
 10487  		return true
 10488  	}
 10489  	return false
 10490  }
 10491  func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool {
 10492  	v_1 := v.Args[1]
 10493  	v_0 := v.Args[0]
 10494  	b := v.Block
 10495  	typ := &b.Func.Config.Types
 10496  	// match: (Rsh8Ux16 x y)
 10497  	// cond: shiftIsBounded(v)
 10498  	// result: (SRLV (ZeroExt8to64 x) y)
 10499  	for {
 10500  		x := v_0
 10501  		y := v_1
 10502  		if !(shiftIsBounded(v)) {
 10503  			break
 10504  		}
 10505  		v.reset(OpLOONG64SRLV)
 10506  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10507  		v0.AddArg(x)
 10508  		v.AddArg2(v0, y)
 10509  		return true
 10510  	}
 10511  	// match: (Rsh8Ux16 <t> x y)
 10512  	// cond: !shiftIsBounded(v)
 10513  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
 10514  	for {
 10515  		t := v.Type
 10516  		x := v_0
 10517  		y := v_1
 10518  		if !(!shiftIsBounded(v)) {
 10519  			break
 10520  		}
 10521  		v.reset(OpLOONG64MASKEQZ)
 10522  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10523  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10524  		v1.AddArg(x)
 10525  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10526  		v2.AddArg(y)
 10527  		v0.AddArg2(v1, v2)
 10528  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10529  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10530  		v4.AuxInt = int64ToAuxInt(64)
 10531  		v3.AddArg2(v4, v2)
 10532  		v.AddArg2(v0, v3)
 10533  		return true
 10534  	}
 10535  	return false
 10536  }
 10537  func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool {
 10538  	v_1 := v.Args[1]
 10539  	v_0 := v.Args[0]
 10540  	b := v.Block
 10541  	typ := &b.Func.Config.Types
 10542  	// match: (Rsh8Ux32 x y)
 10543  	// cond: shiftIsBounded(v)
 10544  	// result: (SRLV (ZeroExt8to64 x) y)
 10545  	for {
 10546  		x := v_0
 10547  		y := v_1
 10548  		if !(shiftIsBounded(v)) {
 10549  			break
 10550  		}
 10551  		v.reset(OpLOONG64SRLV)
 10552  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10553  		v0.AddArg(x)
 10554  		v.AddArg2(v0, y)
 10555  		return true
 10556  	}
 10557  	// match: (Rsh8Ux32 <t> x y)
 10558  	// cond: !shiftIsBounded(v)
 10559  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
 10560  	for {
 10561  		t := v.Type
 10562  		x := v_0
 10563  		y := v_1
 10564  		if !(!shiftIsBounded(v)) {
 10565  			break
 10566  		}
 10567  		v.reset(OpLOONG64MASKEQZ)
 10568  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10569  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10570  		v1.AddArg(x)
 10571  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10572  		v2.AddArg(y)
 10573  		v0.AddArg2(v1, v2)
 10574  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10575  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10576  		v4.AuxInt = int64ToAuxInt(64)
 10577  		v3.AddArg2(v4, v2)
 10578  		v.AddArg2(v0, v3)
 10579  		return true
 10580  	}
 10581  	return false
 10582  }
 10583  func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool {
 10584  	v_1 := v.Args[1]
 10585  	v_0 := v.Args[0]
 10586  	b := v.Block
 10587  	typ := &b.Func.Config.Types
 10588  	// match: (Rsh8Ux64 x y)
 10589  	// cond: shiftIsBounded(v)
 10590  	// result: (SRLV (ZeroExt8to64 x) y)
 10591  	for {
 10592  		x := v_0
 10593  		y := v_1
 10594  		if !(shiftIsBounded(v)) {
 10595  			break
 10596  		}
 10597  		v.reset(OpLOONG64SRLV)
 10598  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10599  		v0.AddArg(x)
 10600  		v.AddArg2(v0, y)
 10601  		return true
 10602  	}
 10603  	// match: (Rsh8Ux64 <t> x y)
 10604  	// cond: !shiftIsBounded(v)
 10605  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
 10606  	for {
 10607  		t := v.Type
 10608  		x := v_0
 10609  		y := v_1
 10610  		if !(!shiftIsBounded(v)) {
 10611  			break
 10612  		}
 10613  		v.reset(OpLOONG64MASKEQZ)
 10614  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10615  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10616  		v1.AddArg(x)
 10617  		v0.AddArg2(v1, y)
 10618  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10619  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10620  		v3.AuxInt = int64ToAuxInt(64)
 10621  		v2.AddArg2(v3, y)
 10622  		v.AddArg2(v0, v2)
 10623  		return true
 10624  	}
 10625  	return false
 10626  }
 10627  func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool {
 10628  	v_1 := v.Args[1]
 10629  	v_0 := v.Args[0]
 10630  	b := v.Block
 10631  	typ := &b.Func.Config.Types
 10632  	// match: (Rsh8Ux8 x y)
 10633  	// cond: shiftIsBounded(v)
 10634  	// result: (SRLV (ZeroExt8to64 x) y)
 10635  	for {
 10636  		x := v_0
 10637  		y := v_1
 10638  		if !(shiftIsBounded(v)) {
 10639  			break
 10640  		}
 10641  		v.reset(OpLOONG64SRLV)
 10642  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10643  		v0.AddArg(x)
 10644  		v.AddArg2(v0, y)
 10645  		return true
 10646  	}
 10647  	// match: (Rsh8Ux8 <t> x y)
 10648  	// cond: !shiftIsBounded(v)
 10649  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
 10650  	for {
 10651  		t := v.Type
 10652  		x := v_0
 10653  		y := v_1
 10654  		if !(!shiftIsBounded(v)) {
 10655  			break
 10656  		}
 10657  		v.reset(OpLOONG64MASKEQZ)
 10658  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10659  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10660  		v1.AddArg(x)
 10661  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10662  		v2.AddArg(y)
 10663  		v0.AddArg2(v1, v2)
 10664  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10665  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10666  		v4.AuxInt = int64ToAuxInt(64)
 10667  		v3.AddArg2(v4, v2)
 10668  		v.AddArg2(v0, v3)
 10669  		return true
 10670  	}
 10671  	return false
 10672  }
 10673  func rewriteValueLOONG64_OpRsh8x16(v *Value) bool {
 10674  	v_1 := v.Args[1]
 10675  	v_0 := v.Args[0]
 10676  	b := v.Block
 10677  	typ := &b.Func.Config.Types
 10678  	// match: (Rsh8x16 x y)
 10679  	// cond: shiftIsBounded(v)
 10680  	// result: (SRAV (SignExt8to64 x) y)
 10681  	for {
 10682  		x := v_0
 10683  		y := v_1
 10684  		if !(shiftIsBounded(v)) {
 10685  			break
 10686  		}
 10687  		v.reset(OpLOONG64SRAV)
 10688  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10689  		v0.AddArg(x)
 10690  		v.AddArg2(v0, y)
 10691  		return true
 10692  	}
 10693  	// match: (Rsh8x16 <t> x y)
 10694  	// cond: !shiftIsBounded(v)
 10695  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
 10696  	for {
 10697  		t := v.Type
 10698  		x := v_0
 10699  		y := v_1
 10700  		if !(!shiftIsBounded(v)) {
 10701  			break
 10702  		}
 10703  		v.reset(OpLOONG64SRAV)
 10704  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10705  		v0.AddArg(x)
 10706  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10707  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10708  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10709  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10710  		v4.AddArg(y)
 10711  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10712  		v5.AuxInt = int64ToAuxInt(63)
 10713  		v3.AddArg2(v4, v5)
 10714  		v2.AddArg(v3)
 10715  		v1.AddArg2(v2, v4)
 10716  		v.AddArg2(v0, v1)
 10717  		return true
 10718  	}
 10719  	return false
 10720  }
 10721  func rewriteValueLOONG64_OpRsh8x32(v *Value) bool {
 10722  	v_1 := v.Args[1]
 10723  	v_0 := v.Args[0]
 10724  	b := v.Block
 10725  	typ := &b.Func.Config.Types
 10726  	// match: (Rsh8x32 x y)
 10727  	// cond: shiftIsBounded(v)
 10728  	// result: (SRAV (SignExt8to64 x) y)
 10729  	for {
 10730  		x := v_0
 10731  		y := v_1
 10732  		if !(shiftIsBounded(v)) {
 10733  			break
 10734  		}
 10735  		v.reset(OpLOONG64SRAV)
 10736  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10737  		v0.AddArg(x)
 10738  		v.AddArg2(v0, y)
 10739  		return true
 10740  	}
 10741  	// match: (Rsh8x32 <t> x y)
 10742  	// cond: !shiftIsBounded(v)
 10743  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
 10744  	for {
 10745  		t := v.Type
 10746  		x := v_0
 10747  		y := v_1
 10748  		if !(!shiftIsBounded(v)) {
 10749  			break
 10750  		}
 10751  		v.reset(OpLOONG64SRAV)
 10752  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10753  		v0.AddArg(x)
 10754  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10755  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10756  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10757  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10758  		v4.AddArg(y)
 10759  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10760  		v5.AuxInt = int64ToAuxInt(63)
 10761  		v3.AddArg2(v4, v5)
 10762  		v2.AddArg(v3)
 10763  		v1.AddArg2(v2, v4)
 10764  		v.AddArg2(v0, v1)
 10765  		return true
 10766  	}
 10767  	return false
 10768  }
 10769  func rewriteValueLOONG64_OpRsh8x64(v *Value) bool {
 10770  	v_1 := v.Args[1]
 10771  	v_0 := v.Args[0]
 10772  	b := v.Block
 10773  	typ := &b.Func.Config.Types
 10774  	// match: (Rsh8x64 x y)
 10775  	// cond: shiftIsBounded(v)
 10776  	// result: (SRAV (SignExt8to64 x) y)
 10777  	for {
 10778  		x := v_0
 10779  		y := v_1
 10780  		if !(shiftIsBounded(v)) {
 10781  			break
 10782  		}
 10783  		v.reset(OpLOONG64SRAV)
 10784  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10785  		v0.AddArg(x)
 10786  		v.AddArg2(v0, y)
 10787  		return true
 10788  	}
 10789  	// match: (Rsh8x64 <t> x y)
 10790  	// cond: !shiftIsBounded(v)
 10791  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
 10792  	for {
 10793  		t := v.Type
 10794  		x := v_0
 10795  		y := v_1
 10796  		if !(!shiftIsBounded(v)) {
 10797  			break
 10798  		}
 10799  		v.reset(OpLOONG64SRAV)
 10800  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10801  		v0.AddArg(x)
 10802  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10803  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10804  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10805  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10806  		v4.AuxInt = int64ToAuxInt(63)
 10807  		v3.AddArg2(y, v4)
 10808  		v2.AddArg(v3)
 10809  		v1.AddArg2(v2, y)
 10810  		v.AddArg2(v0, v1)
 10811  		return true
 10812  	}
 10813  	return false
 10814  }
 10815  func rewriteValueLOONG64_OpRsh8x8(v *Value) bool {
 10816  	v_1 := v.Args[1]
 10817  	v_0 := v.Args[0]
 10818  	b := v.Block
 10819  	typ := &b.Func.Config.Types
 10820  	// match: (Rsh8x8 x y)
 10821  	// cond: shiftIsBounded(v)
 10822  	// result: (SRAV (SignExt8to64 x) y)
 10823  	for {
 10824  		x := v_0
 10825  		y := v_1
 10826  		if !(shiftIsBounded(v)) {
 10827  			break
 10828  		}
 10829  		v.reset(OpLOONG64SRAV)
 10830  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10831  		v0.AddArg(x)
 10832  		v.AddArg2(v0, y)
 10833  		return true
 10834  	}
 10835  	// match: (Rsh8x8 <t> x y)
 10836  	// cond: !shiftIsBounded(v)
 10837  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
 10838  	for {
 10839  		t := v.Type
 10840  		x := v_0
 10841  		y := v_1
 10842  		if !(!shiftIsBounded(v)) {
 10843  			break
 10844  		}
 10845  		v.reset(OpLOONG64SRAV)
 10846  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10847  		v0.AddArg(x)
 10848  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10849  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10850  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10851  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10852  		v4.AddArg(y)
 10853  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10854  		v5.AuxInt = int64ToAuxInt(63)
 10855  		v3.AddArg2(v4, v5)
 10856  		v2.AddArg(v3)
 10857  		v1.AddArg2(v2, v4)
 10858  		v.AddArg2(v0, v1)
 10859  		return true
 10860  	}
 10861  	return false
 10862  }
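// Note: Select0 extracts the first result of a multi-result generic op.
// For Mul64uhilo that is the high 64 bits of the product (MULHVU); for
// Mul64uover it is the (possibly wrapped) 64-bit product; for Add64carry and
// Sub64borrow it is the 64-bit sum or difference, with the carry/borrow bit
// recovered separately by the Select1 rules below.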
 10863  func rewriteValueLOONG64_OpSelect0(v *Value) bool {
 10864  	v_0 := v.Args[0]
 10865  	b := v.Block
 10866  	// match: (Select0 (Mul64uhilo x y))
 10867  	// result: (MULHVU x y)
 10868  	for {
 10869  		if v_0.Op != OpMul64uhilo {
 10870  			break
 10871  		}
 10872  		y := v_0.Args[1]
 10873  		x := v_0.Args[0]
 10874  		v.reset(OpLOONG64MULHVU)
 10875  		v.AddArg2(x, y)
 10876  		return true
 10877  	}
 10878  	// match: (Select0 (Mul64uover x y))
 10879  	// result: (MULV x y)
 10880  	for {
 10881  		if v_0.Op != OpMul64uover {
 10882  			break
 10883  		}
 10884  		y := v_0.Args[1]
 10885  		x := v_0.Args[0]
 10886  		v.reset(OpLOONG64MULV)
 10887  		v.AddArg2(x, y)
 10888  		return true
 10889  	}
 10890  	// match: (Select0 <t> (Add64carry x y c))
 10891  	// result: (ADDV (ADDV <t> x y) c)
 10892  	for {
 10893  		t := v.Type
 10894  		if v_0.Op != OpAdd64carry {
 10895  			break
 10896  		}
 10897  		c := v_0.Args[2]
 10898  		x := v_0.Args[0]
 10899  		y := v_0.Args[1]
 10900  		v.reset(OpLOONG64ADDV)
 10901  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
 10902  		v0.AddArg2(x, y)
 10903  		v.AddArg2(v0, c)
 10904  		return true
 10905  	}
 10906  	// match: (Select0 <t> (Sub64borrow x y c))
 10907  	// result: (SUBV (SUBV <t> x y) c)
 10908  	for {
 10909  		t := v.Type
 10910  		if v_0.Op != OpSub64borrow {
 10911  			break
 10912  		}
 10913  		c := v_0.Args[2]
 10914  		x := v_0.Args[0]
 10915  		y := v_0.Args[1]
 10916  		v.reset(OpLOONG64SUBV)
 10917  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
 10918  		v0.AddArg2(x, y)
 10919  		v.AddArg2(v0, c)
 10920  		return true
 10921  	}
 10922  	return false
 10923  }
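// Note: Select1 extracts the second result. For Mul64uhilo that is the low
// 64 bits of the product (MULV); for Mul64uover it is the overflow flag,
// i.e. "high half != 0", computed with SGTU against zero. For Add64carry the
// carry-out is recovered with unsigned compares (x > x+y catches overflow of
// the first addition, s > s+c of the second), OR-ed together; Sub64borrow's
// borrow bit is detected analogously, since a difference that is
// unsigned-greater than its minuend signals an underflow.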
 10924  func rewriteValueLOONG64_OpSelect1(v *Value) bool {
 10925  	v_0 := v.Args[0]
 10926  	b := v.Block
 10927  	typ := &b.Func.Config.Types
 10928  	// match: (Select1 (Mul64uhilo x y))
 10929  	// result: (MULV x y)
 10930  	for {
 10931  		if v_0.Op != OpMul64uhilo {
 10932  			break
 10933  		}
 10934  		y := v_0.Args[1]
 10935  		x := v_0.Args[0]
 10936  		v.reset(OpLOONG64MULV)
 10937  		v.AddArg2(x, y)
 10938  		return true
 10939  	}
 10940  	// match: (Select1 (Mul64uover x y))
 10941  	// result: (SGTU <typ.Bool> (MULHVU x y) (MOVVconst <typ.UInt64> [0]))
 10942  	for {
 10943  		if v_0.Op != OpMul64uover {
 10944  			break
 10945  		}
 10946  		y := v_0.Args[1]
 10947  		x := v_0.Args[0]
 10948  		v.reset(OpLOONG64SGTU)
 10949  		v.Type = typ.Bool
 10950  		v0 := b.NewValue0(v.Pos, OpLOONG64MULHVU, typ.UInt64)
 10951  		v0.AddArg2(x, y)
 10952  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10953  		v1.AuxInt = int64ToAuxInt(0)
 10954  		v.AddArg2(v0, v1)
 10955  		return true
 10956  	}
 10957  	// match: (Select1 <t> (Add64carry x y c))
 10958  	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
 10959  	for {
 10960  		t := v.Type
 10961  		if v_0.Op != OpAdd64carry {
 10962  			break
 10963  		}
 10964  		c := v_0.Args[2]
 10965  		x := v_0.Args[0]
 10966  		y := v_0.Args[1]
 10967  		v.reset(OpLOONG64OR)
 10968  		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10969  		s := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
 10970  		s.AddArg2(x, y)
 10971  		v0.AddArg2(x, s)
 10972  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10973  		v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
 10974  		v3.AddArg2(s, c)
 10975  		v2.AddArg2(s, v3)
 10976  		v.AddArg2(v0, v2)
 10977  		return true
 10978  	}
 10979  	// match: (Select1 <t> (Sub64borrow x y c))
 10980  	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
 10981  	for {
 10982  		t := v.Type
 10983  		if v_0.Op != OpSub64borrow {
 10984  			break
 10985  		}
 10986  		c := v_0.Args[2]
 10987  		x := v_0.Args[0]
 10988  		y := v_0.Args[1]
 10989  		v.reset(OpLOONG64OR)
 10990  		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10991  		s := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
 10992  		s.AddArg2(x, y)
 10993  		v0.AddArg2(s, x)
 10994  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10995  		v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
 10996  		v3.AddArg2(s, c)
 10997  		v2.AddArg2(v3, s)
 10998  		v.AddArg2(v0, v2)
 10999  		return true
 11000  	}
 11001  	return false
 11002  }
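// Note: the single SelectN rule inlines a call to runtime.memmove as an SSA
// Move when the length is a non-negative constant, this result is the call's
// only use, and isInlinableMemmove accepts the size for this target's
// config; clobber marks the now-dead call for removal.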
 11003  func rewriteValueLOONG64_OpSelectN(v *Value) bool {
 11004  	v_0 := v.Args[0]
 11005  	b := v.Block
 11006  	config := b.Func.Config
 11007  	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVVconst [sz]) mem))
 11008  	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
 11009  	// result: (Move [sz] dst src mem)
 11010  	for {
 11011  		if auxIntToInt64(v.AuxInt) != 0 {
 11012  			break
 11013  		}
 11014  		call := v_0
 11015  		if call.Op != OpLOONG64CALLstatic || len(call.Args) != 4 {
 11016  			break
 11017  		}
 11018  		sym := auxToCall(call.Aux)
 11019  		mem := call.Args[3]
 11020  		dst := call.Args[0]
 11021  		src := call.Args[1]
 11022  		call_2 := call.Args[2]
 11023  		if call_2.Op != OpLOONG64MOVVconst {
 11024  			break
 11025  		}
 11026  		sz := auxIntToInt64(call_2.AuxInt)
 11027  		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
 11028  			break
 11029  		}
 11030  		v.reset(OpMove)
 11031  		v.AuxInt = int64ToAuxInt(sz)
 11032  		v.AddArg3(dst, src, mem)
 11033  		return true
 11034  	}
 11035  	return false
 11036  }
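// Note: Slicemask must yield all ones for a positive length and zero for a
// zero length. NEGV makes a positive x negative, and the arithmetic right
// shift by 63 then smears the sign bit across the whole word, producing -1
// or 0.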
 11037  func rewriteValueLOONG64_OpSlicemask(v *Value) bool {
 11038  	v_0 := v.Args[0]
 11039  	b := v.Block
 11040  	// match: (Slicemask <t> x)
 11041  	// result: (SRAVconst (NEGV <t> x) [63])
 11042  	for {
 11043  		t := v.Type
 11044  		x := v_0
 11045  		v.reset(OpLOONG64SRAVconst)
 11046  		v.AuxInt = int64ToAuxInt(63)
 11047  		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 11048  		v0.AddArg(x)
 11049  		v.AddArg(v0)
 11050  		return true
 11051  	}
 11052  }
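// Note: Store is dispatched purely on the stored type's size and whether it
// is a float: 1, 2, 4 and 8 byte integers map to MOVB/MOVH/MOVW/MOVVstore,
// while 4 and 8 byte floats map to MOVF/MOVDstore.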
 11053  func rewriteValueLOONG64_OpStore(v *Value) bool {
 11054  	v_2 := v.Args[2]
 11055  	v_1 := v.Args[1]
 11056  	v_0 := v.Args[0]
 11057  	// match: (Store {t} ptr val mem)
 11058  	// cond: t.Size() == 1
 11059  	// result: (MOVBstore ptr val mem)
 11060  	for {
 11061  		t := auxToType(v.Aux)
 11062  		ptr := v_0
 11063  		val := v_1
 11064  		mem := v_2
 11065  		if !(t.Size() == 1) {
 11066  			break
 11067  		}
 11068  		v.reset(OpLOONG64MOVBstore)
 11069  		v.AddArg3(ptr, val, mem)
 11070  		return true
 11071  	}
 11072  	// match: (Store {t} ptr val mem)
 11073  	// cond: t.Size() == 2
 11074  	// result: (MOVHstore ptr val mem)
 11075  	for {
 11076  		t := auxToType(v.Aux)
 11077  		ptr := v_0
 11078  		val := v_1
 11079  		mem := v_2
 11080  		if !(t.Size() == 2) {
 11081  			break
 11082  		}
 11083  		v.reset(OpLOONG64MOVHstore)
 11084  		v.AddArg3(ptr, val, mem)
 11085  		return true
 11086  	}
 11087  	// match: (Store {t} ptr val mem)
 11088  	// cond: t.Size() == 4 && !t.IsFloat()
 11089  	// result: (MOVWstore ptr val mem)
 11090  	for {
 11091  		t := auxToType(v.Aux)
 11092  		ptr := v_0
 11093  		val := v_1
 11094  		mem := v_2
 11095  		if !(t.Size() == 4 && !t.IsFloat()) {
 11096  			break
 11097  		}
 11098  		v.reset(OpLOONG64MOVWstore)
 11099  		v.AddArg3(ptr, val, mem)
 11100  		return true
 11101  	}
 11102  	// match: (Store {t} ptr val mem)
 11103  	// cond: t.Size() == 8 && !t.IsFloat()
 11104  	// result: (MOVVstore ptr val mem)
 11105  	for {
 11106  		t := auxToType(v.Aux)
 11107  		ptr := v_0
 11108  		val := v_1
 11109  		mem := v_2
 11110  		if !(t.Size() == 8 && !t.IsFloat()) {
 11111  			break
 11112  		}
 11113  		v.reset(OpLOONG64MOVVstore)
 11114  		v.AddArg3(ptr, val, mem)
 11115  		return true
 11116  	}
 11117  	// match: (Store {t} ptr val mem)
 11118  	// cond: t.Size() == 4 && t.IsFloat()
 11119  	// result: (MOVFstore ptr val mem)
 11120  	for {
 11121  		t := auxToType(v.Aux)
 11122  		ptr := v_0
 11123  		val := v_1
 11124  		mem := v_2
 11125  		if !(t.Size() == 4 && t.IsFloat()) {
 11126  			break
 11127  		}
 11128  		v.reset(OpLOONG64MOVFstore)
 11129  		v.AddArg3(ptr, val, mem)
 11130  		return true
 11131  	}
 11132  	// match: (Store {t} ptr val mem)
 11133  	// cond: t.Size() == 8 && t.IsFloat()
 11134  	// result: (MOVDstore ptr val mem)
 11135  	for {
 11136  		t := auxToType(v.Aux)
 11137  		ptr := v_0
 11138  		val := v_1
 11139  		mem := v_2
 11140  		if !(t.Size() == 8 && t.IsFloat()) {
 11141  			break
 11142  		}
 11143  		v.reset(OpLOONG64MOVDstore)
 11144  		v.AddArg3(ptr, val, mem)
 11145  		return true
 11146  	}
 11147  	return false
 11148  }
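// Note: Zero of a constant size up to 16 bytes is expanded into a short
// sequence of zero stores, using overlapping stores for awkward sizes (e.g.
// Zero [7] issues 4-byte stores at offsets 3 and 0). Larger sizes over 16
// that are not a multiple of 8 are first split into an 8-byte-aligned bulk
// part plus a small tail; multiples of 8 up to 1024 bytes jump into the
// Duff's-device zeroing routine, with the AuxInt 8*(128-s/8) picking the
// entry point; and anything bigger falls back to the LoweredZero loop.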
 11149  func rewriteValueLOONG64_OpZero(v *Value) bool {
 11150  	v_1 := v.Args[1]
 11151  	v_0 := v.Args[0]
 11152  	b := v.Block
 11153  	typ := &b.Func.Config.Types
 11154  	// match: (Zero [0] _ mem)
 11155  	// result: mem
 11156  	for {
 11157  		if auxIntToInt64(v.AuxInt) != 0 {
 11158  			break
 11159  		}
 11160  		mem := v_1
 11161  		v.copyOf(mem)
 11162  		return true
 11163  	}
 11164  	// match: (Zero [1] ptr mem)
 11165  	// result: (MOVBstore ptr (MOVVconst [0]) mem)
 11166  	for {
 11167  		if auxIntToInt64(v.AuxInt) != 1 {
 11168  			break
 11169  		}
 11170  		ptr := v_0
 11171  		mem := v_1
 11172  		v.reset(OpLOONG64MOVBstore)
 11173  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11174  		v0.AuxInt = int64ToAuxInt(0)
 11175  		v.AddArg3(ptr, v0, mem)
 11176  		return true
 11177  	}
 11178  	// match: (Zero [2] ptr mem)
 11179  	// result: (MOVHstore ptr (MOVVconst [0]) mem)
 11180  	for {
 11181  		if auxIntToInt64(v.AuxInt) != 2 {
 11182  			break
 11183  		}
 11184  		ptr := v_0
 11185  		mem := v_1
 11186  		v.reset(OpLOONG64MOVHstore)
 11187  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11188  		v0.AuxInt = int64ToAuxInt(0)
 11189  		v.AddArg3(ptr, v0, mem)
 11190  		return true
 11191  	}
 11192  	// match: (Zero [3] ptr mem)
 11193  	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVHstore ptr (MOVVconst [0]) mem))
 11194  	for {
 11195  		if auxIntToInt64(v.AuxInt) != 3 {
 11196  			break
 11197  		}
 11198  		ptr := v_0
 11199  		mem := v_1
 11200  		v.reset(OpLOONG64MOVBstore)
 11201  		v.AuxInt = int32ToAuxInt(2)
 11202  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11203  		v0.AuxInt = int64ToAuxInt(0)
 11204  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
 11205  		v1.AddArg3(ptr, v0, mem)
 11206  		v.AddArg3(ptr, v0, v1)
 11207  		return true
 11208  	}
 11209  	// match: (Zero [4] {t} ptr mem)
 11210  	// result: (MOVWstore ptr (MOVVconst [0]) mem)
 11211  	for {
 11212  		if auxIntToInt64(v.AuxInt) != 4 {
 11213  			break
 11214  		}
 11215  		ptr := v_0
 11216  		mem := v_1
 11217  		v.reset(OpLOONG64MOVWstore)
 11218  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11219  		v0.AuxInt = int64ToAuxInt(0)
 11220  		v.AddArg3(ptr, v0, mem)
 11221  		return true
 11222  	}
 11223  	// match: (Zero [5] ptr mem)
 11224  	// result: (MOVBstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 11225  	for {
 11226  		if auxIntToInt64(v.AuxInt) != 5 {
 11227  			break
 11228  		}
 11229  		ptr := v_0
 11230  		mem := v_1
 11231  		v.reset(OpLOONG64MOVBstore)
 11232  		v.AuxInt = int32ToAuxInt(4)
 11233  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11234  		v0.AuxInt = int64ToAuxInt(0)
 11235  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 11236  		v1.AddArg3(ptr, v0, mem)
 11237  		v.AddArg3(ptr, v0, v1)
 11238  		return true
 11239  	}
 11240  	// match: (Zero [6] ptr mem)
 11241  	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 11242  	for {
 11243  		if auxIntToInt64(v.AuxInt) != 6 {
 11244  			break
 11245  		}
 11246  		ptr := v_0
 11247  		mem := v_1
 11248  		v.reset(OpLOONG64MOVHstore)
 11249  		v.AuxInt = int32ToAuxInt(4)
 11250  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11251  		v0.AuxInt = int64ToAuxInt(0)
 11252  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 11253  		v1.AddArg3(ptr, v0, mem)
 11254  		v.AddArg3(ptr, v0, v1)
 11255  		return true
 11256  	}
 11257  	// match: (Zero [7] ptr mem)
 11258  	// result: (MOVWstore [3] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 11259  	for {
 11260  		if auxIntToInt64(v.AuxInt) != 7 {
 11261  			break
 11262  		}
 11263  		ptr := v_0
 11264  		mem := v_1
 11265  		v.reset(OpLOONG64MOVWstore)
 11266  		v.AuxInt = int32ToAuxInt(3)
 11267  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11268  		v0.AuxInt = int64ToAuxInt(0)
 11269  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 11270  		v1.AddArg3(ptr, v0, mem)
 11271  		v.AddArg3(ptr, v0, v1)
 11272  		return true
 11273  	}
 11274  	// match: (Zero [8] {t} ptr mem)
 11275  	// result: (MOVVstore ptr (MOVVconst [0]) mem)
 11276  	for {
 11277  		if auxIntToInt64(v.AuxInt) != 8 {
 11278  			break
 11279  		}
 11280  		ptr := v_0
 11281  		mem := v_1
 11282  		v.reset(OpLOONG64MOVVstore)
 11283  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11284  		v0.AuxInt = int64ToAuxInt(0)
 11285  		v.AddArg3(ptr, v0, mem)
 11286  		return true
 11287  	}
 11288  	// match: (Zero [9] ptr mem)
 11289  	// result: (MOVBstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11290  	for {
 11291  		if auxIntToInt64(v.AuxInt) != 9 {
 11292  			break
 11293  		}
 11294  		ptr := v_0
 11295  		mem := v_1
 11296  		v.reset(OpLOONG64MOVBstore)
 11297  		v.AuxInt = int32ToAuxInt(8)
 11298  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11299  		v0.AuxInt = int64ToAuxInt(0)
 11300  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11301  		v1.AddArg3(ptr, v0, mem)
 11302  		v.AddArg3(ptr, v0, v1)
 11303  		return true
 11304  	}
 11305  	// match: (Zero [10] ptr mem)
 11306  	// result: (MOVHstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11307  	for {
 11308  		if auxIntToInt64(v.AuxInt) != 10 {
 11309  			break
 11310  		}
 11311  		ptr := v_0
 11312  		mem := v_1
 11313  		v.reset(OpLOONG64MOVHstore)
 11314  		v.AuxInt = int32ToAuxInt(8)
 11315  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11316  		v0.AuxInt = int64ToAuxInt(0)
 11317  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11318  		v1.AddArg3(ptr, v0, mem)
 11319  		v.AddArg3(ptr, v0, v1)
 11320  		return true
 11321  	}
 11322  	// match: (Zero [11] ptr mem)
 11323  	// result: (MOVWstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11324  	for {
 11325  		if auxIntToInt64(v.AuxInt) != 11 {
 11326  			break
 11327  		}
 11328  		ptr := v_0
 11329  		mem := v_1
 11330  		v.reset(OpLOONG64MOVWstore)
 11331  		v.AuxInt = int32ToAuxInt(7)
 11332  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11333  		v0.AuxInt = int64ToAuxInt(0)
 11334  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11335  		v1.AddArg3(ptr, v0, mem)
 11336  		v.AddArg3(ptr, v0, v1)
 11337  		return true
 11338  	}
 11339  	// match: (Zero [12] ptr mem)
 11340  	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11341  	for {
 11342  		if auxIntToInt64(v.AuxInt) != 12 {
 11343  			break
 11344  		}
 11345  		ptr := v_0
 11346  		mem := v_1
 11347  		v.reset(OpLOONG64MOVWstore)
 11348  		v.AuxInt = int32ToAuxInt(8)
 11349  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11350  		v0.AuxInt = int64ToAuxInt(0)
 11351  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11352  		v1.AddArg3(ptr, v0, mem)
 11353  		v.AddArg3(ptr, v0, v1)
 11354  		return true
 11355  	}
 11356  	// match: (Zero [13] ptr mem)
 11357  	// result: (MOVVstore [5] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11358  	for {
 11359  		if auxIntToInt64(v.AuxInt) != 13 {
 11360  			break
 11361  		}
 11362  		ptr := v_0
 11363  		mem := v_1
 11364  		v.reset(OpLOONG64MOVVstore)
 11365  		v.AuxInt = int32ToAuxInt(5)
 11366  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11367  		v0.AuxInt = int64ToAuxInt(0)
 11368  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11369  		v1.AddArg3(ptr, v0, mem)
 11370  		v.AddArg3(ptr, v0, v1)
 11371  		return true
 11372  	}
 11373  	// match: (Zero [14] ptr mem)
 11374  	// result: (MOVVstore [6] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11375  	for {
 11376  		if auxIntToInt64(v.AuxInt) != 14 {
 11377  			break
 11378  		}
 11379  		ptr := v_0
 11380  		mem := v_1
 11381  		v.reset(OpLOONG64MOVVstore)
 11382  		v.AuxInt = int32ToAuxInt(6)
 11383  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11384  		v0.AuxInt = int64ToAuxInt(0)
 11385  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11386  		v1.AddArg3(ptr, v0, mem)
 11387  		v.AddArg3(ptr, v0, v1)
 11388  		return true
 11389  	}
 11390  	// match: (Zero [15] ptr mem)
 11391  	// result: (MOVVstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11392  	for {
 11393  		if auxIntToInt64(v.AuxInt) != 15 {
 11394  			break
 11395  		}
 11396  		ptr := v_0
 11397  		mem := v_1
 11398  		v.reset(OpLOONG64MOVVstore)
 11399  		v.AuxInt = int32ToAuxInt(7)
 11400  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11401  		v0.AuxInt = int64ToAuxInt(0)
 11402  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11403  		v1.AddArg3(ptr, v0, mem)
 11404  		v.AddArg3(ptr, v0, v1)
 11405  		return true
 11406  	}
 11407  	// match: (Zero [16] ptr mem)
 11408  	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11409  	for {
 11410  		if auxIntToInt64(v.AuxInt) != 16 {
 11411  			break
 11412  		}
 11413  		ptr := v_0
 11414  		mem := v_1
 11415  		v.reset(OpLOONG64MOVVstore)
 11416  		v.AuxInt = int32ToAuxInt(8)
 11417  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11418  		v0.AuxInt = int64ToAuxInt(0)
 11419  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11420  		v1.AddArg3(ptr, v0, mem)
 11421  		v.AddArg3(ptr, v0, v1)
 11422  		return true
 11423  	}
 11424  	// match: (Zero [s] ptr mem)
 11425  	// cond: s%8 != 0 && s > 16
 11426  	// result: (Zero [s%8] (OffPtr <ptr.Type> ptr [s-s%8]) (Zero [s-s%8] ptr mem))
 11427  	for {
 11428  		s := auxIntToInt64(v.AuxInt)
 11429  		ptr := v_0
 11430  		mem := v_1
 11431  		if !(s%8 != 0 && s > 16) {
 11432  			break
 11433  		}
 11434  		v.reset(OpZero)
 11435  		v.AuxInt = int64ToAuxInt(s % 8)
 11436  		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
 11437  		v0.AuxInt = int64ToAuxInt(s - s%8)
 11438  		v0.AddArg(ptr)
 11439  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 11440  		v1.AuxInt = int64ToAuxInt(s - s%8)
 11441  		v1.AddArg2(ptr, mem)
 11442  		v.AddArg2(v0, v1)
 11443  		return true
 11444  	}
 11445  	// match: (Zero [s] ptr mem)
 11446  	// cond: s%8 == 0 && s > 16 && s <= 8*128
 11447  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
 11448  	for {
 11449  		s := auxIntToInt64(v.AuxInt)
 11450  		ptr := v_0
 11451  		mem := v_1
 11452  		if !(s%8 == 0 && s > 16 && s <= 8*128) {
 11453  			break
 11454  		}
 11455  		v.reset(OpLOONG64DUFFZERO)
 11456  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
 11457  		v.AddArg2(ptr, mem)
 11458  		return true
 11459  	}
 11460  	// match: (Zero [s] ptr mem)
 11461  	// cond: s%8 == 0 && s > 8*128
 11462  	// result: (LoweredZero ptr (ADDVconst <ptr.Type> ptr [s-8]) mem)
 11463  	for {
 11464  		s := auxIntToInt64(v.AuxInt)
 11465  		ptr := v_0
 11466  		mem := v_1
 11467  		if !(s%8 == 0 && s > 8*128) {
 11468  			break
 11469  		}
 11470  		v.reset(OpLOONG64LoweredZero)
 11471  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, ptr.Type)
 11472  		v0.AuxInt = int64ToAuxInt(s - 8)
 11473  		v0.AddArg(ptr)
 11474  		v.AddArg3(ptr, v0, mem)
 11475  		return true
 11476  	}
 11477  	return false
 11478  }
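// Note: rewriteBlockLOONG64 lowers control flow. Generic If blocks become NE
// on a zero-extended boolean; EQ/NE blocks whose control is a comparison are
// folded into the fused branch forms (BEQ/BNE, BGE/BLT, BGEU/BLTU) or the
// compare-against-zero forms (GEZ/LEZ/GTZ/LTZ); and a constant control
// collapses the block to First, swapping successors when the branch is known
// not to be taken.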
 11479  func rewriteBlockLOONG64(b *Block) bool {
 11480  	typ := &b.Func.Config.Types
 11481  	switch b.Kind {
 11482  	case BlockLOONG64EQ:
 11483  		// match: (EQ (FPFlagTrue cmp) yes no)
 11484  		// result: (FPF cmp yes no)
 11485  		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
 11486  			v_0 := b.Controls[0]
 11487  			cmp := v_0.Args[0]
 11488  			b.resetWithControl(BlockLOONG64FPF, cmp)
 11489  			return true
 11490  		}
 11491  		// match: (EQ (FPFlagFalse cmp) yes no)
 11492  		// result: (FPT cmp yes no)
 11493  		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
 11494  			v_0 := b.Controls[0]
 11495  			cmp := v_0.Args[0]
 11496  			b.resetWithControl(BlockLOONG64FPT, cmp)
 11497  			return true
 11498  		}
 11499  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
 11500  		// result: (NE cmp yes no)
 11501  		for b.Controls[0].Op == OpLOONG64XORconst {
 11502  			v_0 := b.Controls[0]
 11503  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11504  				break
 11505  			}
 11506  			cmp := v_0.Args[0]
 11507  			if cmp.Op != OpLOONG64SGT {
 11508  				break
 11509  			}
 11510  			b.resetWithControl(BlockLOONG64NE, cmp)
 11511  			return true
 11512  		}
 11513  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
 11514  		// result: (NE cmp yes no)
 11515  		for b.Controls[0].Op == OpLOONG64XORconst {
 11516  			v_0 := b.Controls[0]
 11517  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11518  				break
 11519  			}
 11520  			cmp := v_0.Args[0]
 11521  			if cmp.Op != OpLOONG64SGTU {
 11522  				break
 11523  			}
 11524  			b.resetWithControl(BlockLOONG64NE, cmp)
 11525  			return true
 11526  		}
 11527  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
 11528  		// result: (NE cmp yes no)
 11529  		for b.Controls[0].Op == OpLOONG64XORconst {
 11530  			v_0 := b.Controls[0]
 11531  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11532  				break
 11533  			}
 11534  			cmp := v_0.Args[0]
 11535  			if cmp.Op != OpLOONG64SGTconst {
 11536  				break
 11537  			}
 11538  			b.resetWithControl(BlockLOONG64NE, cmp)
 11539  			return true
 11540  		}
 11541  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
 11542  		// result: (NE cmp yes no)
 11543  		for b.Controls[0].Op == OpLOONG64XORconst {
 11544  			v_0 := b.Controls[0]
 11545  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11546  				break
 11547  			}
 11548  			cmp := v_0.Args[0]
 11549  			if cmp.Op != OpLOONG64SGTUconst {
 11550  				break
 11551  			}
 11552  			b.resetWithControl(BlockLOONG64NE, cmp)
 11553  			return true
 11554  		}
 11555  		// match: (EQ (SGTUconst [1] x) yes no)
 11556  		// result: (NE x yes no)
 11557  		for b.Controls[0].Op == OpLOONG64SGTUconst {
 11558  			v_0 := b.Controls[0]
 11559  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11560  				break
 11561  			}
 11562  			x := v_0.Args[0]
 11563  			b.resetWithControl(BlockLOONG64NE, x)
 11564  			return true
 11565  		}
 11566  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
 11567  		// result: (EQ x yes no)
 11568  		for b.Controls[0].Op == OpLOONG64SGTU {
 11569  			v_0 := b.Controls[0]
 11570  			_ = v_0.Args[1]
 11571  			x := v_0.Args[0]
 11572  			v_0_1 := v_0.Args[1]
 11573  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11574  				break
 11575  			}
 11576  			b.resetWithControl(BlockLOONG64EQ, x)
 11577  			return true
 11578  		}
 11579  		// match: (EQ (SGTconst [0] x) yes no)
 11580  		// result: (GEZ x yes no)
 11581  		for b.Controls[0].Op == OpLOONG64SGTconst {
 11582  			v_0 := b.Controls[0]
 11583  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11584  				break
 11585  			}
 11586  			x := v_0.Args[0]
 11587  			b.resetWithControl(BlockLOONG64GEZ, x)
 11588  			return true
 11589  		}
 11590  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
 11591  		// result: (LEZ x yes no)
 11592  		for b.Controls[0].Op == OpLOONG64SGT {
 11593  			v_0 := b.Controls[0]
 11594  			_ = v_0.Args[1]
 11595  			x := v_0.Args[0]
 11596  			v_0_1 := v_0.Args[1]
 11597  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11598  				break
 11599  			}
 11600  			b.resetWithControl(BlockLOONG64LEZ, x)
 11601  			return true
 11602  		}
 11603  		// match: (EQ (SGTU (MOVVconst [c]) y) yes no)
 11604  		// cond: c >= -2048 && c <= 2047
 11605  		// result: (EQ (SGTUconst [c] y) yes no)
 11606  		for b.Controls[0].Op == OpLOONG64SGTU {
 11607  			v_0 := b.Controls[0]
 11608  			y := v_0.Args[1]
 11609  			v_0_0 := v_0.Args[0]
 11610  			if v_0_0.Op != OpLOONG64MOVVconst {
 11611  				break
 11612  			}
 11613  			c := auxIntToInt64(v_0_0.AuxInt)
 11614  			if !(c >= -2048 && c <= 2047) {
 11615  				break
 11616  			}
 11617  			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
 11618  			v0.AuxInt = int64ToAuxInt(c)
 11619  			v0.AddArg(y)
 11620  			b.resetWithControl(BlockLOONG64EQ, v0)
 11621  			return true
 11622  		}
 11623  		// match: (EQ (SUBV x y) yes no)
 11624  		// result: (BEQ x y yes no)
 11625  		for b.Controls[0].Op == OpLOONG64SUBV {
 11626  			v_0 := b.Controls[0]
 11627  			y := v_0.Args[1]
 11628  			x := v_0.Args[0]
 11629  			b.resetWithControl2(BlockLOONG64BEQ, x, y)
 11630  			return true
 11631  		}
 11632  		// match: (EQ (SGT x y) yes no)
 11633  		// result: (BGE y x yes no)
 11634  		for b.Controls[0].Op == OpLOONG64SGT {
 11635  			v_0 := b.Controls[0]
 11636  			y := v_0.Args[1]
 11637  			x := v_0.Args[0]
 11638  			b.resetWithControl2(BlockLOONG64BGE, y, x)
 11639  			return true
 11640  		}
 11641  		// match: (EQ (SGTU x y) yes no)
 11642  		// result: (BGEU y x yes no)
 11643  		for b.Controls[0].Op == OpLOONG64SGTU {
 11644  			v_0 := b.Controls[0]
 11645  			y := v_0.Args[1]
 11646  			x := v_0.Args[0]
 11647  			b.resetWithControl2(BlockLOONG64BGEU, y, x)
 11648  			return true
 11649  		}
 11650  		// match: (EQ (MOVVconst [0]) yes no)
 11651  		// result: (First yes no)
 11652  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11653  			v_0 := b.Controls[0]
 11654  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11655  				break
 11656  			}
 11657  			b.Reset(BlockFirst)
 11658  			return true
 11659  		}
 11660  		// match: (EQ (MOVVconst [c]) yes no)
 11661  		// cond: c != 0
 11662  		// result: (First no yes)
 11663  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11664  			v_0 := b.Controls[0]
 11665  			c := auxIntToInt64(v_0.AuxInt)
 11666  			if !(c != 0) {
 11667  				break
 11668  			}
 11669  			b.Reset(BlockFirst)
 11670  			b.swapSuccessors()
 11671  			return true
 11672  		}
 11673  	case BlockLOONG64GEZ:
 11674  		// match: (GEZ (MOVVconst [c]) yes no)
 11675  		// cond: c >= 0
 11676  		// result: (First yes no)
 11677  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11678  			v_0 := b.Controls[0]
 11679  			c := auxIntToInt64(v_0.AuxInt)
 11680  			if !(c >= 0) {
 11681  				break
 11682  			}
 11683  			b.Reset(BlockFirst)
 11684  			return true
 11685  		}
 11686  		// match: (GEZ (MOVVconst [c]) yes no)
 11687  		// cond: c < 0
 11688  		// result: (First no yes)
 11689  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11690  			v_0 := b.Controls[0]
 11691  			c := auxIntToInt64(v_0.AuxInt)
 11692  			if !(c < 0) {
 11693  				break
 11694  			}
 11695  			b.Reset(BlockFirst)
 11696  			b.swapSuccessors()
 11697  			return true
 11698  		}
 11699  	case BlockLOONG64GTZ:
 11700  		// match: (GTZ (MOVVconst [c]) yes no)
 11701  		// cond: c > 0
 11702  		// result: (First yes no)
 11703  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11704  			v_0 := b.Controls[0]
 11705  			c := auxIntToInt64(v_0.AuxInt)
 11706  			if !(c > 0) {
 11707  				break
 11708  			}
 11709  			b.Reset(BlockFirst)
 11710  			return true
 11711  		}
 11712  		// match: (GTZ (MOVVconst [c]) yes no)
 11713  		// cond: c <= 0
 11714  		// result: (First no yes)
 11715  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11716  			v_0 := b.Controls[0]
 11717  			c := auxIntToInt64(v_0.AuxInt)
 11718  			if !(c <= 0) {
 11719  				break
 11720  			}
 11721  			b.Reset(BlockFirst)
 11722  			b.swapSuccessors()
 11723  			return true
 11724  		}
 11725  	case BlockIf:
 11726  		// match: (If cond yes no)
 11727  		// result: (NE (MOVBUreg <typ.UInt64> cond) yes no)
 11728  		for {
 11729  			cond := b.Controls[0]
 11730  			v0 := b.NewValue0(cond.Pos, OpLOONG64MOVBUreg, typ.UInt64)
 11731  			v0.AddArg(cond)
 11732  			b.resetWithControl(BlockLOONG64NE, v0)
 11733  			return true
 11734  		}
 11735  	case BlockLOONG64LEZ:
 11736  		// match: (LEZ (MOVVconst [c]) yes no)
 11737  		// cond: c <= 0
 11738  		// result: (First yes no)
 11739  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11740  			v_0 := b.Controls[0]
 11741  			c := auxIntToInt64(v_0.AuxInt)
 11742  			if !(c <= 0) {
 11743  				break
 11744  			}
 11745  			b.Reset(BlockFirst)
 11746  			return true
 11747  		}
 11748  		// match: (LEZ (MOVVconst [c]) yes no)
 11749  		// cond: c > 0
 11750  		// result: (First no yes)
 11751  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11752  			v_0 := b.Controls[0]
 11753  			c := auxIntToInt64(v_0.AuxInt)
 11754  			if !(c > 0) {
 11755  				break
 11756  			}
 11757  			b.Reset(BlockFirst)
 11758  			b.swapSuccessors()
 11759  			return true
 11760  		}
 11761  	case BlockLOONG64LTZ:
 11762  		// match: (LTZ (MOVVconst [c]) yes no)
 11763  		// cond: c < 0
 11764  		// result: (First yes no)
 11765  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11766  			v_0 := b.Controls[0]
 11767  			c := auxIntToInt64(v_0.AuxInt)
 11768  			if !(c < 0) {
 11769  				break
 11770  			}
 11771  			b.Reset(BlockFirst)
 11772  			return true
 11773  		}
 11774  		// match: (LTZ (MOVVconst [c]) yes no)
 11775  		// cond: c >= 0
 11776  		// result: (First no yes)
 11777  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11778  			v_0 := b.Controls[0]
 11779  			c := auxIntToInt64(v_0.AuxInt)
 11780  			if !(c >= 0) {
 11781  				break
 11782  			}
 11783  			b.Reset(BlockFirst)
 11784  			b.swapSuccessors()
 11785  			return true
 11786  		}
 11787  	case BlockLOONG64NE:
 11788  		// match: (NE (FPFlagTrue cmp) yes no)
 11789  		// result: (FPT cmp yes no)
 11790  		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
 11791  			v_0 := b.Controls[0]
 11792  			cmp := v_0.Args[0]
 11793  			b.resetWithControl(BlockLOONG64FPT, cmp)
 11794  			return true
 11795  		}
 11796  		// match: (NE (FPFlagFalse cmp) yes no)
 11797  		// result: (FPF cmp yes no)
 11798  		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
 11799  			v_0 := b.Controls[0]
 11800  			cmp := v_0.Args[0]
 11801  			b.resetWithControl(BlockLOONG64FPF, cmp)
 11802  			return true
 11803  		}
 11804  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 11805  		// result: (EQ cmp yes no)
 11806  		for b.Controls[0].Op == OpLOONG64XORconst {
 11807  			v_0 := b.Controls[0]
 11808  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11809  				break
 11810  			}
 11811  			cmp := v_0.Args[0]
 11812  			if cmp.Op != OpLOONG64SGT {
 11813  				break
 11814  			}
 11815  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11816  			return true
 11817  		}
 11818  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 11819  		// result: (EQ cmp yes no)
 11820  		for b.Controls[0].Op == OpLOONG64XORconst {
 11821  			v_0 := b.Controls[0]
 11822  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11823  				break
 11824  			}
 11825  			cmp := v_0.Args[0]
 11826  			if cmp.Op != OpLOONG64SGTU {
 11827  				break
 11828  			}
 11829  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11830  			return true
 11831  		}
 11832  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 11833  		// result: (EQ cmp yes no)
 11834  		for b.Controls[0].Op == OpLOONG64XORconst {
 11835  			v_0 := b.Controls[0]
 11836  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11837  				break
 11838  			}
 11839  			cmp := v_0.Args[0]
 11840  			if cmp.Op != OpLOONG64SGTconst {
 11841  				break
 11842  			}
 11843  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11844  			return true
 11845  		}
 11846  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 11847  		// result: (EQ cmp yes no)
 11848  		for b.Controls[0].Op == OpLOONG64XORconst {
 11849  			v_0 := b.Controls[0]
 11850  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11851  				break
 11852  			}
 11853  			cmp := v_0.Args[0]
 11854  			if cmp.Op != OpLOONG64SGTUconst {
 11855  				break
 11856  			}
 11857  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11858  			return true
 11859  		}
 11860  		// match: (NE (SGTUconst [1] x) yes no)
 11861  		// result: (EQ x yes no)
 11862  		for b.Controls[0].Op == OpLOONG64SGTUconst {
 11863  			v_0 := b.Controls[0]
 11864  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11865  				break
 11866  			}
 11867  			x := v_0.Args[0]
 11868  			b.resetWithControl(BlockLOONG64EQ, x)
 11869  			return true
 11870  		}
 11871  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
 11872  		// result: (NE x yes no)
 11873  		for b.Controls[0].Op == OpLOONG64SGTU {
 11874  			v_0 := b.Controls[0]
 11875  			_ = v_0.Args[1]
 11876  			x := v_0.Args[0]
 11877  			v_0_1 := v_0.Args[1]
 11878  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11879  				break
 11880  			}
 11881  			b.resetWithControl(BlockLOONG64NE, x)
 11882  			return true
 11883  		}
 11884  		// match: (NE (SGTconst [0] x) yes no)
 11885  		// result: (LTZ x yes no)
 11886  		for b.Controls[0].Op == OpLOONG64SGTconst {
 11887  			v_0 := b.Controls[0]
 11888  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11889  				break
 11890  			}
 11891  			x := v_0.Args[0]
 11892  			b.resetWithControl(BlockLOONG64LTZ, x)
 11893  			return true
 11894  		}
 11895  		// match: (NE (SGT x (MOVVconst [0])) yes no)
 11896  		// result: (GTZ x yes no)
 11897  		for b.Controls[0].Op == OpLOONG64SGT {
 11898  			v_0 := b.Controls[0]
 11899  			_ = v_0.Args[1]
 11900  			x := v_0.Args[0]
 11901  			v_0_1 := v_0.Args[1]
 11902  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11903  				break
 11904  			}
 11905  			b.resetWithControl(BlockLOONG64GTZ, x)
 11906  			return true
 11907  		}
 11908  		// match: (NE (SGTU (MOVVconst [c]) y) yes no)
 11909  		// cond: c >= -2048 && c <= 2047
 11910  		// result: (NE (SGTUconst [c] y) yes no)
 11911  		for b.Controls[0].Op == OpLOONG64SGTU {
 11912  			v_0 := b.Controls[0]
 11913  			y := v_0.Args[1]
 11914  			v_0_0 := v_0.Args[0]
 11915  			if v_0_0.Op != OpLOONG64MOVVconst {
 11916  				break
 11917  			}
 11918  			c := auxIntToInt64(v_0_0.AuxInt)
 11919  			if !(c >= -2048 && c <= 2047) {
 11920  				break
 11921  			}
 11922  			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
 11923  			v0.AuxInt = int64ToAuxInt(c)
 11924  			v0.AddArg(y)
 11925  			b.resetWithControl(BlockLOONG64NE, v0)
 11926  			return true
 11927  		}
 11928  		// match: (NE (SUBV x y) yes no)
 11929  		// result: (BNE x y yes no)
 11930  		for b.Controls[0].Op == OpLOONG64SUBV {
 11931  			v_0 := b.Controls[0]
 11932  			y := v_0.Args[1]
 11933  			x := v_0.Args[0]
 11934  			b.resetWithControl2(BlockLOONG64BNE, x, y)
 11935  			return true
 11936  		}
 11937  		// match: (NE (SGT x y) yes no)
 11938  		// result: (BLT y x yes no)
 11939  		for b.Controls[0].Op == OpLOONG64SGT {
 11940  			v_0 := b.Controls[0]
 11941  			y := v_0.Args[1]
 11942  			x := v_0.Args[0]
 11943  			b.resetWithControl2(BlockLOONG64BLT, y, x)
 11944  			return true
 11945  		}
 11946  		// match: (NE (SGTU x y) yes no)
 11947  		// result: (BLTU y x yes no)
 11948  		for b.Controls[0].Op == OpLOONG64SGTU {
 11949  			v_0 := b.Controls[0]
 11950  			y := v_0.Args[1]
 11951  			x := v_0.Args[0]
 11952  			b.resetWithControl2(BlockLOONG64BLTU, y, x)
 11953  			return true
 11954  		}
 11955  		// match: (NE (MOVVconst [0]) yes no)
 11956  		// result: (First no yes)
 11957  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11958  			v_0 := b.Controls[0]
 11959  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11960  				break
 11961  			}
 11962  			b.Reset(BlockFirst)
 11963  			b.swapSuccessors()
 11964  			return true
 11965  		}
 11966  		// match: (NE (MOVVconst [c]) yes no)
 11967  		// cond: c != 0
 11968  		// result: (First yes no)
 11969  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11970  			v_0 := b.Controls[0]
 11971  			c := auxIntToInt64(v_0.AuxInt)
 11972  			if !(c != 0) {
 11973  				break
 11974  			}
 11975  			b.Reset(BlockFirst)
 11976  			return true
 11977  		}
 11978  	}
 11979  	return false
 11980  }
 11981  
