Source file src/cmd/compile/internal/ssa/rewriteMIPS64.go

     1  // Code generated from _gen/MIPS64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
// rewriteValueMIPS64 lowers a generic SSA value to MIPS64 machine ops.
// Ops with a 1:1 lowering are rewritten in place by replacing v.Op;
// ops needing real rewrite rules dispatch to per-op helper functions.
// It reports whether v was rewritten.
//
// Generated from _gen/MIPS64.rules — do not hand-edit; change the rules
// file and re-run 'go generate' instead.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpMIPS64ABSD
		return true
	case OpAdd16:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32F:
		v.Op = OpMIPS64ADDF
		return true
	case OpAdd64:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd64F:
		v.Op = OpMIPS64ADDD
		return true
	case OpAdd8:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddPtr:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddr:
		return rewriteValueMIPS64_OpAddr(v)
	case OpAnd16:
		v.Op = OpMIPS64AND
		return true
	case OpAnd32:
		v.Op = OpMIPS64AND
		return true
	case OpAnd64:
		v.Op = OpMIPS64AND
		return true
	case OpAnd8:
		v.Op = OpMIPS64AND
		return true
	case OpAndB:
		v.Op = OpMIPS64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpMIPS64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpMIPS64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpMIPS64LoweredAtomicAnd32
		return true
	case OpAtomicAnd8:
		return rewriteValueMIPS64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpMIPS64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpMIPS64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpMIPS64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpMIPS64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpMIPS64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpMIPS64LoweredAtomicOr32
		return true
	case OpAtomicOr8:
		return rewriteValueMIPS64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpMIPS64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpMIPS64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpMIPS64CALLclosure
		return true
	case OpCom16:
		return rewriteValueMIPS64_OpCom16(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil(v)
	case OpCvt32Fto32:
		v.Op = OpMIPS64TRUNCFW
		return true
	case OpCvt32Fto64:
		v.Op = OpMIPS64TRUNCFV
		return true
	case OpCvt32Fto64F:
		v.Op = OpMIPS64MOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpMIPS64MOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpMIPS64MOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpMIPS64TRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpMIPS64MOVDF
		return true
	case OpCvt64Fto64:
		v.Op = OpMIPS64TRUNCDV
		return true
	case OpCvt64to32F:
		v.Op = OpMIPS64MOVVF
		return true
	case OpCvt64to64F:
		v.Op = OpMIPS64MOVVD
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpMIPS64DIVF
		return true
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpMIPS64DIVD
		return true
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr(v)
	case OpGetCallerPC:
		v.Op = OpMIPS64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpMIPS64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpMIPS64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u(v)
	case OpInterCall:
		v.Op = OpMIPS64CALLinter
		return true
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueMIPS64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
	case OpMIPS64LoweredPanicBoundsCR:
		return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v)
	case OpMIPS64LoweredPanicBoundsRC:
		return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v)
	case OpMIPS64LoweredPanicBoundsRR:
		return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload(v)
	case OpMIPS64MOVVnop:
		return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32(v)
	case OpMul32F:
		v.Op = OpMIPS64MULF
		return true
	case OpMul64:
		return rewriteValueMIPS64_OpMul64(v)
	case OpMul64F:
		v.Op = OpMIPS64MULD
		return true
	case OpMul64uhilo:
		v.Op = OpMIPS64MULVU
		return true
	case OpMul8:
		return rewriteValueMIPS64_OpMul8(v)
	case OpNeg16:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32F:
		v.Op = OpMIPS64NEGF
		return true
	case OpNeg64:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg64F:
		v.Op = OpMIPS64NEGD
		return true
	case OpNeg8:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8(v)
	case OpNeqB:
		v.Op = OpMIPS64XOR
		return true
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpMIPS64LoweredNilCheck
		return true
	case OpNot:
		return rewriteValueMIPS64_OpNot(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpMIPS64OR
		return true
	case OpOr32:
		v.Op = OpMIPS64OR
		return true
	case OpOr64:
		v.Op = OpMIPS64OR
		return true
	case OpOr8:
		v.Op = OpMIPS64OR
		return true
	case OpOrB:
		v.Op = OpMIPS64OR
		return true
	case OpPanicBounds:
		v.Op = OpMIPS64LoweredPanicBoundsRR
		return true
	case OpPubBarrier:
		v.Op = OpMIPS64LoweredPubBarrier
		return true
	case OpRotateLeft16:
		return rewriteValueMIPS64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueMIPS64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueMIPS64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueMIPS64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpCopy
		return true
	case OpRound64F:
		v.Op = OpCopy
		return true
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpMIPS64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpMIPS64SQRTD
		return true
	case OpSqrt32:
		v.Op = OpMIPS64SQRTF
		return true
	case OpStaticCall:
		v.Op = OpMIPS64CALLstatic
		return true
	case OpStore:
		return rewriteValueMIPS64_OpStore(v)
	case OpSub16:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32F:
		v.Op = OpMIPS64SUBF
		return true
	case OpSub64:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub64F:
		v.Op = OpMIPS64SUBD
		return true
	case OpSub8:
		v.Op = OpMIPS64SUBV
		return true
	case OpSubPtr:
		v.Op = OpMIPS64SUBV
		return true
	case OpTailCall:
		v.Op = OpMIPS64CALLtail
		return true
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpMIPS64LoweredWB
		return true
	case OpXor16:
		v.Op = OpMIPS64XOR
		return true
	case OpXor32:
		v.Op = OpMIPS64XOR
		return true
	case OpXor64:
		v.Op = OpMIPS64XOR
		return true
	case OpXor8:
		v.Op = OpMIPS64XOR
		return true
	case OpZero:
		return rewriteValueMIPS64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpMIPS64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpMIPS64MOVBUreg
		return true
	}
	// No rule matched this op; leave v unchanged.
	return false
}
// rewriteValueMIPS64_OpAddr lowers a symbolic address to the MIPS64
// address-materialization op, carrying the symbol in the aux field.
// This rule always matches, so it unconditionally returns true.
func rewriteValueMIPS64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicAnd8 lowers an 8-bit atomic AND to a 32-bit
// LoweredAtomicAnd32 on the containing aligned word: the word address is
// ptr with its low two bits masked off (AND with ^3), and the operand is
// the byte value shifted into its lane (lane bit offset = (ptr&3)*8),
// widened with ones (via NORconst [0], i.e. bitwise NOT of the lane mask)
// so the other three bytes are left unchanged by the AND.
// The two rules differ only in byte-lane selection: on big-endian targets
// the low pointer bits are first flipped with XORconst [3].
func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) (NORconst [0] <typ.UInt64> (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAnd32)
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v3.AddArg2(v4, v5)
		v7 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(0)
		v8 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
		v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v9.AuxInt = int64ToAuxInt(0xff)
		// v5 (the lane shift amount) is deliberately shared between the
		// value shift and the mask shift.
		v8.AddArg2(v9, v5)
		v7.AddArg(v8)
		v2.AddArg2(v3, v7)
		v.AddArg3(v0, v2, mem)
		return true
	}
	// match: (AtomicAnd8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicAnd32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (OR <typ.UInt64> (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) (NORconst [0] <typ.UInt64> (SLLV <typ.UInt64> (MOVVconst [0xff]) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAnd32)
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		// Big-endian: invert the byte index within the word before
		// computing the lane shift.
		v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(3)
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v5.AddArg(v6)
		v3.AddArg2(v4, v5)
		v8 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
		v8.AuxInt = int64ToAuxInt(0)
		v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
		v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v10.AuxInt = int64ToAuxInt(0xff)
		v9.AddArg2(v10, v5)
		v8.AddArg(v9)
		v2.AddArg2(v3, v8)
		v.AddArg3(v0, v2, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpAtomicCompareAndSwap32 lowers a 32-bit CAS to
// LoweredAtomicCas32, sign-extending the expected old value to 64 bits
// first. This rule always matches, so it unconditionally returns true.
// NOTE(review): the sign extension presumably matches how the hardware
// LL/SC sequence observes the loaded word in a 64-bit register — confirm
// against the MIPS64 Cas32 lowering in the rules file.
func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32 ptr old new mem)
	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpMIPS64LoweredAtomicCas32)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicOr8 lowers an 8-bit atomic OR to a 32-bit
// LoweredAtomicOr32 on the containing aligned word: the word address is
// ptr with its low two bits masked off (AND with ^3), and the byte value
// is shifted into its lane (lane bit offset = (ptr&3)*8). Unlike the
// AtomicAnd8 lowering, no inverse mask is needed — ORing zero bits into
// the other lanes leaves them unchanged.
// The two rules differ only in byte-lane selection: on big-endian targets
// the low pointer bits are first flipped with XORconst [3].
func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicOr32)
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
	// match: (AtomicOr8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicOr32 (AND <typ.UInt32Ptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] (XORconst <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicOr32)
		v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		// Big-endian: invert the byte index within the word before
		// computing the lane shift.
		v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpAvg64u lowers an unsigned 64-bit average to
// y + (x-y)>>1, which avoids the overflow that a naive (x+y)>>1 would
// incur. This rule always matches, so it unconditionally returns true.
// NOTE(review): y+(x-y)>>1 equals (x+y)/2 only when x >= y; the generic
// Avg64u op's use sites presumably guarantee this — confirm against the
// genericOps definition.
func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueMIPS64_OpCom16 lowers 16-bit bitwise complement to
// (NOR (MOVVconst [0]) x), i.e. ^x = ^(0|x), since MIPS64 has no
// dedicated NOT instruction in this rule set.
func rewriteValueMIPS64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32 lowers 32-bit bitwise complement to
// (NOR (MOVVconst [0]) x); ^x = ^(0|x).
func rewriteValueMIPS64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64 lowers 64-bit bitwise complement to
// (NOR (MOVVconst [0]) x); ^x = ^(0|x).
func rewriteValueMIPS64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8 lowers 8-bit bitwise complement to
// (NOR (MOVVconst [0]) x); ^x = ^(0|x).
func rewriteValueMIPS64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16 materializes a 16-bit constant as a
// sign-extended 64-bit MOVVconst.
func rewriteValueMIPS64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32 materializes a 32-bit constant as a
// sign-extended 64-bit MOVVconst.
func rewriteValueMIPS64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32F materializes a float32 constant as
// MOVFconst; the aux value is widened to float64 for storage.
func rewriteValueMIPS64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64 materializes a 64-bit constant as MOVVconst.
func rewriteValueMIPS64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64F materializes a float64 constant as MOVDconst.
func rewriteValueMIPS64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst8 materializes an 8-bit constant as a
// sign-extended 64-bit MOVVconst.
func rewriteValueMIPS64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConstBool materializes a boolean constant as a
// MOVVconst of 0 or 1 (via b2i).
func rewriteValueMIPS64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}
// rewriteValueMIPS64_OpConstNil materializes the nil pointer constant
// as (MOVVconst [0]).
func rewriteValueMIPS64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16 lowers signed 16-bit division: sign-extend
// both operands to 64 bits, do a full DIVV (which produces a
// (remainder, quotient)-style tuple), and take the quotient via Select1.
func rewriteValueMIPS64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u lowers unsigned 16-bit division:
// zero-extend both operands to 64 bits, DIVVU, take the quotient half
// of the result tuple via Select1.
func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32 lowers signed 32-bit division: sign-extend
// to 64 bits, DIVV, take the quotient via Select1.
func rewriteValueMIPS64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u lowers unsigned 32-bit division:
// zero-extend to 64 bits, DIVVU, take the quotient via Select1.
func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64 lowers signed 64-bit division to DIVV,
// selecting the quotient half of the result tuple via Select1.
func rewriteValueMIPS64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64 x y)
	// result: (Select1 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u lowers unsigned 64-bit division to DIVVU,
// selecting the quotient via Select1.
func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64u x y)
	// result: (Select1 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8 lowers signed 8-bit division: sign-extend
// to 64 bits, DIVV, take the quotient via Select1.
func rewriteValueMIPS64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u lowers unsigned 8-bit division:
// zero-extend to 64 bits, DIVVU, take the quotient via Select1.
func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16 lowers 16-bit equality. With no direct
// equality instruction, x == y is computed as 1 >u (zext(x) ^ zext(y)):
// the XOR is zero exactly when the operands are equal.
func rewriteValueMIPS64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32 lowers 32-bit equality as
// 1 >u (zext(x) ^ zext(y)); the XOR is zero iff x == y.
func rewriteValueMIPS64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F lowers float32 equality to a CMPEQF
// compare followed by reading the FP condition flag (FPFlagTrue).
func rewriteValueMIPS64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64 lowers 64-bit equality as 1 >u (x ^ y);
// no extension is needed at full width.
func rewriteValueMIPS64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F lowers float64 equality to a CMPEQD
// compare followed by reading the FP condition flag (FPFlagTrue).
func rewriteValueMIPS64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8 lowers 8-bit equality as
// 1 >u (zext(x) ^ zext(y)); the XOR is zero iff x == y.
func rewriteValueMIPS64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB lowers boolean equality as 1 ^ (x ^ y):
// for 0/1-valued booleans, x^y is 1 iff they differ, so XOR-ing with 1
// inverts that into equality.
func rewriteValueMIPS64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr lowers pointer equality the same way as
// Eq64: 1 >u (x ^ y).
func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32 lowers signed 32-bit high-multiply:
// sign-extend both operands, take the low word of the MULV tuple
// (Select1), then arithmetic-shift right by 32 to expose the high half
// of the 64-bit product.
func rewriteValueMIPS64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u lowers unsigned 32-bit high-multiply:
// zero-extend both operands, take the low word of the MULVU tuple
// (Select1), then logical-shift right by 32 for the high half.
func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64 lowers signed 64-bit high-multiply to the
// high-word half of the MULV result tuple (Select0).
func rewriteValueMIPS64_OpHmul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64 x y)
	// result: (Select0 (MULV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u lowers unsigned 64-bit high-multiply to
// the high-word half of the MULVU result tuple (Select0).
func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64u x y)
	// result: (Select0 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds lowers the bounds check idx < len
// (unsigned) to (SGTU len idx).
func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil lowers the nil check to an unsigned
// compare ptr >u 0, i.e. (SGTU ptr (MOVVconst [0])).
func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds lowers the slice bounds check
// idx <= len (unsigned) as the negation !(idx >u len), computed by
// XOR-ing the SGTU result with 1.
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16 lowers signed 16-bit x <= y as
// !(sext(x) > sext(y)), negating the SGT result via XOR with 1.
func rewriteValueMIPS64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U lowers unsigned 16-bit x <= y as
// !(zext(x) >u zext(y)), negating the SGTU result via XOR with 1.
func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32 lowers signed 32-bit x <= y as
// !(sext(x) > sext(y)), negating the SGT result via XOR with 1.
func rewriteValueMIPS64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F lowers float32 x <= y as y >= x: the
// operands are swapped into (CMPGEF y x) and the FP flag is read.
func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U lowers unsigned 32-bit x <= y as
// !(zext(x) >u zext(y)), negating the SGTU result via XOR with 1.
func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64 lowers signed 64-bit x <= y as !(x > y),
// negating the SGT result via XOR with 1.
func rewriteValueMIPS64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F lowers float64 x <= y as y >= x: the
// operands are swapped into (CMPGED y x) and the FP flag is read.
func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U lowers unsigned 64-bit x <= y as
// !(x >u y), negating the SGTU result via XOR with 1.
func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8 lowers signed 8-bit x <= y as
// !(sext(x) > sext(y)), negating the SGT result via XOR with 1.
func rewriteValueMIPS64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U lowers unsigned 8-bit x <= y as
// !(zext(x) >u zext(y)), negating the SGTU result via XOR with 1.
func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16 lowers signed 16-bit x < y as the swapped
// compare sext(y) > sext(x) using SGT.
func rewriteValueMIPS64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U lowers unsigned 16-bit x < y as the
// swapped compare zext(y) >u zext(x) using SGTU.
func rewriteValueMIPS64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32 lowers signed 32-bit x < y as the swapped
// compare sext(y) > sext(x) using SGT.
func rewriteValueMIPS64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F lowers float32 x < y as y > x: the
// operands are swapped into (CMPGTF y x) and the FP flag is read.
func rewriteValueMIPS64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U lowers unsigned 32-bit x < y as the
// swapped compare zext(y) >u zext(x) using SGTU.
func rewriteValueMIPS64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64 lowers signed 64-bit x < y as the swapped
// compare (SGT y x).
func rewriteValueMIPS64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F lowers float64 x < y as y > x: the
// operands are swapped into (CMPGTD y x) and the FP flag is read.
func rewriteValueMIPS64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U lowers unsigned 64-bit x < y as the
// swapped compare (SGTU y x).
func rewriteValueMIPS64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8 lowers signed 8-bit x < y as the swapped
// compare sext(y) > sext(x) using SGT.
func rewriteValueMIPS64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U lowers unsigned 8-bit x < y as the
// swapped compare zext(y) >u zext(x) using SGTU.
func rewriteValueMIPS64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad selects the MIPS64 load instruction that matches
// the loaded type: width (8/16/32/64 bits), signedness (sign- vs zero-
// extending variants), and class (integer/pointer vs float). Rules are tried
// in order; returns false if no rule's type condition holds.
func rewriteValueMIPS64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLocalAddr lowers LocalAddr to MOVVaddr. When the
// addressed stack object contains pointers, the address is anchored to SP
// through an SPanchored value carrying the memory argument; otherwise the
// memory argument is dropped and MOVVaddr takes the base directly.
func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLsh16x16 lowers Lsh16x16. The raw SLLV result is
// ANDed with NEGV(SGTU(64, zext(y))): all-ones when the zero-extended shift
// amount is < 64, zero otherwise, so oversized shifts yield 0 as Go requires.
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		// v3 (the zero-extended shift amount) is shared by SGTU and SLLV.
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32 lowers Lsh16x32. Same shape as Lsh16x16 but
// the shift amount is zero-extended from 32 bits; the AND mask zeroes the
// result when the extended amount is >= 64.
func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64 lowers Lsh16x64. The 64-bit shift amount
// needs no extension; the AND with NEGV(SGTU(64, y)) zeroes the result for
// shift amounts >= 64.
func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8 lowers Lsh16x8. Same shape as Lsh16x16 but
// the shift amount is zero-extended from 8 bits; the AND mask zeroes the
// result when the extended amount is >= 64.
func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16 lowers Lsh32x16: SLLV of x by the 16-bit
// shift amount zero-extended to 64 bits, masked to zero (via AND with
// NEGV(SGTU(64, zext(y)))) when the amount is >= 64.
func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32 lowers Lsh32x32: SLLV of x by the 32-bit
// shift amount zero-extended to 64 bits, masked to zero when the amount
// is >= 64.
func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64 lowers Lsh32x64: SLLV of x by the raw 64-bit
// shift amount, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8 lowers Lsh32x8: SLLV of x by the 8-bit shift
// amount zero-extended to 64 bits, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16 lowers Lsh64x16: SLLV of x by the 16-bit
// shift amount zero-extended to 64 bits, masked to zero when the amount
// is >= 64.
func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32 lowers Lsh64x32: SLLV of x by the 32-bit
// shift amount zero-extended to 64 bits, masked to zero when the amount
// is >= 64.
func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64 lowers Lsh64x64: SLLV of x by the raw 64-bit
// shift amount, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8 lowers Lsh64x8: SLLV of x by the 8-bit shift
// amount zero-extended to 64 bits, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16 lowers Lsh8x16: SLLV of x by the 16-bit shift
// amount zero-extended to 64 bits, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32 lowers Lsh8x32: SLLV of x by the 32-bit shift
// amount zero-extended to 64 bits, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64 lowers Lsh8x64: SLLV of x by the raw 64-bit
// shift amount, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8 lowers Lsh8x8: SLLV of x by the 8-bit shift
// amount zero-extended to 64 bits, masked to zero when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV simplifies ADDV:
//   - folds a 32-bit-representable non-pointer constant operand into
//     ADDVconst (pointer-typed constants are excluded so pointer arithmetic
//     keeps its type);
//   - rewrites ADDV x (NEGV y) as SUBV x y.
// ADDV is commutative, so each rule tries both argument orders.
func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)
	for {
		// Commutative match: swap v_0/v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c) && !t.IsPtr()) {
				continue
			}
			v.reset(OpMIPS64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpMIPS64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst folds constant additions:
// merging offsets into MOVVaddr, eliding the identity add of 0,
// folding into MOVVconst, and collapsing chained ADDVconst/SUBVconst.
// Offset merges are guarded by is32Bit to avoid aux overflow.
func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND simplifies AND: folds a 32-bit-representable
// constant operand into ANDconst (trying both orders, since AND is
// commutative), and reduces the idempotent AND x x to x.
func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		// Commutative match: swap v_0/v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst folds constant ANDs: [0] yields the
// constant 0, [-1] is the identity, a constant operand folds to MOVVconst,
// and nested ANDconst values collapse by intersecting their masks.
func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32 folds a 32-bit-representable
// constant addend into the immediate form LoweredAtomicAddconst32.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64 folds a 32-bit-representable
// constant addend into the immediate form LoweredAtomicAddconst64.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst64)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32 rewrites an atomic store
// of the constant 0 to the dedicated LoweredAtomicStorezero32 form.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero32)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64 rewrites an atomic store
// of the constant 0 to the dedicated LoweredAtomicStorezero64 form.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero64)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR promotes a bounds-check
// panic with one constant and one register operand to the all-constant
// LoweredPanicBoundsCC form when the register operand is itself a constant.
func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpMIPS64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC promotes a bounds-check
// panic with a register x operand to the all-constant LoweredPanicBoundsCC
// form when that operand is a constant (note Cx/Cy order differs from the
// CR variant).
func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRC [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpMIPS64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR narrows a two-register
// bounds-check panic: if the second operand is constant it becomes the RC
// form, if the first is constant it becomes the CR form. Repeated rewriting
// can then reach the all-constant CC form.
func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		v.reset(OpMIPS64LoweredPanicBoundsRC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(x, mem)
		return true
	}
	// match: (LoweredPanicBoundsRR [kind] (MOVVconst [c]) y mem)
	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		y := v_1
		mem := v_2
		v.reset(OpMIPS64LoweredPanicBoundsCR)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(y, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload optimizes unsigned byte loads:
// folds an ADDVconst offset into the load's aux offset, merges a MOVVaddr
// symbol+offset into the load, and constant-folds a load from a read-only
// symbol into a MOVVconst. The first two rules are disabled for SB-based
// addresses under -shared (ptr.Op != OpSB || !config.ctxt.Flag_shared).
func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg rewrites MOVBUreg (zero-extend byte):
// the extension is redundant after an unsigned byte load or another MOVBUreg,
// so it becomes a plain MOVVreg; applied to a constant it folds to the
// zero-extended low byte.
func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload rewrites MOVBload (signed byte load):
// it folds an ADDVconst or MOVVaddr address computation into the load's
// offset/symbol aux fields, and constant-folds a load from a read-only symbol
// into a sign-extended MOVVconst.
func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg rewrites MOVBreg (sign-extend byte):
// the extension is redundant after a signed byte load or another MOVBreg,
// so it becomes a plain MOVVreg; applied to a constant it folds to the
// sign-extended low byte.
func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore rewrites MOVBstore (byte store):
// it folds ADDVconst/MOVVaddr address computations into the aux fields,
// turns a store of constant zero into MOVBstorezero, and strips
// sign/zero-extension ops from the stored value — only the low byte is
// written, so the extension cannot affect the result.
func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero rewrites MOVBstorezero (store of a
// zero byte): it folds an ADDVconst or MOVVaddr address computation into the
// store's offset/symbol aux fields.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload rewrites MOVDload (64-bit FP load):
// it forwards a just-stored integer value through a GP→FP register move
// (MOVVgpfp) instead of going through memory, and folds ADDVconst/MOVVaddr
// address computations into the load's aux fields.
func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
	// result: (MOVVgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVVgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore rewrites MOVDstore (64-bit FP store):
// storing a value that came through a GP→FP move (MOVVgpfp) becomes a direct
// integer store of the original value, and ADDVconst/MOVVaddr address
// computations are folded into the store's aux fields.
func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
	// result: (MOVVstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload rewrites MOVFload (32-bit FP load):
// the 32-bit analogue of MOVDload — it forwards a just-stored word through a
// GP→FP move (MOVWgpfp) and folds ADDVconst/MOVVaddr address computations
// into the load's aux fields.
func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (MOVWgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVWgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore rewrites MOVFstore (32-bit FP store):
// the 32-bit analogue of MOVDstore — a value that came through a GP→FP move
// (MOVWgpfp) is stored directly as an integer word, and ADDVconst/MOVVaddr
// address computations are folded into the store's aux fields.
func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload rewrites MOVHUload (unsigned halfword
// load): it folds ADDVconst/MOVVaddr address computations into the load's
// offset/symbol aux fields, and constant-folds a load from a read-only symbol
// into a MOVVconst (reading 16 bits in the target's byte order).
func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg rewrites MOVHUreg (zero-extend
// halfword): the extension is redundant when the operand is already
// zero-extended to at most 16 bits (unsigned byte/halfword loads or
// extensions), so it becomes a plain MOVVreg; applied to a constant it folds
// to the zero-extended low 16 bits.
func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload rewrites MOVHload (signed halfword
// load): it folds ADDVconst/MOVVaddr address computations into the load's
// offset/symbol aux fields, and constant-folds a load from a read-only symbol
// into a sign-extended MOVVconst (reading 16 bits in the target's byte order).
func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg rewrites MOVHreg (sign-extend halfword):
// the extension is redundant when the operand already fits in a sign-extended
// 16-bit value (byte/halfword loads or narrower extensions), so it becomes a
// plain MOVVreg; applied to a constant it folds to the sign-extended low
// 16 bits.
func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore applies the generated rewrite rules for
// MOVHstore: fold constant address offsets, merge static symbol addresses,
// turn stores of constant 0 into MOVHstorezero, and drop sign/zero extensions
// of the stored value (a halfword store only uses the low 16 bits anyway).
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero applies the generated rewrite rules
// for MOVHstorezero: fold ADDVconst offsets into the store's offset and merge
// MOVVaddr symbol+offset into the aux fields (both excluded for SB-based
// addresses in shared mode). It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload applies the generated rewrite rules for
// MOVVload (64-bit load): forward a just-stored FP value through a register
// move instead of reloading it, fold constant offsets and symbol addresses
// into the load, and constant-fold loads from read-only symbols.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (MOVVfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVVfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVnop applies the generated rewrite rule for
// MOVVnop: a no-op register move of a constant is just the constant.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVnop (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg applies the generated rewrite rules for
// MOVVreg: degrade a single-use register move to a MOVVnop (so regalloc may
// elide it), and fold a move of a constant to the constant itself.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore applies the generated rewrite rules for
// MOVVstore (64-bit store): store FP-to-GP-moved values directly from the FP
// register via MOVDstore, fold constant offsets and symbol addresses, and
// turn stores of constant 0 into MOVVstorezero.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero applies the generated rewrite rules
// for MOVVstorezero: fold ADDVconst offsets and merge MOVVaddr symbol+offset
// into the aux fields (both excluded for SB-based addresses in shared mode).
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload applies the generated rewrite rules for
// MOVWUload (32-bit zero-extending load): forward a just-stored float32 value
// through an FP-to-GP move (zero-extended), fold constant offsets and symbol
// addresses, and constant-fold loads from read-only symbols.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpZeroExt32to64)
		v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
		v0.AddArg(val)
		v.AddArg(v0)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg applies the generated rewrite rules for
// MOVWUreg (zero-extend low 32 bits): drop the extension when the operand is
// already a zero-extended load or a narrower/equal unsigned extension, and
// constant-fold extensions of MOVVconst.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload applies the generated rewrite rules for
// MOVWload (32-bit sign-extending load): fold constant offsets and symbol
// addresses into the load, and constant-fold (sign-extended) loads from
// read-only symbols. It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWreg applies the generated rewrite rules for
// MOVWreg (sign-extend low 32 bits): drop the extension when the operand is
// already a narrower/equal extending load or register extension whose result
// fits in 32 bits with the correct sign, and constant-fold MOVVconst operands.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore applies the generated rewrite rules for
// MOVWstore (32-bit store): store FP-to-GP-moved float32 values directly via
// MOVFstore, fold constant offsets and symbol addresses, turn stores of
// constant 0 into MOVWstorezero, and drop 32-bit sign/zero extensions of the
// stored value (only the low 32 bits are written anyway).
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
	// result: (MOVFstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero applies the generated rewrite rules
// for MOVWstorezero: fold ADDVconst offsets and merge MOVVaddr symbol+offset
// into the aux fields (both excluded for SB-based addresses in shared mode).
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV applies the generated rewrite rules for
// NEGV (64-bit negate): -(x-y) => y-x, double negation cancels, and negation
// of a constant folds (two's-complement wraparound semantics for -c).
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGV (SUBV x y))
	// result: (SUBV y x)
	for {
		if v_0.Op != OpMIPS64SUBV {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEGV (NEGV x))
	// result: x
	for {
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR rewrites MIPS64 NOR values (generated
// from _gen/MIPS64.rules), folding a 32-bit-representable constant operand
// into NORconst. It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		// NOR is commutative: the inner loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst constant-folds MIPS64 NORconst
// applied to a constant (generated from _gen/MIPS64.rules). It reports
// whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [^(c|d)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(^(c | d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR rewrites MIPS64 OR values (generated from
// _gen/MIPS64.rules): folds a 32-bit-representable constant operand into
// ORconst and simplifies (OR x x) to x. It reports whether v was rewritten.
func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		// OR is commutative: the inner loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst simplifies and constant-folds MIPS64
// ORconst values (generated from _gen/MIPS64.rules): identity [0], all-ones
// [-1], folding against a constant operand, and collapsing nested ORconst.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT rewrites MIPS64 SGT (set-on-greater-than,
// signed) values (generated from _gen/MIPS64.rules): folds a constant first
// operand into SGTconst and simplifies (SGT x x) to constant 0. It reports
// whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGT x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU rewrites MIPS64 SGTU (set-on-greater-than,
// unsigned) values (generated from _gen/MIPS64.rules): folds a constant
// first operand into SGTUconst and simplifies (SGTU x x) to constant 0.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGTU x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst rewrites MIPS64 SGTUconst values
// (generated from _gen/MIPS64.rules), resolving the unsigned comparison to
// a constant 0 or 1 when the operand's value or value range (from a
// zero-extension, ANDconst mask, or SRLVconst shift) decides it. It reports
// whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst rewrites MIPS64 SGTconst values
// (generated from _gen/MIPS64.rules), resolving the signed comparison to a
// constant 0 or 1 when the operand's value or value range (from a sign- or
// zero-extension, ANDconst mask, or SRLVconst shift) decides it. It reports
// whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV rewrites MIPS64 SLLV (shift left logical)
// values (generated from _gen/MIPS64.rules): a constant shift >= 64 yields
// constant 0, and any other constant shift becomes SLLVconst. Rule order
// matters: the >= 64 case is checked first. It reports whether v was
// rewritten in place.
func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst constant-folds a MIPS64 SLLVconst
// applied to a constant (generated from _gen/MIPS64.rules). It reports
// whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV rewrites MIPS64 SRAV (shift right
// arithmetic) values (generated from _gen/MIPS64.rules): a constant shift
// >= 64 is clamped to SRAVconst [63], and any other constant shift becomes
// SRAVconst [c]. Rule order matters: the clamping case is checked first.
// It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst constant-folds a MIPS64 SRAVconst
// applied to a constant, using Go's arithmetic (sign-preserving) >> on the
// signed d (generated from _gen/MIPS64.rules). It reports whether v was
// rewritten in place.
func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV rewrites MIPS64 SRLV (shift right logical)
// values (generated from _gen/MIPS64.rules): a constant shift >= 64 yields
// constant 0, and any other constant shift becomes SRLVconst. Rule order
// matters: the >= 64 case is checked first. It reports whether v was
// rewritten in place.
func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst constant-folds a MIPS64 SRLVconst
// applied to a constant, shifting d as an unsigned value so zeros are
// shifted in (generated from _gen/MIPS64.rules). It reports whether v was
// rewritten in place.
func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV rewrites MIPS64 SUBV values (generated
// from _gen/MIPS64.rules): folds a 32-bit-representable constant subtrahend
// into SUBVconst, turns x-(-y) into x+y, simplifies x-x to 0, and 0-x to
// NEGV x. It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x (NEGV y))
	// result: (ADDV x y)
	for {
		x := v_0
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64ADDV)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst simplifies MIPS64 SUBVconst values
// (generated from _gen/MIPS64.rules): identity [0], constant folding, and
// collapsing a nested SUBVconst or ADDVconst into a single ADDVconst when
// the combined offset still fits in 32 bits. It reports whether v was
// rewritten in place.
func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR rewrites MIPS64 XOR values (generated from
// _gen/MIPS64.rules): folds a 32-bit-representable constant operand into
// XORconst and simplifies (XOR x x) to constant 0. It reports whether v was
// rewritten in place.
func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		// XOR is commutative: the inner loop tries both argument orders
		// by swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst simplifies and constant-folds MIPS64
// XORconst values (generated from _gen/MIPS64.rules): identity [0],
// bitwise-not [-1] as NORconst [0], folding against a constant operand, and
// collapsing nested XORconst. It reports whether v was rewritten in place.
func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpMIPS64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16 lowers the generic Mod16 op to MIPS64
// (generated from _gen/MIPS64.rules): both operands are sign-extended to
// 64 bits and the remainder is taken as Select0 of the DIVV tuple. This
// rule always fires, so the function always returns true.
func rewriteValueMIPS64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u lowers the generic Mod16u op to MIPS64
// (generated from _gen/MIPS64.rules): both operands are zero-extended to
// 64 bits and the remainder is taken as Select0 of the DIVVU tuple. This
// rule always fires, so the function always returns true.
func rewriteValueMIPS64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32 lowers the generic Mod32 op to MIPS64
// (generated from _gen/MIPS64.rules): both operands are sign-extended to
// 64 bits and the remainder is taken as Select0 of the DIVV tuple. This
// rule always fires, so the function always returns true.
func rewriteValueMIPS64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u lowers the generic Mod32u op to MIPS64
// (generated from _gen/MIPS64.rules): both operands are zero-extended to
// 64 bits and the remainder is taken as Select0 of the DIVVU tuple. This
// rule always fires, so the function always returns true.
func rewriteValueMIPS64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64 lowers the generic Mod64 op to MIPS64
// (generated from _gen/MIPS64.rules): the remainder is Select0 of the DIVV
// tuple; no extension is needed at 64 bits. This rule always fires, so the
// function always returns true.
func rewriteValueMIPS64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64 x y)
	// result: (Select0 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u lowers the generic Mod64u op to MIPS64
// (generated from _gen/MIPS64.rules): the remainder is Select0 of the DIVVU
// tuple; no extension is needed at 64 bits. This rule always fires, so the
// function always returns true.
func rewriteValueMIPS64_OpMod64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64u x y)
	// result: (Select0 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8 lowers the generic Mod8 op to MIPS64
// (generated from _gen/MIPS64.rules): both operands are sign-extended to
// 64 bits and the remainder is taken as Select0 of the DIVV tuple. This
// rule always fires, so the function always returns true.
func rewriteValueMIPS64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u lowers the generic Mod8u op to MIPS64
// (generated from _gen/MIPS64.rules): both operands are zero-extended to
// 64 bits and the remainder is taken as Select0 of the DIVVU tuple. This
// rule always fires, so the function always returns true.
func rewriteValueMIPS64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
  5784  func rewriteValueMIPS64_OpMove(v *Value) bool {
  5785  	v_2 := v.Args[2]
  5786  	v_1 := v.Args[1]
  5787  	v_0 := v.Args[0]
  5788  	b := v.Block
  5789  	config := b.Func.Config
  5790  	typ := &b.Func.Config.Types
  5791  	// match: (Move [0] _ _ mem)
  5792  	// result: mem
  5793  	for {
  5794  		if auxIntToInt64(v.AuxInt) != 0 {
  5795  			break
  5796  		}
  5797  		mem := v_2
  5798  		v.copyOf(mem)
  5799  		return true
  5800  	}
  5801  	// match: (Move [1] dst src mem)
  5802  	// result: (MOVBstore dst (MOVBload src mem) mem)
  5803  	for {
  5804  		if auxIntToInt64(v.AuxInt) != 1 {
  5805  			break
  5806  		}
  5807  		dst := v_0
  5808  		src := v_1
  5809  		mem := v_2
  5810  		v.reset(OpMIPS64MOVBstore)
  5811  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5812  		v0.AddArg2(src, mem)
  5813  		v.AddArg3(dst, v0, mem)
  5814  		return true
  5815  	}
  5816  	// match: (Move [2] {t} dst src mem)
  5817  	// cond: t.Alignment()%2 == 0
  5818  	// result: (MOVHstore dst (MOVHload src mem) mem)
  5819  	for {
  5820  		if auxIntToInt64(v.AuxInt) != 2 {
  5821  			break
  5822  		}
  5823  		t := auxToType(v.Aux)
  5824  		dst := v_0
  5825  		src := v_1
  5826  		mem := v_2
  5827  		if !(t.Alignment()%2 == 0) {
  5828  			break
  5829  		}
  5830  		v.reset(OpMIPS64MOVHstore)
  5831  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  5832  		v0.AddArg2(src, mem)
  5833  		v.AddArg3(dst, v0, mem)
  5834  		return true
  5835  	}
  5836  	// match: (Move [2] dst src mem)
  5837  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
  5838  	for {
  5839  		if auxIntToInt64(v.AuxInt) != 2 {
  5840  			break
  5841  		}
  5842  		dst := v_0
  5843  		src := v_1
  5844  		mem := v_2
  5845  		v.reset(OpMIPS64MOVBstore)
  5846  		v.AuxInt = int32ToAuxInt(1)
  5847  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5848  		v0.AuxInt = int32ToAuxInt(1)
  5849  		v0.AddArg2(src, mem)
  5850  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  5851  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5852  		v2.AddArg2(src, mem)
  5853  		v1.AddArg3(dst, v2, mem)
  5854  		v.AddArg3(dst, v0, v1)
  5855  		return true
  5856  	}
  5857  	// match: (Move [4] {t} dst src mem)
  5858  	// cond: t.Alignment()%4 == 0
  5859  	// result: (MOVWstore dst (MOVWload src mem) mem)
  5860  	for {
  5861  		if auxIntToInt64(v.AuxInt) != 4 {
  5862  			break
  5863  		}
  5864  		t := auxToType(v.Aux)
  5865  		dst := v_0
  5866  		src := v_1
  5867  		mem := v_2
  5868  		if !(t.Alignment()%4 == 0) {
  5869  			break
  5870  		}
  5871  		v.reset(OpMIPS64MOVWstore)
  5872  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  5873  		v0.AddArg2(src, mem)
  5874  		v.AddArg3(dst, v0, mem)
  5875  		return true
  5876  	}
  5877  	// match: (Move [4] {t} dst src mem)
  5878  	// cond: t.Alignment()%2 == 0
  5879  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
  5880  	for {
  5881  		if auxIntToInt64(v.AuxInt) != 4 {
  5882  			break
  5883  		}
  5884  		t := auxToType(v.Aux)
  5885  		dst := v_0
  5886  		src := v_1
  5887  		mem := v_2
  5888  		if !(t.Alignment()%2 == 0) {
  5889  			break
  5890  		}
  5891  		v.reset(OpMIPS64MOVHstore)
  5892  		v.AuxInt = int32ToAuxInt(2)
  5893  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  5894  		v0.AuxInt = int32ToAuxInt(2)
  5895  		v0.AddArg2(src, mem)
  5896  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  5897  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  5898  		v2.AddArg2(src, mem)
  5899  		v1.AddArg3(dst, v2, mem)
  5900  		v.AddArg3(dst, v0, v1)
  5901  		return true
  5902  	}
  5903  	// match: (Move [4] dst src mem)
  5904  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
  5905  	for {
  5906  		if auxIntToInt64(v.AuxInt) != 4 {
  5907  			break
  5908  		}
  5909  		dst := v_0
  5910  		src := v_1
  5911  		mem := v_2
  5912  		v.reset(OpMIPS64MOVBstore)
  5913  		v.AuxInt = int32ToAuxInt(3)
  5914  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5915  		v0.AuxInt = int32ToAuxInt(3)
  5916  		v0.AddArg2(src, mem)
  5917  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  5918  		v1.AuxInt = int32ToAuxInt(2)
  5919  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5920  		v2.AuxInt = int32ToAuxInt(2)
  5921  		v2.AddArg2(src, mem)
  5922  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  5923  		v3.AuxInt = int32ToAuxInt(1)
  5924  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5925  		v4.AuxInt = int32ToAuxInt(1)
  5926  		v4.AddArg2(src, mem)
  5927  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  5928  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  5929  		v6.AddArg2(src, mem)
  5930  		v5.AddArg3(dst, v6, mem)
  5931  		v3.AddArg3(dst, v4, v5)
  5932  		v1.AddArg3(dst, v2, v3)
  5933  		v.AddArg3(dst, v0, v1)
  5934  		return true
  5935  	}
  5936  	// match: (Move [8] {t} dst src mem)
  5937  	// cond: t.Alignment()%8 == 0
  5938  	// result: (MOVVstore dst (MOVVload src mem) mem)
  5939  	for {
  5940  		if auxIntToInt64(v.AuxInt) != 8 {
  5941  			break
  5942  		}
  5943  		t := auxToType(v.Aux)
  5944  		dst := v_0
  5945  		src := v_1
  5946  		mem := v_2
  5947  		if !(t.Alignment()%8 == 0) {
  5948  			break
  5949  		}
  5950  		v.reset(OpMIPS64MOVVstore)
  5951  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  5952  		v0.AddArg2(src, mem)
  5953  		v.AddArg3(dst, v0, mem)
  5954  		return true
  5955  	}
  5956  	// match: (Move [8] {t} dst src mem)
  5957  	// cond: t.Alignment()%4 == 0
  5958  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  5959  	for {
  5960  		if auxIntToInt64(v.AuxInt) != 8 {
  5961  			break
  5962  		}
  5963  		t := auxToType(v.Aux)
  5964  		dst := v_0
  5965  		src := v_1
  5966  		mem := v_2
  5967  		if !(t.Alignment()%4 == 0) {
  5968  			break
  5969  		}
  5970  		v.reset(OpMIPS64MOVWstore)
  5971  		v.AuxInt = int32ToAuxInt(4)
  5972  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  5973  		v0.AuxInt = int32ToAuxInt(4)
  5974  		v0.AddArg2(src, mem)
  5975  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  5976  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  5977  		v2.AddArg2(src, mem)
  5978  		v1.AddArg3(dst, v2, mem)
  5979  		v.AddArg3(dst, v0, v1)
  5980  		return true
  5981  	}
  5982  	// match: (Move [8] {t} dst src mem)
  5983  	// cond: t.Alignment()%2 == 0
  5984  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  5985  	for {
  5986  		if auxIntToInt64(v.AuxInt) != 8 {
  5987  			break
  5988  		}
  5989  		t := auxToType(v.Aux)
  5990  		dst := v_0
  5991  		src := v_1
  5992  		mem := v_2
  5993  		if !(t.Alignment()%2 == 0) {
  5994  			break
  5995  		}
  5996  		v.reset(OpMIPS64MOVHstore)
  5997  		v.AuxInt = int32ToAuxInt(6)
  5998  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  5999  		v0.AuxInt = int32ToAuxInt(6)
  6000  		v0.AddArg2(src, mem)
  6001  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6002  		v1.AuxInt = int32ToAuxInt(4)
  6003  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6004  		v2.AuxInt = int32ToAuxInt(4)
  6005  		v2.AddArg2(src, mem)
  6006  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6007  		v3.AuxInt = int32ToAuxInt(2)
  6008  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6009  		v4.AuxInt = int32ToAuxInt(2)
  6010  		v4.AddArg2(src, mem)
  6011  		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6012  		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6013  		v6.AddArg2(src, mem)
  6014  		v5.AddArg3(dst, v6, mem)
  6015  		v3.AddArg3(dst, v4, v5)
  6016  		v1.AddArg3(dst, v2, v3)
  6017  		v.AddArg3(dst, v0, v1)
  6018  		return true
  6019  	}
  6020  	// match: (Move [3] dst src mem)
  6021  	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
  6022  	for {
  6023  		if auxIntToInt64(v.AuxInt) != 3 {
  6024  			break
  6025  		}
  6026  		dst := v_0
  6027  		src := v_1
  6028  		mem := v_2
  6029  		v.reset(OpMIPS64MOVBstore)
  6030  		v.AuxInt = int32ToAuxInt(2)
  6031  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6032  		v0.AuxInt = int32ToAuxInt(2)
  6033  		v0.AddArg2(src, mem)
  6034  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  6035  		v1.AuxInt = int32ToAuxInt(1)
  6036  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6037  		v2.AuxInt = int32ToAuxInt(1)
  6038  		v2.AddArg2(src, mem)
  6039  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  6040  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6041  		v4.AddArg2(src, mem)
  6042  		v3.AddArg3(dst, v4, mem)
  6043  		v1.AddArg3(dst, v2, v3)
  6044  		v.AddArg3(dst, v0, v1)
  6045  		return true
  6046  	}
  6047  	// match: (Move [6] {t} dst src mem)
  6048  	// cond: t.Alignment()%2 == 0
  6049  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  6050  	for {
  6051  		if auxIntToInt64(v.AuxInt) != 6 {
  6052  			break
  6053  		}
  6054  		t := auxToType(v.Aux)
  6055  		dst := v_0
  6056  		src := v_1
  6057  		mem := v_2
  6058  		if !(t.Alignment()%2 == 0) {
  6059  			break
  6060  		}
  6061  		v.reset(OpMIPS64MOVHstore)
  6062  		v.AuxInt = int32ToAuxInt(4)
  6063  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6064  		v0.AuxInt = int32ToAuxInt(4)
  6065  		v0.AddArg2(src, mem)
  6066  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6067  		v1.AuxInt = int32ToAuxInt(2)
  6068  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6069  		v2.AuxInt = int32ToAuxInt(2)
  6070  		v2.AddArg2(src, mem)
  6071  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6072  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6073  		v4.AddArg2(src, mem)
  6074  		v3.AddArg3(dst, v4, mem)
  6075  		v1.AddArg3(dst, v2, v3)
  6076  		v.AddArg3(dst, v0, v1)
  6077  		return true
  6078  	}
  6079  	// match: (Move [12] {t} dst src mem)
  6080  	// cond: t.Alignment()%4 == 0
  6081  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  6082  	for {
  6083  		if auxIntToInt64(v.AuxInt) != 12 {
  6084  			break
  6085  		}
  6086  		t := auxToType(v.Aux)
  6087  		dst := v_0
  6088  		src := v_1
  6089  		mem := v_2
  6090  		if !(t.Alignment()%4 == 0) {
  6091  			break
  6092  		}
  6093  		v.reset(OpMIPS64MOVWstore)
  6094  		v.AuxInt = int32ToAuxInt(8)
  6095  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  6096  		v0.AuxInt = int32ToAuxInt(8)
  6097  		v0.AddArg2(src, mem)
  6098  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  6099  		v1.AuxInt = int32ToAuxInt(4)
  6100  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  6101  		v2.AuxInt = int32ToAuxInt(4)
  6102  		v2.AddArg2(src, mem)
  6103  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  6104  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  6105  		v4.AddArg2(src, mem)
  6106  		v3.AddArg3(dst, v4, mem)
  6107  		v1.AddArg3(dst, v2, v3)
  6108  		v.AddArg3(dst, v0, v1)
  6109  		return true
  6110  	}
  6111  	// match: (Move [16] {t} dst src mem)
  6112  	// cond: t.Alignment()%8 == 0
  6113  	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  6114  	for {
  6115  		if auxIntToInt64(v.AuxInt) != 16 {
  6116  			break
  6117  		}
  6118  		t := auxToType(v.Aux)
  6119  		dst := v_0
  6120  		src := v_1
  6121  		mem := v_2
  6122  		if !(t.Alignment()%8 == 0) {
  6123  			break
  6124  		}
  6125  		v.reset(OpMIPS64MOVVstore)
  6126  		v.AuxInt = int32ToAuxInt(8)
  6127  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  6128  		v0.AuxInt = int32ToAuxInt(8)
  6129  		v0.AddArg2(src, mem)
  6130  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
  6131  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  6132  		v2.AddArg2(src, mem)
  6133  		v1.AddArg3(dst, v2, mem)
  6134  		v.AddArg3(dst, v0, v1)
  6135  		return true
  6136  	}
  6137  	// match: (Move [24] {t} dst src mem)
  6138  	// cond: t.Alignment()%8 == 0
  6139  	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
  6140  	for {
  6141  		if auxIntToInt64(v.AuxInt) != 24 {
  6142  			break
  6143  		}
  6144  		t := auxToType(v.Aux)
  6145  		dst := v_0
  6146  		src := v_1
  6147  		mem := v_2
  6148  		if !(t.Alignment()%8 == 0) {
  6149  			break
  6150  		}
  6151  		v.reset(OpMIPS64MOVVstore)
  6152  		v.AuxInt = int32ToAuxInt(16)
  6153  		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  6154  		v0.AuxInt = int32ToAuxInt(16)
  6155  		v0.AddArg2(src, mem)
  6156  		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
  6157  		v1.AuxInt = int32ToAuxInt(8)
  6158  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  6159  		v2.AuxInt = int32ToAuxInt(8)
  6160  		v2.AddArg2(src, mem)
  6161  		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
  6162  		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  6163  		v4.AddArg2(src, mem)
  6164  		v3.AddArg3(dst, v4, mem)
  6165  		v1.AddArg3(dst, v2, v3)
  6166  		v.AddArg3(dst, v0, v1)
  6167  		return true
  6168  	}
  6169  	// match: (Move [s] {t} dst src mem)
  6170  	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
  6171  	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
  6172  	for {
  6173  		s := auxIntToInt64(v.AuxInt)
  6174  		t := auxToType(v.Aux)
  6175  		dst := v_0
  6176  		src := v_1
  6177  		mem := v_2
  6178  		if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
  6179  			break
  6180  		}
  6181  		v.reset(OpMIPS64DUFFCOPY)
  6182  		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
  6183  		v.AddArg3(dst, src, mem)
  6184  		return true
  6185  	}
  6186  	// match: (Move [s] {t} dst src mem)
  6187  	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
  6188  	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)
  6189  	for {
  6190  		s := auxIntToInt64(v.AuxInt)
  6191  		t := auxToType(v.Aux)
  6192  		dst := v_0
  6193  		src := v_1
  6194  		mem := v_2
  6195  		if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
  6196  			break
  6197  		}
  6198  		v.reset(OpMIPS64LoweredMove)
  6199  		v.AuxInt = int64ToAuxInt(t.Alignment())
  6200  		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
  6201  		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  6202  		v0.AddArg(src)
  6203  		v.AddArg4(dst, src, v0, mem)
  6204  		return true
  6205  	}
  6206  	return false
  6207  }
// rewriteValueMIPS64_OpMul16 lowers the generic Mul16 op to MIPS64.
// MULVU yields a (hi, lo) tuple; Select1 keeps the low 64 bits, which
// contain the correct 16-bit product in their low bits.
func rewriteValueMIPS64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32 lowers the generic Mul32 op to MIPS64.
// Select1 extracts the low 64 bits of the MULVU (hi, lo) tuple.
func rewriteValueMIPS64_OpMul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul32 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul64 lowers the generic Mul64 op to MIPS64.
// Select1 extracts the low 64 bits of the MULVU (hi, lo) tuple.
func rewriteValueMIPS64_OpMul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul64 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul8 lowers the generic Mul8 op to MIPS64.
// Select1 extracts the low 64 bits of the MULVU (hi, lo) tuple.
func rewriteValueMIPS64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq16 lowers Neq16: the operands are zero-extended,
// XORed, and the nonzero-ness of the result is tested with SGTU against 0.
// NOTE(review): x is extended 16->32 while y is extended 16->64; this
// asymmetry comes straight from the rules file — presumably both forms
// zero the high bits equally after further lowering; confirm against
// _gen/MIPS64.rules before changing.
func rewriteValueMIPS64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32 lowers Neq32: both operands are zero-extended
// to 64 bits, XORed, and compared unsigned-greater-than against zero
// (SGTU result-vs-0 is 1 exactly when the XOR is nonzero, i.e. x != y).
func rewriteValueMIPS64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F lowers Neq32F: float inequality is the
// inverted FP equality flag (FPFlagFalse of CMPEQF).
func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64 lowers Neq64: XOR the full-width operands and
// test the result for nonzero with SGTU against the constant 0.
func rewriteValueMIPS64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F lowers Neq64F: double inequality is the
// inverted FP equality flag (FPFlagFalse of CMPEQD).
func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8 lowers Neq8: both operands are zero-extended
// to 64 bits, XORed, and the result is tested for nonzero with SGTU
// against the constant 0.
func rewriteValueMIPS64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr lowers NeqPtr the same way as Neq64:
// XOR the pointers and test nonzero with SGTU against 0.
func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNot lowers boolean Not: XOR with the constant 1
// flips a 0/1 boolean value.
func rewriteValueMIPS64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr lowers OffPtr. Offsets from SP that fit in
// 32 bits fold directly into a MOVVaddr (an address-forming op); all
// other pointers get a plain ADDVconst.
func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpRotateLeft16 lowers RotateLeft16 for a constant
// rotate count only: (x << (c&15)) | (x >> (-c&15)). Non-constant counts
// fall through (return false) for other lowering.
func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft32 lowers RotateLeft32 for a constant
// rotate count only: (x << (c&31)) | (x >> (-c&31)).
func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVVconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 31)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 31)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft64 lowers RotateLeft64 for a constant
// rotate count only: (x << (c&63)) | (x >> (-c&63)).
func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVVconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 63)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 63)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft8 lowers RotateLeft8 for a constant
// rotate count only: (x << (c&7)) | (x >> (-c&7)).
func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRsh16Ux16 lowers Rsh16Ux16. The SRLV result is
// ANDed with NEGV(SGTU 64 y'): when the zero-extended count y' is < 64
// the mask is all-ones (the shift passes through); otherwise the mask is
// 0, forcing the Go-defined "shift >= width gives 0" result.
// Note v3 (the extended count) is deliberately shared by SGTU and SRLV.
func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32 lowers Rsh16Ux32; same masking scheme as
// Rsh16Ux16 but the shift count is zero-extended from 32 bits.
func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64 lowers Rsh16Ux64; the 64-bit count needs
// no extension, otherwise identical masking scheme to Rsh16Ux16.
func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8 lowers Rsh16Ux8; same masking scheme as
// Rsh16Ux16 but the shift count is zero-extended from 8 bits.
func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16 lowers the signed shift Rsh16x16. The
// shift count is ORed with NEGV(SGTU y' 63): when the zero-extended count
// y' exceeds 63 the OR saturates the count to all-ones, so SRAV shifts by
// the maximum and yields the sign bit in every position, matching Go's
// arithmetic-shift semantics for oversized counts.
func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32 lowers Rsh16x32; same count-saturation
// scheme as Rsh16x16 but the count is zero-extended from 32 bits.
func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64 lowers Rsh16x64; the 64-bit count needs
// no extension, otherwise identical count-saturation scheme to Rsh16x16.
func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8 lowers Rsh16x8; same count-saturation
// scheme as Rsh16x16 but the count is zero-extended from 8 bits.
func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16 lowers an unsigned right shift of a 32-bit
// value by a 16-bit count. Counts >= 64 must produce 0: SGTU(64, count) is 1
// only for in-range counts, NEGV makes that an all-ones mask, and the AND
// clears the SRLV result otherwise.
func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32 lowers an unsigned right shift of a 32-bit
// value by a 32-bit count. The AND with NEGV(SGTU(64, count)) zeroes the
// result for counts >= 64.
func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64 lowers an unsigned right shift of a 32-bit
// value by a 64-bit count; the count needs no extension. The AND with
// NEGV(SGTU(64, y)) zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8 lowers an unsigned right shift of a 32-bit
// value by an 8-bit count. The AND with NEGV(SGTU(64, count)) zeroes the
// result for counts >= 64.
func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16 lowers a signed right shift of a 32-bit value
// by a 16-bit count. Counts greater than 63 are saturated by OR-ing in
// NEGV(SGTU(count, 63)), so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32 lowers a signed right shift of a 32-bit value
// by a 32-bit count. Counts greater than 63 are saturated by OR-ing in
// NEGV(SGTU(count, 63)), so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64 lowers a signed right shift of a 32-bit value
// by a 64-bit count; the count needs no extension. Counts greater than 63 are
// saturated by OR-ing in NEGV(SGTU(y, 63)), so SRAV fills the result with the
// sign bit.
func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8 lowers a signed right shift of a 32-bit value
// by an 8-bit count. Counts greater than 63 are saturated by OR-ing in
// NEGV(SGTU(count, 63)), so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16 lowers an unsigned right shift of a 64-bit
// value by a 16-bit count; x needs no extension. The AND with
// NEGV(SGTU(64, count)) zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32 lowers an unsigned right shift of a 64-bit
// value by a 32-bit count; x needs no extension. The AND with
// NEGV(SGTU(64, count)) zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64 lowers an unsigned right shift of a 64-bit
// value by a 64-bit count; neither operand needs extension. The AND with
// NEGV(SGTU(64, y)) zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8 lowers an unsigned right shift of a 64-bit
// value by an 8-bit count; x needs no extension. The AND with
// NEGV(SGTU(64, count)) zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16 lowers a signed right shift of a 64-bit value
// by a 16-bit count; x needs no extension. Counts greater than 63 are
// saturated by OR-ing in NEGV(SGTU(count, 63)), so SRAV fills the result with
// the sign bit.
func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32 lowers a signed right shift of a 64-bit value
// by a 32-bit count; x needs no extension. Counts greater than 63 are
// saturated by OR-ing in NEGV(SGTU(count, 63)), so SRAV fills the result with
// the sign bit.
func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64 lowers a signed right shift of a 64-bit value
// by a 64-bit count; neither operand needs extension. Counts greater than 63
// are saturated by OR-ing in NEGV(SGTU(y, 63)), so SRAV fills the result with
// the sign bit.
func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8 lowers a signed right shift of a 64-bit value
// by an 8-bit count; x needs no extension. Counts greater than 63 are
// saturated by OR-ing in NEGV(SGTU(count, 63)), so SRAV fills the result with
// the sign bit.
func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16 lowers an unsigned right shift of an 8-bit
// value by a 16-bit count. The AND with NEGV(SGTU(64, count)) zeroes the
// result for counts >= 64.
func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32 lowers an unsigned right shift of an 8-bit
// value by a 32-bit count. The AND with NEGV(SGTU(64, count)) zeroes the
// result for counts >= 64.
func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64 lowers an unsigned right shift of an 8-bit
// value by a 64-bit count; the count needs no extension. The AND with
// NEGV(SGTU(64, y)) zeroes the result for counts >= 64.
func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8 lowers an unsigned right shift of an 8-bit
// value by an 8-bit count. The AND with NEGV(SGTU(64, count)) zeroes the
// result for counts >= 64.
func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16 lowers a signed right shift of an 8-bit value
// by a 16-bit count. Counts greater than 63 are saturated by OR-ing in
// NEGV(SGTU(count, 63)), so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32 lowers a signed right shift of an 8-bit value
// by a 32-bit count. Counts greater than 63 are saturated by OR-ing in
// NEGV(SGTU(count, 63)), so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64 lowers a signed right shift of an 8-bit value
// by a 64-bit count; the count needs no extension. Counts greater than 63 are
// saturated by OR-ing in NEGV(SGTU(y, 63)), so SRAV fills the result with the
// sign bit.
func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8 lowers a signed right shift of an 8-bit value
// by an 8-bit count. Counts greater than 63 are saturated by OR-ing in
// NEGV(SGTU(count, 63)), so SRAV fills the result with the sign bit.
func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0 rewrites Select0 projections of tuple-producing
// ops: the low result of Mul64uover, the low word of Add64carry/Sub64borrow,
// and the remainder half of DIVV/DIVVU — constant-folded when both operands
// are constants, or strength-reduced to ANDconst [c-1] for a power-of-two
// unsigned divisor. Returns true if a rewrite fired.
func rewriteValueMIPS64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uover x y))
	// result: (Select1 <typ.UInt64> (MULVU x y))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSelect1)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpMIPS64SUBV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
  7570  func rewriteValueMIPS64_OpSelect1(v *Value) bool {
  7571  	v_0 := v.Args[0]
  7572  	b := v.Block
  7573  	typ := &b.Func.Config.Types
  7574  	// match: (Select1 (Mul64uover x y))
  7575  	// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))
  7576  	for {
  7577  		if v_0.Op != OpMul64uover {
  7578  			break
  7579  		}
  7580  		y := v_0.Args[1]
  7581  		x := v_0.Args[0]
  7582  		v.reset(OpMIPS64SGTU)
  7583  		v.Type = typ.Bool
  7584  		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
  7585  		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7586  		v1.AddArg2(x, y)
  7587  		v0.AddArg(v1)
  7588  		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7589  		v2.AuxInt = int64ToAuxInt(0)
  7590  		v.AddArg2(v0, v2)
  7591  		return true
  7592  	}
  7593  	// match: (Select1 <t> (Add64carry x y c))
  7594  	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
  7595  	for {
  7596  		t := v.Type
  7597  		if v_0.Op != OpAdd64carry {
  7598  			break
  7599  		}
  7600  		c := v_0.Args[2]
  7601  		x := v_0.Args[0]
  7602  		y := v_0.Args[1]
  7603  		v.reset(OpMIPS64OR)
  7604  		v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
  7605  		s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
  7606  		s.AddArg2(x, y)
  7607  		v0.AddArg2(x, s)
  7608  		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
  7609  		v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
  7610  		v3.AddArg2(s, c)
  7611  		v2.AddArg2(s, v3)
  7612  		v.AddArg2(v0, v2)
  7613  		return true
  7614  	}
  7615  	// match: (Select1 <t> (Sub64borrow x y c))
  7616  	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
  7617  	for {
  7618  		t := v.Type
  7619  		if v_0.Op != OpSub64borrow {
  7620  			break
  7621  		}
  7622  		c := v_0.Args[2]
  7623  		x := v_0.Args[0]
  7624  		y := v_0.Args[1]
  7625  		v.reset(OpMIPS64OR)
  7626  		v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
  7627  		s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
  7628  		s.AddArg2(x, y)
  7629  		v0.AddArg2(s, x)
  7630  		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
  7631  		v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
  7632  		v3.AddArg2(s, c)
  7633  		v2.AddArg2(v3, s)
  7634  		v.AddArg2(v0, v2)
  7635  		return true
  7636  	}
  7637  	// match: (Select1 (MULVU x (MOVVconst [-1])))
  7638  	// result: (NEGV x)
  7639  	for {
  7640  		if v_0.Op != OpMIPS64MULVU {
  7641  			break
  7642  		}
  7643  		_ = v_0.Args[1]
  7644  		v_0_0 := v_0.Args[0]
  7645  		v_0_1 := v_0.Args[1]
  7646  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  7647  			x := v_0_0
  7648  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
  7649  				continue
  7650  			}
  7651  			v.reset(OpMIPS64NEGV)
  7652  			v.AddArg(x)
  7653  			return true
  7654  		}
  7655  		break
  7656  	}
  7657  	// match: (Select1 (MULVU _ (MOVVconst [0])))
  7658  	// result: (MOVVconst [0])
  7659  	for {
  7660  		if v_0.Op != OpMIPS64MULVU {
  7661  			break
  7662  		}
  7663  		_ = v_0.Args[1]
  7664  		v_0_0 := v_0.Args[0]
  7665  		v_0_1 := v_0.Args[1]
  7666  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  7667  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7668  				continue
  7669  			}
  7670  			v.reset(OpMIPS64MOVVconst)
  7671  			v.AuxInt = int64ToAuxInt(0)
  7672  			return true
  7673  		}
  7674  		break
  7675  	}
  7676  	// match: (Select1 (MULVU x (MOVVconst [1])))
  7677  	// result: x
  7678  	for {
  7679  		if v_0.Op != OpMIPS64MULVU {
  7680  			break
  7681  		}
  7682  		_ = v_0.Args[1]
  7683  		v_0_0 := v_0.Args[0]
  7684  		v_0_1 := v_0.Args[1]
  7685  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  7686  			x := v_0_0
  7687  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
  7688  				continue
  7689  			}
  7690  			v.copyOf(x)
  7691  			return true
  7692  		}
  7693  		break
  7694  	}
  7695  	// match: (Select1 (MULVU x (MOVVconst [c])))
  7696  	// cond: isPowerOfTwo(c)
  7697  	// result: (SLLVconst [log64(c)] x)
  7698  	for {
  7699  		if v_0.Op != OpMIPS64MULVU {
  7700  			break
  7701  		}
  7702  		_ = v_0.Args[1]
  7703  		v_0_0 := v_0.Args[0]
  7704  		v_0_1 := v_0.Args[1]
  7705  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  7706  			x := v_0_0
  7707  			if v_0_1.Op != OpMIPS64MOVVconst {
  7708  				continue
  7709  			}
  7710  			c := auxIntToInt64(v_0_1.AuxInt)
  7711  			if !(isPowerOfTwo(c)) {
  7712  				continue
  7713  			}
  7714  			v.reset(OpMIPS64SLLVconst)
  7715  			v.AuxInt = int64ToAuxInt(log64(c))
  7716  			v.AddArg(x)
  7717  			return true
  7718  		}
  7719  		break
  7720  	}
  7721  	// match: (Select1 (DIVVU x (MOVVconst [1])))
  7722  	// result: x
  7723  	for {
  7724  		if v_0.Op != OpMIPS64DIVVU {
  7725  			break
  7726  		}
  7727  		_ = v_0.Args[1]
  7728  		x := v_0.Args[0]
  7729  		v_0_1 := v_0.Args[1]
  7730  		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
  7731  			break
  7732  		}
  7733  		v.copyOf(x)
  7734  		return true
  7735  	}
  7736  	// match: (Select1 (DIVVU x (MOVVconst [c])))
  7737  	// cond: isPowerOfTwo(c)
  7738  	// result: (SRLVconst [log64(c)] x)
  7739  	for {
  7740  		if v_0.Op != OpMIPS64DIVVU {
  7741  			break
  7742  		}
  7743  		_ = v_0.Args[1]
  7744  		x := v_0.Args[0]
  7745  		v_0_1 := v_0.Args[1]
  7746  		if v_0_1.Op != OpMIPS64MOVVconst {
  7747  			break
  7748  		}
  7749  		c := auxIntToInt64(v_0_1.AuxInt)
  7750  		if !(isPowerOfTwo(c)) {
  7751  			break
  7752  		}
  7753  		v.reset(OpMIPS64SRLVconst)
  7754  		v.AuxInt = int64ToAuxInt(log64(c))
  7755  		v.AddArg(x)
  7756  		return true
  7757  	}
  7758  	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
  7759  	// result: (MOVVconst [c*d])
  7760  	for {
  7761  		if v_0.Op != OpMIPS64MULVU {
  7762  			break
  7763  		}
  7764  		_ = v_0.Args[1]
  7765  		v_0_0 := v_0.Args[0]
  7766  		v_0_1 := v_0.Args[1]
  7767  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  7768  			if v_0_0.Op != OpMIPS64MOVVconst {
  7769  				continue
  7770  			}
  7771  			c := auxIntToInt64(v_0_0.AuxInt)
  7772  			if v_0_1.Op != OpMIPS64MOVVconst {
  7773  				continue
  7774  			}
  7775  			d := auxIntToInt64(v_0_1.AuxInt)
  7776  			v.reset(OpMIPS64MOVVconst)
  7777  			v.AuxInt = int64ToAuxInt(c * d)
  7778  			return true
  7779  		}
  7780  		break
  7781  	}
  7782  	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
  7783  	// cond: d != 0
  7784  	// result: (MOVVconst [c/d])
  7785  	for {
  7786  		if v_0.Op != OpMIPS64DIVV {
  7787  			break
  7788  		}
  7789  		_ = v_0.Args[1]
  7790  		v_0_0 := v_0.Args[0]
  7791  		if v_0_0.Op != OpMIPS64MOVVconst {
  7792  			break
  7793  		}
  7794  		c := auxIntToInt64(v_0_0.AuxInt)
  7795  		v_0_1 := v_0.Args[1]
  7796  		if v_0_1.Op != OpMIPS64MOVVconst {
  7797  			break
  7798  		}
  7799  		d := auxIntToInt64(v_0_1.AuxInt)
  7800  		if !(d != 0) {
  7801  			break
  7802  		}
  7803  		v.reset(OpMIPS64MOVVconst)
  7804  		v.AuxInt = int64ToAuxInt(c / d)
  7805  		return true
  7806  	}
  7807  	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
  7808  	// cond: d != 0
  7809  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  7810  	for {
  7811  		if v_0.Op != OpMIPS64DIVVU {
  7812  			break
  7813  		}
  7814  		_ = v_0.Args[1]
  7815  		v_0_0 := v_0.Args[0]
  7816  		if v_0_0.Op != OpMIPS64MOVVconst {
  7817  			break
  7818  		}
  7819  		c := auxIntToInt64(v_0_0.AuxInt)
  7820  		v_0_1 := v_0.Args[1]
  7821  		if v_0_1.Op != OpMIPS64MOVVconst {
  7822  			break
  7823  		}
  7824  		d := auxIntToInt64(v_0_1.AuxInt)
  7825  		if !(d != 0) {
  7826  			break
  7827  		}
  7828  		v.reset(OpMIPS64MOVVconst)
  7829  		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
  7830  		return true
  7831  	}
  7832  	return false
  7833  }
  7834  func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
  7835  	v_0 := v.Args[0]
  7836  	b := v.Block
  7837  	// match: (Slicemask <t> x)
  7838  	// result: (SRAVconst (NEGV <t> x) [63])
  7839  	for {
  7840  		t := v.Type
  7841  		x := v_0
  7842  		v.reset(OpMIPS64SRAVconst)
  7843  		v.AuxInt = int64ToAuxInt(63)
  7844  		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  7845  		v0.AddArg(x)
  7846  		v.AddArg(v0)
  7847  		return true
  7848  	}
  7849  }
  7850  func rewriteValueMIPS64_OpStore(v *Value) bool {
  7851  	v_2 := v.Args[2]
  7852  	v_1 := v.Args[1]
  7853  	v_0 := v.Args[0]
  7854  	// match: (Store {t} ptr val mem)
  7855  	// cond: t.Size() == 1
  7856  	// result: (MOVBstore ptr val mem)
  7857  	for {
  7858  		t := auxToType(v.Aux)
  7859  		ptr := v_0
  7860  		val := v_1
  7861  		mem := v_2
  7862  		if !(t.Size() == 1) {
  7863  			break
  7864  		}
  7865  		v.reset(OpMIPS64MOVBstore)
  7866  		v.AddArg3(ptr, val, mem)
  7867  		return true
  7868  	}
  7869  	// match: (Store {t} ptr val mem)
  7870  	// cond: t.Size() == 2
  7871  	// result: (MOVHstore ptr val mem)
  7872  	for {
  7873  		t := auxToType(v.Aux)
  7874  		ptr := v_0
  7875  		val := v_1
  7876  		mem := v_2
  7877  		if !(t.Size() == 2) {
  7878  			break
  7879  		}
  7880  		v.reset(OpMIPS64MOVHstore)
  7881  		v.AddArg3(ptr, val, mem)
  7882  		return true
  7883  	}
  7884  	// match: (Store {t} ptr val mem)
  7885  	// cond: t.Size() == 4 && !t.IsFloat()
  7886  	// result: (MOVWstore ptr val mem)
  7887  	for {
  7888  		t := auxToType(v.Aux)
  7889  		ptr := v_0
  7890  		val := v_1
  7891  		mem := v_2
  7892  		if !(t.Size() == 4 && !t.IsFloat()) {
  7893  			break
  7894  		}
  7895  		v.reset(OpMIPS64MOVWstore)
  7896  		v.AddArg3(ptr, val, mem)
  7897  		return true
  7898  	}
  7899  	// match: (Store {t} ptr val mem)
  7900  	// cond: t.Size() == 8 && !t.IsFloat()
  7901  	// result: (MOVVstore ptr val mem)
  7902  	for {
  7903  		t := auxToType(v.Aux)
  7904  		ptr := v_0
  7905  		val := v_1
  7906  		mem := v_2
  7907  		if !(t.Size() == 8 && !t.IsFloat()) {
  7908  			break
  7909  		}
  7910  		v.reset(OpMIPS64MOVVstore)
  7911  		v.AddArg3(ptr, val, mem)
  7912  		return true
  7913  	}
  7914  	// match: (Store {t} ptr val mem)
  7915  	// cond: t.Size() == 4 && t.IsFloat()
  7916  	// result: (MOVFstore ptr val mem)
  7917  	for {
  7918  		t := auxToType(v.Aux)
  7919  		ptr := v_0
  7920  		val := v_1
  7921  		mem := v_2
  7922  		if !(t.Size() == 4 && t.IsFloat()) {
  7923  			break
  7924  		}
  7925  		v.reset(OpMIPS64MOVFstore)
  7926  		v.AddArg3(ptr, val, mem)
  7927  		return true
  7928  	}
  7929  	// match: (Store {t} ptr val mem)
  7930  	// cond: t.Size() == 8 && t.IsFloat()
  7931  	// result: (MOVDstore ptr val mem)
  7932  	for {
  7933  		t := auxToType(v.Aux)
  7934  		ptr := v_0
  7935  		val := v_1
  7936  		mem := v_2
  7937  		if !(t.Size() == 8 && t.IsFloat()) {
  7938  			break
  7939  		}
  7940  		v.reset(OpMIPS64MOVDstore)
  7941  		v.AddArg3(ptr, val, mem)
  7942  		return true
  7943  	}
  7944  	return false
  7945  }
// rewriteValueMIPS64_OpZero lowers the generic Zero op (zero s bytes at ptr)
// for MIPS64. Small sizes become explicit stores of the zero constant,
// chained through memory and split according to the type's alignment;
// mid-size aligned blocks use DUFFZERO; everything else falls back to the
// LoweredZero runtime loop. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		// AuxInt is the offset into the Duff's-device zeroing routine:
		// larger sizes jump closer to its start.
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s > 8*128 || t.Alignment()%8 != 0
	// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s > 8*128 || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		// Second argument is the address of the last element to zero.
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(ptr)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
// rewriteBlockMIPS64 applies MIPS64-specific rewrite rules to the control
// value of block b: folding FP-flag tests into FPT/FPF blocks, simplifying
// negated comparisons (XORconst [1] ...), converting comparisons against
// zero into the dedicated GEZ/GTZ/LEZ/LTZ block kinds, lowering the generic
// If to NE, and resolving constant conditions to BlockFirst (swapping
// successors when the branch is statically not taken). It reports whether
// any rewrite was applied.
func rewriteBlockMIPS64(b *Block) bool {
	switch b.Kind {
	case BlockMIPS64EQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPF, cmp)
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPT, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.resetWithControl(BlockMIPS64NE, cmp)
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpMIPS64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64NE, x)
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, x)
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// result: (GEZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64GEZ, x)
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// result: (LEZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64LEZ, x)
			return true
		}
		// match: (EQ (MOVVconst [0]) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (EQ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (NE cond yes no)
		for {
			cond := b.Controls[0]
			b.resetWithControl(BlockMIPS64NE, cond)
			return true
		}
	case BlockMIPS64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64NE:
		// match: (NE (FPFlagTrue cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPT, cmp)
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPS64FPF, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPS64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.resetWithControl(BlockMIPS64EQ, cmp)
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64EQ, x)
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpMIPS64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64NE, x)
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// result: (LTZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPS64LTZ, x)
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// result: (GTZ x yes no)
		for b.Controls[0].Op == OpMIPS64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockMIPS64GTZ, x)
			return true
		}
		// match: (NE (MOVVconst [0]) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (NE (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPS64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
	}
	return false
}
  8714  
