1
2
3 package ssa
4
5 import "cmd/compile/internal/types"
6
7 func rewriteValueMIPS64(v *Value) bool {
8 switch v.Op {
9 case OpAbs:
10 v.Op = OpMIPS64ABSD
11 return true
12 case OpAdd16:
13 v.Op = OpMIPS64ADDV
14 return true
15 case OpAdd32:
16 v.Op = OpMIPS64ADDV
17 return true
18 case OpAdd32F:
19 v.Op = OpMIPS64ADDF
20 return true
21 case OpAdd64:
22 v.Op = OpMIPS64ADDV
23 return true
24 case OpAdd64F:
25 v.Op = OpMIPS64ADDD
26 return true
27 case OpAdd8:
28 v.Op = OpMIPS64ADDV
29 return true
30 case OpAddPtr:
31 v.Op = OpMIPS64ADDV
32 return true
33 case OpAddr:
34 return rewriteValueMIPS64_OpAddr(v)
35 case OpAnd16:
36 v.Op = OpMIPS64AND
37 return true
38 case OpAnd32:
39 v.Op = OpMIPS64AND
40 return true
41 case OpAnd64:
42 v.Op = OpMIPS64AND
43 return true
44 case OpAnd8:
45 v.Op = OpMIPS64AND
46 return true
47 case OpAndB:
48 v.Op = OpMIPS64AND
49 return true
50 case OpAtomicAdd32:
51 v.Op = OpMIPS64LoweredAtomicAdd32
52 return true
53 case OpAtomicAdd64:
54 v.Op = OpMIPS64LoweredAtomicAdd64
55 return true
56 case OpAtomicAnd32:
57 v.Op = OpMIPS64LoweredAtomicAnd32
58 return true
59 case OpAtomicAnd8:
60 return rewriteValueMIPS64_OpAtomicAnd8(v)
61 case OpAtomicCompareAndSwap32:
62 return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
63 case OpAtomicCompareAndSwap64:
64 v.Op = OpMIPS64LoweredAtomicCas64
65 return true
66 case OpAtomicExchange32:
67 v.Op = OpMIPS64LoweredAtomicExchange32
68 return true
69 case OpAtomicExchange64:
70 v.Op = OpMIPS64LoweredAtomicExchange64
71 return true
72 case OpAtomicLoad32:
73 v.Op = OpMIPS64LoweredAtomicLoad32
74 return true
75 case OpAtomicLoad64:
76 v.Op = OpMIPS64LoweredAtomicLoad64
77 return true
78 case OpAtomicLoad8:
79 v.Op = OpMIPS64LoweredAtomicLoad8
80 return true
81 case OpAtomicLoadPtr:
82 v.Op = OpMIPS64LoweredAtomicLoad64
83 return true
84 case OpAtomicOr32:
85 v.Op = OpMIPS64LoweredAtomicOr32
86 return true
87 case OpAtomicOr8:
88 return rewriteValueMIPS64_OpAtomicOr8(v)
89 case OpAtomicStore32:
90 v.Op = OpMIPS64LoweredAtomicStore32
91 return true
92 case OpAtomicStore64:
93 v.Op = OpMIPS64LoweredAtomicStore64
94 return true
95 case OpAtomicStore8:
96 v.Op = OpMIPS64LoweredAtomicStore8
97 return true
98 case OpAtomicStorePtrNoWB:
99 v.Op = OpMIPS64LoweredAtomicStore64
100 return true
101 case OpAvg64u:
102 return rewriteValueMIPS64_OpAvg64u(v)
103 case OpClosureCall:
104 v.Op = OpMIPS64CALLclosure
105 return true
106 case OpCom16:
107 return rewriteValueMIPS64_OpCom16(v)
108 case OpCom32:
109 return rewriteValueMIPS64_OpCom32(v)
110 case OpCom64:
111 return rewriteValueMIPS64_OpCom64(v)
112 case OpCom8:
113 return rewriteValueMIPS64_OpCom8(v)
114 case OpConst16:
115 return rewriteValueMIPS64_OpConst16(v)
116 case OpConst32:
117 return rewriteValueMIPS64_OpConst32(v)
118 case OpConst32F:
119 return rewriteValueMIPS64_OpConst32F(v)
120 case OpConst64:
121 return rewriteValueMIPS64_OpConst64(v)
122 case OpConst64F:
123 return rewriteValueMIPS64_OpConst64F(v)
124 case OpConst8:
125 return rewriteValueMIPS64_OpConst8(v)
126 case OpConstBool:
127 return rewriteValueMIPS64_OpConstBool(v)
128 case OpConstNil:
129 return rewriteValueMIPS64_OpConstNil(v)
130 case OpCvt32Fto32:
131 v.Op = OpMIPS64TRUNCFW
132 return true
133 case OpCvt32Fto64:
134 v.Op = OpMIPS64TRUNCFV
135 return true
136 case OpCvt32Fto64F:
137 v.Op = OpMIPS64MOVFD
138 return true
139 case OpCvt32to32F:
140 v.Op = OpMIPS64MOVWF
141 return true
142 case OpCvt32to64F:
143 v.Op = OpMIPS64MOVWD
144 return true
145 case OpCvt64Fto32:
146 v.Op = OpMIPS64TRUNCDW
147 return true
148 case OpCvt64Fto32F:
149 v.Op = OpMIPS64MOVDF
150 return true
151 case OpCvt64Fto64:
152 v.Op = OpMIPS64TRUNCDV
153 return true
154 case OpCvt64to32F:
155 v.Op = OpMIPS64MOVVF
156 return true
157 case OpCvt64to64F:
158 v.Op = OpMIPS64MOVVD
159 return true
160 case OpCvtBoolToUint8:
161 v.Op = OpCopy
162 return true
163 case OpDiv16:
164 return rewriteValueMIPS64_OpDiv16(v)
165 case OpDiv16u:
166 return rewriteValueMIPS64_OpDiv16u(v)
167 case OpDiv32:
168 return rewriteValueMIPS64_OpDiv32(v)
169 case OpDiv32F:
170 v.Op = OpMIPS64DIVF
171 return true
172 case OpDiv32u:
173 return rewriteValueMIPS64_OpDiv32u(v)
174 case OpDiv64:
175 return rewriteValueMIPS64_OpDiv64(v)
176 case OpDiv64F:
177 v.Op = OpMIPS64DIVD
178 return true
179 case OpDiv64u:
180 return rewriteValueMIPS64_OpDiv64u(v)
181 case OpDiv8:
182 return rewriteValueMIPS64_OpDiv8(v)
183 case OpDiv8u:
184 return rewriteValueMIPS64_OpDiv8u(v)
185 case OpEq16:
186 return rewriteValueMIPS64_OpEq16(v)
187 case OpEq32:
188 return rewriteValueMIPS64_OpEq32(v)
189 case OpEq32F:
190 return rewriteValueMIPS64_OpEq32F(v)
191 case OpEq64:
192 return rewriteValueMIPS64_OpEq64(v)
193 case OpEq64F:
194 return rewriteValueMIPS64_OpEq64F(v)
195 case OpEq8:
196 return rewriteValueMIPS64_OpEq8(v)
197 case OpEqB:
198 return rewriteValueMIPS64_OpEqB(v)
199 case OpEqPtr:
200 return rewriteValueMIPS64_OpEqPtr(v)
201 case OpGetCallerPC:
202 v.Op = OpMIPS64LoweredGetCallerPC
203 return true
204 case OpGetCallerSP:
205 v.Op = OpMIPS64LoweredGetCallerSP
206 return true
207 case OpGetClosurePtr:
208 v.Op = OpMIPS64LoweredGetClosurePtr
209 return true
210 case OpHmul32:
211 return rewriteValueMIPS64_OpHmul32(v)
212 case OpHmul32u:
213 return rewriteValueMIPS64_OpHmul32u(v)
214 case OpHmul64:
215 return rewriteValueMIPS64_OpHmul64(v)
216 case OpHmul64u:
217 return rewriteValueMIPS64_OpHmul64u(v)
218 case OpInterCall:
219 v.Op = OpMIPS64CALLinter
220 return true
221 case OpIsInBounds:
222 return rewriteValueMIPS64_OpIsInBounds(v)
223 case OpIsNonNil:
224 return rewriteValueMIPS64_OpIsNonNil(v)
225 case OpIsSliceInBounds:
226 return rewriteValueMIPS64_OpIsSliceInBounds(v)
227 case OpLeq16:
228 return rewriteValueMIPS64_OpLeq16(v)
229 case OpLeq16U:
230 return rewriteValueMIPS64_OpLeq16U(v)
231 case OpLeq32:
232 return rewriteValueMIPS64_OpLeq32(v)
233 case OpLeq32F:
234 return rewriteValueMIPS64_OpLeq32F(v)
235 case OpLeq32U:
236 return rewriteValueMIPS64_OpLeq32U(v)
237 case OpLeq64:
238 return rewriteValueMIPS64_OpLeq64(v)
239 case OpLeq64F:
240 return rewriteValueMIPS64_OpLeq64F(v)
241 case OpLeq64U:
242 return rewriteValueMIPS64_OpLeq64U(v)
243 case OpLeq8:
244 return rewriteValueMIPS64_OpLeq8(v)
245 case OpLeq8U:
246 return rewriteValueMIPS64_OpLeq8U(v)
247 case OpLess16:
248 return rewriteValueMIPS64_OpLess16(v)
249 case OpLess16U:
250 return rewriteValueMIPS64_OpLess16U(v)
251 case OpLess32:
252 return rewriteValueMIPS64_OpLess32(v)
253 case OpLess32F:
254 return rewriteValueMIPS64_OpLess32F(v)
255 case OpLess32U:
256 return rewriteValueMIPS64_OpLess32U(v)
257 case OpLess64:
258 return rewriteValueMIPS64_OpLess64(v)
259 case OpLess64F:
260 return rewriteValueMIPS64_OpLess64F(v)
261 case OpLess64U:
262 return rewriteValueMIPS64_OpLess64U(v)
263 case OpLess8:
264 return rewriteValueMIPS64_OpLess8(v)
265 case OpLess8U:
266 return rewriteValueMIPS64_OpLess8U(v)
267 case OpLoad:
268 return rewriteValueMIPS64_OpLoad(v)
269 case OpLocalAddr:
270 return rewriteValueMIPS64_OpLocalAddr(v)
271 case OpLsh16x16:
272 return rewriteValueMIPS64_OpLsh16x16(v)
273 case OpLsh16x32:
274 return rewriteValueMIPS64_OpLsh16x32(v)
275 case OpLsh16x64:
276 return rewriteValueMIPS64_OpLsh16x64(v)
277 case OpLsh16x8:
278 return rewriteValueMIPS64_OpLsh16x8(v)
279 case OpLsh32x16:
280 return rewriteValueMIPS64_OpLsh32x16(v)
281 case OpLsh32x32:
282 return rewriteValueMIPS64_OpLsh32x32(v)
283 case OpLsh32x64:
284 return rewriteValueMIPS64_OpLsh32x64(v)
285 case OpLsh32x8:
286 return rewriteValueMIPS64_OpLsh32x8(v)
287 case OpLsh64x16:
288 return rewriteValueMIPS64_OpLsh64x16(v)
289 case OpLsh64x32:
290 return rewriteValueMIPS64_OpLsh64x32(v)
291 case OpLsh64x64:
292 return rewriteValueMIPS64_OpLsh64x64(v)
293 case OpLsh64x8:
294 return rewriteValueMIPS64_OpLsh64x8(v)
295 case OpLsh8x16:
296 return rewriteValueMIPS64_OpLsh8x16(v)
297 case OpLsh8x32:
298 return rewriteValueMIPS64_OpLsh8x32(v)
299 case OpLsh8x64:
300 return rewriteValueMIPS64_OpLsh8x64(v)
301 case OpLsh8x8:
302 return rewriteValueMIPS64_OpLsh8x8(v)
303 case OpMIPS64ADDV:
304 return rewriteValueMIPS64_OpMIPS64ADDV(v)
305 case OpMIPS64ADDVconst:
306 return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
307 case OpMIPS64AND:
308 return rewriteValueMIPS64_OpMIPS64AND(v)
309 case OpMIPS64ANDconst:
310 return rewriteValueMIPS64_OpMIPS64ANDconst(v)
311 case OpMIPS64LoweredAtomicAdd32:
312 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
313 case OpMIPS64LoweredAtomicAdd64:
314 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
315 case OpMIPS64LoweredAtomicStore32:
316 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
317 case OpMIPS64LoweredAtomicStore64:
318 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
319 case OpMIPS64LoweredPanicBoundsCR:
320 return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v)
321 case OpMIPS64LoweredPanicBoundsRC:
322 return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v)
323 case OpMIPS64LoweredPanicBoundsRR:
324 return rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v)
325 case OpMIPS64MOVBUload:
326 return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
327 case OpMIPS64MOVBUreg:
328 return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
329 case OpMIPS64MOVBload:
330 return rewriteValueMIPS64_OpMIPS64MOVBload(v)
331 case OpMIPS64MOVBreg:
332 return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
333 case OpMIPS64MOVBstore:
334 return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
335 case OpMIPS64MOVBstorezero:
336 return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
337 case OpMIPS64MOVDload:
338 return rewriteValueMIPS64_OpMIPS64MOVDload(v)
339 case OpMIPS64MOVDstore:
340 return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
341 case OpMIPS64MOVFload:
342 return rewriteValueMIPS64_OpMIPS64MOVFload(v)
343 case OpMIPS64MOVFstore:
344 return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
345 case OpMIPS64MOVHUload:
346 return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
347 case OpMIPS64MOVHUreg:
348 return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
349 case OpMIPS64MOVHload:
350 return rewriteValueMIPS64_OpMIPS64MOVHload(v)
351 case OpMIPS64MOVHreg:
352 return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
353 case OpMIPS64MOVHstore:
354 return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
355 case OpMIPS64MOVHstorezero:
356 return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
357 case OpMIPS64MOVVload:
358 return rewriteValueMIPS64_OpMIPS64MOVVload(v)
359 case OpMIPS64MOVVnop:
360 return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
361 case OpMIPS64MOVVreg:
362 return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
363 case OpMIPS64MOVVstore:
364 return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
365 case OpMIPS64MOVVstorezero:
366 return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
367 case OpMIPS64MOVWUload:
368 return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
369 case OpMIPS64MOVWUreg:
370 return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
371 case OpMIPS64MOVWload:
372 return rewriteValueMIPS64_OpMIPS64MOVWload(v)
373 case OpMIPS64MOVWreg:
374 return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
375 case OpMIPS64MOVWstore:
376 return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
377 case OpMIPS64MOVWstorezero:
378 return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
379 case OpMIPS64NEGV:
380 return rewriteValueMIPS64_OpMIPS64NEGV(v)
381 case OpMIPS64NOR:
382 return rewriteValueMIPS64_OpMIPS64NOR(v)
383 case OpMIPS64NORconst:
384 return rewriteValueMIPS64_OpMIPS64NORconst(v)
385 case OpMIPS64OR:
386 return rewriteValueMIPS64_OpMIPS64OR(v)
387 case OpMIPS64ORconst:
388 return rewriteValueMIPS64_OpMIPS64ORconst(v)
389 case OpMIPS64SGT:
390 return rewriteValueMIPS64_OpMIPS64SGT(v)
391 case OpMIPS64SGTU:
392 return rewriteValueMIPS64_OpMIPS64SGTU(v)
393 case OpMIPS64SGTUconst:
394 return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
395 case OpMIPS64SGTconst:
396 return rewriteValueMIPS64_OpMIPS64SGTconst(v)
397 case OpMIPS64SLLV:
398 return rewriteValueMIPS64_OpMIPS64SLLV(v)
399 case OpMIPS64SLLVconst:
400 return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
401 case OpMIPS64SRAV:
402 return rewriteValueMIPS64_OpMIPS64SRAV(v)
403 case OpMIPS64SRAVconst:
404 return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
405 case OpMIPS64SRLV:
406 return rewriteValueMIPS64_OpMIPS64SRLV(v)
407 case OpMIPS64SRLVconst:
408 return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
409 case OpMIPS64SUBV:
410 return rewriteValueMIPS64_OpMIPS64SUBV(v)
411 case OpMIPS64SUBVconst:
412 return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
413 case OpMIPS64XOR:
414 return rewriteValueMIPS64_OpMIPS64XOR(v)
415 case OpMIPS64XORconst:
416 return rewriteValueMIPS64_OpMIPS64XORconst(v)
417 case OpMod16:
418 return rewriteValueMIPS64_OpMod16(v)
419 case OpMod16u:
420 return rewriteValueMIPS64_OpMod16u(v)
421 case OpMod32:
422 return rewriteValueMIPS64_OpMod32(v)
423 case OpMod32u:
424 return rewriteValueMIPS64_OpMod32u(v)
425 case OpMod64:
426 return rewriteValueMIPS64_OpMod64(v)
427 case OpMod64u:
428 return rewriteValueMIPS64_OpMod64u(v)
429 case OpMod8:
430 return rewriteValueMIPS64_OpMod8(v)
431 case OpMod8u:
432 return rewriteValueMIPS64_OpMod8u(v)
433 case OpMove:
434 return rewriteValueMIPS64_OpMove(v)
435 case OpMul16:
436 return rewriteValueMIPS64_OpMul16(v)
437 case OpMul32:
438 return rewriteValueMIPS64_OpMul32(v)
439 case OpMul32F:
440 v.Op = OpMIPS64MULF
441 return true
442 case OpMul64:
443 return rewriteValueMIPS64_OpMul64(v)
444 case OpMul64F:
445 v.Op = OpMIPS64MULD
446 return true
447 case OpMul64uhilo:
448 v.Op = OpMIPS64MULVU
449 return true
450 case OpMul8:
451 return rewriteValueMIPS64_OpMul8(v)
452 case OpNeg16:
453 v.Op = OpMIPS64NEGV
454 return true
455 case OpNeg32:
456 v.Op = OpMIPS64NEGV
457 return true
458 case OpNeg32F:
459 v.Op = OpMIPS64NEGF
460 return true
461 case OpNeg64:
462 v.Op = OpMIPS64NEGV
463 return true
464 case OpNeg64F:
465 v.Op = OpMIPS64NEGD
466 return true
467 case OpNeg8:
468 v.Op = OpMIPS64NEGV
469 return true
470 case OpNeq16:
471 return rewriteValueMIPS64_OpNeq16(v)
472 case OpNeq32:
473 return rewriteValueMIPS64_OpNeq32(v)
474 case OpNeq32F:
475 return rewriteValueMIPS64_OpNeq32F(v)
476 case OpNeq64:
477 return rewriteValueMIPS64_OpNeq64(v)
478 case OpNeq64F:
479 return rewriteValueMIPS64_OpNeq64F(v)
480 case OpNeq8:
481 return rewriteValueMIPS64_OpNeq8(v)
482 case OpNeqB:
483 v.Op = OpMIPS64XOR
484 return true
485 case OpNeqPtr:
486 return rewriteValueMIPS64_OpNeqPtr(v)
487 case OpNilCheck:
488 v.Op = OpMIPS64LoweredNilCheck
489 return true
490 case OpNot:
491 return rewriteValueMIPS64_OpNot(v)
492 case OpOffPtr:
493 return rewriteValueMIPS64_OpOffPtr(v)
494 case OpOr16:
495 v.Op = OpMIPS64OR
496 return true
497 case OpOr32:
498 v.Op = OpMIPS64OR
499 return true
500 case OpOr64:
501 v.Op = OpMIPS64OR
502 return true
503 case OpOr8:
504 v.Op = OpMIPS64OR
505 return true
506 case OpOrB:
507 v.Op = OpMIPS64OR
508 return true
509 case OpPanicBounds:
510 v.Op = OpMIPS64LoweredPanicBoundsRR
511 return true
512 case OpPubBarrier:
513 v.Op = OpMIPS64LoweredPubBarrier
514 return true
515 case OpRotateLeft16:
516 return rewriteValueMIPS64_OpRotateLeft16(v)
517 case OpRotateLeft32:
518 return rewriteValueMIPS64_OpRotateLeft32(v)
519 case OpRotateLeft64:
520 return rewriteValueMIPS64_OpRotateLeft64(v)
521 case OpRotateLeft8:
522 return rewriteValueMIPS64_OpRotateLeft8(v)
523 case OpRound32F:
524 v.Op = OpCopy
525 return true
526 case OpRound64F:
527 v.Op = OpCopy
528 return true
529 case OpRsh16Ux16:
530 return rewriteValueMIPS64_OpRsh16Ux16(v)
531 case OpRsh16Ux32:
532 return rewriteValueMIPS64_OpRsh16Ux32(v)
533 case OpRsh16Ux64:
534 return rewriteValueMIPS64_OpRsh16Ux64(v)
535 case OpRsh16Ux8:
536 return rewriteValueMIPS64_OpRsh16Ux8(v)
537 case OpRsh16x16:
538 return rewriteValueMIPS64_OpRsh16x16(v)
539 case OpRsh16x32:
540 return rewriteValueMIPS64_OpRsh16x32(v)
541 case OpRsh16x64:
542 return rewriteValueMIPS64_OpRsh16x64(v)
543 case OpRsh16x8:
544 return rewriteValueMIPS64_OpRsh16x8(v)
545 case OpRsh32Ux16:
546 return rewriteValueMIPS64_OpRsh32Ux16(v)
547 case OpRsh32Ux32:
548 return rewriteValueMIPS64_OpRsh32Ux32(v)
549 case OpRsh32Ux64:
550 return rewriteValueMIPS64_OpRsh32Ux64(v)
551 case OpRsh32Ux8:
552 return rewriteValueMIPS64_OpRsh32Ux8(v)
553 case OpRsh32x16:
554 return rewriteValueMIPS64_OpRsh32x16(v)
555 case OpRsh32x32:
556 return rewriteValueMIPS64_OpRsh32x32(v)
557 case OpRsh32x64:
558 return rewriteValueMIPS64_OpRsh32x64(v)
559 case OpRsh32x8:
560 return rewriteValueMIPS64_OpRsh32x8(v)
561 case OpRsh64Ux16:
562 return rewriteValueMIPS64_OpRsh64Ux16(v)
563 case OpRsh64Ux32:
564 return rewriteValueMIPS64_OpRsh64Ux32(v)
565 case OpRsh64Ux64:
566 return rewriteValueMIPS64_OpRsh64Ux64(v)
567 case OpRsh64Ux8:
568 return rewriteValueMIPS64_OpRsh64Ux8(v)
569 case OpRsh64x16:
570 return rewriteValueMIPS64_OpRsh64x16(v)
571 case OpRsh64x32:
572 return rewriteValueMIPS64_OpRsh64x32(v)
573 case OpRsh64x64:
574 return rewriteValueMIPS64_OpRsh64x64(v)
575 case OpRsh64x8:
576 return rewriteValueMIPS64_OpRsh64x8(v)
577 case OpRsh8Ux16:
578 return rewriteValueMIPS64_OpRsh8Ux16(v)
579 case OpRsh8Ux32:
580 return rewriteValueMIPS64_OpRsh8Ux32(v)
581 case OpRsh8Ux64:
582 return rewriteValueMIPS64_OpRsh8Ux64(v)
583 case OpRsh8Ux8:
584 return rewriteValueMIPS64_OpRsh8Ux8(v)
585 case OpRsh8x16:
586 return rewriteValueMIPS64_OpRsh8x16(v)
587 case OpRsh8x32:
588 return rewriteValueMIPS64_OpRsh8x32(v)
589 case OpRsh8x64:
590 return rewriteValueMIPS64_OpRsh8x64(v)
591 case OpRsh8x8:
592 return rewriteValueMIPS64_OpRsh8x8(v)
593 case OpSelect0:
594 return rewriteValueMIPS64_OpSelect0(v)
595 case OpSelect1:
596 return rewriteValueMIPS64_OpSelect1(v)
597 case OpSignExt16to32:
598 v.Op = OpMIPS64MOVHreg
599 return true
600 case OpSignExt16to64:
601 v.Op = OpMIPS64MOVHreg
602 return true
603 case OpSignExt32to64:
604 v.Op = OpMIPS64MOVWreg
605 return true
606 case OpSignExt8to16:
607 v.Op = OpMIPS64MOVBreg
608 return true
609 case OpSignExt8to32:
610 v.Op = OpMIPS64MOVBreg
611 return true
612 case OpSignExt8to64:
613 v.Op = OpMIPS64MOVBreg
614 return true
615 case OpSlicemask:
616 return rewriteValueMIPS64_OpSlicemask(v)
617 case OpSqrt:
618 v.Op = OpMIPS64SQRTD
619 return true
620 case OpSqrt32:
621 v.Op = OpMIPS64SQRTF
622 return true
623 case OpStaticCall:
624 v.Op = OpMIPS64CALLstatic
625 return true
626 case OpStore:
627 return rewriteValueMIPS64_OpStore(v)
628 case OpSub16:
629 v.Op = OpMIPS64SUBV
630 return true
631 case OpSub32:
632 v.Op = OpMIPS64SUBV
633 return true
634 case OpSub32F:
635 v.Op = OpMIPS64SUBF
636 return true
637 case OpSub64:
638 v.Op = OpMIPS64SUBV
639 return true
640 case OpSub64F:
641 v.Op = OpMIPS64SUBD
642 return true
643 case OpSub8:
644 v.Op = OpMIPS64SUBV
645 return true
646 case OpSubPtr:
647 v.Op = OpMIPS64SUBV
648 return true
649 case OpTailCall:
650 v.Op = OpMIPS64CALLtail
651 return true
652 case OpTrunc16to8:
653 v.Op = OpCopy
654 return true
655 case OpTrunc32to16:
656 v.Op = OpCopy
657 return true
658 case OpTrunc32to8:
659 v.Op = OpCopy
660 return true
661 case OpTrunc64to16:
662 v.Op = OpCopy
663 return true
664 case OpTrunc64to32:
665 v.Op = OpCopy
666 return true
667 case OpTrunc64to8:
668 v.Op = OpCopy
669 return true
670 case OpWB:
671 v.Op = OpMIPS64LoweredWB
672 return true
673 case OpXor16:
674 v.Op = OpMIPS64XOR
675 return true
676 case OpXor32:
677 v.Op = OpMIPS64XOR
678 return true
679 case OpXor64:
680 v.Op = OpMIPS64XOR
681 return true
682 case OpXor8:
683 v.Op = OpMIPS64XOR
684 return true
685 case OpZero:
686 return rewriteValueMIPS64_OpZero(v)
687 case OpZeroExt16to32:
688 v.Op = OpMIPS64MOVHUreg
689 return true
690 case OpZeroExt16to64:
691 v.Op = OpMIPS64MOVHUreg
692 return true
693 case OpZeroExt32to64:
694 v.Op = OpMIPS64MOVWUreg
695 return true
696 case OpZeroExt8to16:
697 v.Op = OpMIPS64MOVBUreg
698 return true
699 case OpZeroExt8to32:
700 v.Op = OpMIPS64MOVBUreg
701 return true
702 case OpZeroExt8to64:
703 v.Op = OpMIPS64MOVBUreg
704 return true
705 }
706 return false
707 }
708 func rewriteValueMIPS64_OpAddr(v *Value) bool {
709 v_0 := v.Args[0]
710
711
712 for {
713 sym := auxToSym(v.Aux)
714 base := v_0
715 v.reset(OpMIPS64MOVVaddr)
716 v.Aux = symToAux(sym)
717 v.AddArg(base)
718 return true
719 }
720 }
721 func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
722 v_2 := v.Args[2]
723 v_1 := v.Args[1]
724 v_0 := v.Args[0]
725 b := v.Block
726 config := b.Func.Config
727 typ := &b.Func.Config.Types
728
729
730
731 for {
732 ptr := v_0
733 val := v_1
734 mem := v_2
735 if !(!config.BigEndian) {
736 break
737 }
738 v.reset(OpMIPS64LoweredAtomicAnd32)
739 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
740 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
741 v1.AuxInt = int64ToAuxInt(^3)
742 v0.AddArg2(v1, ptr)
743 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
744 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
745 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
746 v4.AddArg(val)
747 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
748 v5.AuxInt = int64ToAuxInt(3)
749 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
750 v6.AuxInt = int64ToAuxInt(3)
751 v6.AddArg(ptr)
752 v5.AddArg(v6)
753 v3.AddArg2(v4, v5)
754 v7 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
755 v7.AuxInt = int64ToAuxInt(0)
756 v8 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
757 v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
758 v9.AuxInt = int64ToAuxInt(0xff)
759 v8.AddArg2(v9, v5)
760 v7.AddArg(v8)
761 v2.AddArg2(v3, v7)
762 v.AddArg3(v0, v2, mem)
763 return true
764 }
765
766
767
768 for {
769 ptr := v_0
770 val := v_1
771 mem := v_2
772 if !(config.BigEndian) {
773 break
774 }
775 v.reset(OpMIPS64LoweredAtomicAnd32)
776 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
777 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
778 v1.AuxInt = int64ToAuxInt(^3)
779 v0.AddArg2(v1, ptr)
780 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
781 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
782 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
783 v4.AddArg(val)
784 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
785 v5.AuxInt = int64ToAuxInt(3)
786 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
787 v6.AuxInt = int64ToAuxInt(3)
788 v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
789 v7.AuxInt = int64ToAuxInt(3)
790 v7.AddArg(ptr)
791 v6.AddArg(v7)
792 v5.AddArg(v6)
793 v3.AddArg2(v4, v5)
794 v8 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
795 v8.AuxInt = int64ToAuxInt(0)
796 v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
797 v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
798 v10.AuxInt = int64ToAuxInt(0xff)
799 v9.AddArg2(v10, v5)
800 v8.AddArg(v9)
801 v2.AddArg2(v3, v8)
802 v.AddArg3(v0, v2, mem)
803 return true
804 }
805 return false
806 }
807 func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
808 v_3 := v.Args[3]
809 v_2 := v.Args[2]
810 v_1 := v.Args[1]
811 v_0 := v.Args[0]
812 b := v.Block
813 typ := &b.Func.Config.Types
814
815
816 for {
817 ptr := v_0
818 old := v_1
819 new := v_2
820 mem := v_3
821 v.reset(OpMIPS64LoweredAtomicCas32)
822 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
823 v0.AddArg(old)
824 v.AddArg4(ptr, v0, new, mem)
825 return true
826 }
827 }
828 func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
829 v_2 := v.Args[2]
830 v_1 := v.Args[1]
831 v_0 := v.Args[0]
832 b := v.Block
833 config := b.Func.Config
834 typ := &b.Func.Config.Types
835
836
837
838 for {
839 ptr := v_0
840 val := v_1
841 mem := v_2
842 if !(!config.BigEndian) {
843 break
844 }
845 v.reset(OpMIPS64LoweredAtomicOr32)
846 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
847 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
848 v1.AuxInt = int64ToAuxInt(^3)
849 v0.AddArg2(v1, ptr)
850 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
851 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
852 v3.AddArg(val)
853 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
854 v4.AuxInt = int64ToAuxInt(3)
855 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
856 v5.AuxInt = int64ToAuxInt(3)
857 v5.AddArg(ptr)
858 v4.AddArg(v5)
859 v2.AddArg2(v3, v4)
860 v.AddArg3(v0, v2, mem)
861 return true
862 }
863
864
865
866 for {
867 ptr := v_0
868 val := v_1
869 mem := v_2
870 if !(config.BigEndian) {
871 break
872 }
873 v.reset(OpMIPS64LoweredAtomicOr32)
874 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
875 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
876 v1.AuxInt = int64ToAuxInt(^3)
877 v0.AddArg2(v1, ptr)
878 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
879 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
880 v3.AddArg(val)
881 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
882 v4.AuxInt = int64ToAuxInt(3)
883 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
884 v5.AuxInt = int64ToAuxInt(3)
885 v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
886 v6.AuxInt = int64ToAuxInt(3)
887 v6.AddArg(ptr)
888 v5.AddArg(v6)
889 v4.AddArg(v5)
890 v2.AddArg2(v3, v4)
891 v.AddArg3(v0, v2, mem)
892 return true
893 }
894 return false
895 }
896 func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
897 v_1 := v.Args[1]
898 v_0 := v.Args[0]
899 b := v.Block
900
901
902 for {
903 t := v.Type
904 x := v_0
905 y := v_1
906 v.reset(OpMIPS64ADDV)
907 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
908 v0.AuxInt = int64ToAuxInt(1)
909 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
910 v1.AddArg2(x, y)
911 v0.AddArg(v1)
912 v.AddArg2(v0, y)
913 return true
914 }
915 }
916 func rewriteValueMIPS64_OpCom16(v *Value) bool {
917 v_0 := v.Args[0]
918 b := v.Block
919 typ := &b.Func.Config.Types
920
921
922 for {
923 x := v_0
924 v.reset(OpMIPS64NOR)
925 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
926 v0.AuxInt = int64ToAuxInt(0)
927 v.AddArg2(v0, x)
928 return true
929 }
930 }
931 func rewriteValueMIPS64_OpCom32(v *Value) bool {
932 v_0 := v.Args[0]
933 b := v.Block
934 typ := &b.Func.Config.Types
935
936
937 for {
938 x := v_0
939 v.reset(OpMIPS64NOR)
940 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
941 v0.AuxInt = int64ToAuxInt(0)
942 v.AddArg2(v0, x)
943 return true
944 }
945 }
946 func rewriteValueMIPS64_OpCom64(v *Value) bool {
947 v_0 := v.Args[0]
948 b := v.Block
949 typ := &b.Func.Config.Types
950
951
952 for {
953 x := v_0
954 v.reset(OpMIPS64NOR)
955 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
956 v0.AuxInt = int64ToAuxInt(0)
957 v.AddArg2(v0, x)
958 return true
959 }
960 }
961 func rewriteValueMIPS64_OpCom8(v *Value) bool {
962 v_0 := v.Args[0]
963 b := v.Block
964 typ := &b.Func.Config.Types
965
966
967 for {
968 x := v_0
969 v.reset(OpMIPS64NOR)
970 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
971 v0.AuxInt = int64ToAuxInt(0)
972 v.AddArg2(v0, x)
973 return true
974 }
975 }
976 func rewriteValueMIPS64_OpConst16(v *Value) bool {
977
978
979 for {
980 val := auxIntToInt16(v.AuxInt)
981 v.reset(OpMIPS64MOVVconst)
982 v.AuxInt = int64ToAuxInt(int64(val))
983 return true
984 }
985 }
986 func rewriteValueMIPS64_OpConst32(v *Value) bool {
987
988
989 for {
990 val := auxIntToInt32(v.AuxInt)
991 v.reset(OpMIPS64MOVVconst)
992 v.AuxInt = int64ToAuxInt(int64(val))
993 return true
994 }
995 }
996 func rewriteValueMIPS64_OpConst32F(v *Value) bool {
997
998
999 for {
1000 val := auxIntToFloat32(v.AuxInt)
1001 v.reset(OpMIPS64MOVFconst)
1002 v.AuxInt = float64ToAuxInt(float64(val))
1003 return true
1004 }
1005 }
1006 func rewriteValueMIPS64_OpConst64(v *Value) bool {
1007
1008
1009 for {
1010 val := auxIntToInt64(v.AuxInt)
1011 v.reset(OpMIPS64MOVVconst)
1012 v.AuxInt = int64ToAuxInt(int64(val))
1013 return true
1014 }
1015 }
1016 func rewriteValueMIPS64_OpConst64F(v *Value) bool {
1017
1018
1019 for {
1020 val := auxIntToFloat64(v.AuxInt)
1021 v.reset(OpMIPS64MOVDconst)
1022 v.AuxInt = float64ToAuxInt(float64(val))
1023 return true
1024 }
1025 }
1026 func rewriteValueMIPS64_OpConst8(v *Value) bool {
1027
1028
1029 for {
1030 val := auxIntToInt8(v.AuxInt)
1031 v.reset(OpMIPS64MOVVconst)
1032 v.AuxInt = int64ToAuxInt(int64(val))
1033 return true
1034 }
1035 }
1036 func rewriteValueMIPS64_OpConstBool(v *Value) bool {
1037
1038
1039 for {
1040 t := auxIntToBool(v.AuxInt)
1041 v.reset(OpMIPS64MOVVconst)
1042 v.AuxInt = int64ToAuxInt(int64(b2i(t)))
1043 return true
1044 }
1045 }
1046 func rewriteValueMIPS64_OpConstNil(v *Value) bool {
1047
1048
1049 for {
1050 v.reset(OpMIPS64MOVVconst)
1051 v.AuxInt = int64ToAuxInt(0)
1052 return true
1053 }
1054 }
1055 func rewriteValueMIPS64_OpDiv16(v *Value) bool {
1056 v_1 := v.Args[1]
1057 v_0 := v.Args[0]
1058 b := v.Block
1059 typ := &b.Func.Config.Types
1060
1061
1062 for {
1063 x := v_0
1064 y := v_1
1065 v.reset(OpSelect1)
1066 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1067 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1068 v1.AddArg(x)
1069 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1070 v2.AddArg(y)
1071 v0.AddArg2(v1, v2)
1072 v.AddArg(v0)
1073 return true
1074 }
1075 }
1076 func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
1077 v_1 := v.Args[1]
1078 v_0 := v.Args[0]
1079 b := v.Block
1080 typ := &b.Func.Config.Types
1081
1082
1083 for {
1084 x := v_0
1085 y := v_1
1086 v.reset(OpSelect1)
1087 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1088 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1089 v1.AddArg(x)
1090 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1091 v2.AddArg(y)
1092 v0.AddArg2(v1, v2)
1093 v.AddArg(v0)
1094 return true
1095 }
1096 }
1097 func rewriteValueMIPS64_OpDiv32(v *Value) bool {
1098 v_1 := v.Args[1]
1099 v_0 := v.Args[0]
1100 b := v.Block
1101 typ := &b.Func.Config.Types
1102
1103
1104 for {
1105 x := v_0
1106 y := v_1
1107 v.reset(OpSelect1)
1108 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1109 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1110 v1.AddArg(x)
1111 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1112 v2.AddArg(y)
1113 v0.AddArg2(v1, v2)
1114 v.AddArg(v0)
1115 return true
1116 }
1117 }
1118 func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
1119 v_1 := v.Args[1]
1120 v_0 := v.Args[0]
1121 b := v.Block
1122 typ := &b.Func.Config.Types
1123
1124
1125 for {
1126 x := v_0
1127 y := v_1
1128 v.reset(OpSelect1)
1129 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1130 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1131 v1.AddArg(x)
1132 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1133 v2.AddArg(y)
1134 v0.AddArg2(v1, v2)
1135 v.AddArg(v0)
1136 return true
1137 }
1138 }
1139 func rewriteValueMIPS64_OpDiv64(v *Value) bool {
1140 v_1 := v.Args[1]
1141 v_0 := v.Args[0]
1142 b := v.Block
1143 typ := &b.Func.Config.Types
1144
1145
1146 for {
1147 x := v_0
1148 y := v_1
1149 v.reset(OpSelect1)
1150 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1151 v0.AddArg2(x, y)
1152 v.AddArg(v0)
1153 return true
1154 }
1155 }
1156 func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
1157 v_1 := v.Args[1]
1158 v_0 := v.Args[0]
1159 b := v.Block
1160 typ := &b.Func.Config.Types
1161
1162
1163 for {
1164 x := v_0
1165 y := v_1
1166 v.reset(OpSelect1)
1167 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1168 v0.AddArg2(x, y)
1169 v.AddArg(v0)
1170 return true
1171 }
1172 }
1173 func rewriteValueMIPS64_OpDiv8(v *Value) bool {
1174 v_1 := v.Args[1]
1175 v_0 := v.Args[0]
1176 b := v.Block
1177 typ := &b.Func.Config.Types
1178
1179
1180 for {
1181 x := v_0
1182 y := v_1
1183 v.reset(OpSelect1)
1184 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1185 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1186 v1.AddArg(x)
1187 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1188 v2.AddArg(y)
1189 v0.AddArg2(v1, v2)
1190 v.AddArg(v0)
1191 return true
1192 }
1193 }
1194 func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
1195 v_1 := v.Args[1]
1196 v_0 := v.Args[0]
1197 b := v.Block
1198 typ := &b.Func.Config.Types
1199
1200
1201 for {
1202 x := v_0
1203 y := v_1
1204 v.reset(OpSelect1)
1205 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1206 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1207 v1.AddArg(x)
1208 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1209 v2.AddArg(y)
1210 v0.AddArg2(v1, v2)
1211 v.AddArg(v0)
1212 return true
1213 }
1214 }
1215 func rewriteValueMIPS64_OpEq16(v *Value) bool {
1216 v_1 := v.Args[1]
1217 v_0 := v.Args[0]
1218 b := v.Block
1219 typ := &b.Func.Config.Types
1220
1221
1222 for {
1223 x := v_0
1224 y := v_1
1225 v.reset(OpMIPS64SGTU)
1226 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1227 v0.AuxInt = int64ToAuxInt(1)
1228 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1229 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1230 v2.AddArg(x)
1231 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1232 v3.AddArg(y)
1233 v1.AddArg2(v2, v3)
1234 v.AddArg2(v0, v1)
1235 return true
1236 }
1237 }
1238 func rewriteValueMIPS64_OpEq32(v *Value) bool {
1239 v_1 := v.Args[1]
1240 v_0 := v.Args[0]
1241 b := v.Block
1242 typ := &b.Func.Config.Types
1243
1244
1245 for {
1246 x := v_0
1247 y := v_1
1248 v.reset(OpMIPS64SGTU)
1249 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1250 v0.AuxInt = int64ToAuxInt(1)
1251 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1252 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1253 v2.AddArg(x)
1254 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1255 v3.AddArg(y)
1256 v1.AddArg2(v2, v3)
1257 v.AddArg2(v0, v1)
1258 return true
1259 }
1260 }
1261 func rewriteValueMIPS64_OpEq32F(v *Value) bool {
1262 v_1 := v.Args[1]
1263 v_0 := v.Args[0]
1264 b := v.Block
1265
1266
1267 for {
1268 x := v_0
1269 y := v_1
1270 v.reset(OpMIPS64FPFlagTrue)
1271 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
1272 v0.AddArg2(x, y)
1273 v.AddArg(v0)
1274 return true
1275 }
1276 }
1277 func rewriteValueMIPS64_OpEq64(v *Value) bool {
1278 v_1 := v.Args[1]
1279 v_0 := v.Args[0]
1280 b := v.Block
1281 typ := &b.Func.Config.Types
1282
1283
1284 for {
1285 x := v_0
1286 y := v_1
1287 v.reset(OpMIPS64SGTU)
1288 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1289 v0.AuxInt = int64ToAuxInt(1)
1290 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1291 v1.AddArg2(x, y)
1292 v.AddArg2(v0, v1)
1293 return true
1294 }
1295 }
1296 func rewriteValueMIPS64_OpEq64F(v *Value) bool {
1297 v_1 := v.Args[1]
1298 v_0 := v.Args[0]
1299 b := v.Block
1300
1301
1302 for {
1303 x := v_0
1304 y := v_1
1305 v.reset(OpMIPS64FPFlagTrue)
1306 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
1307 v0.AddArg2(x, y)
1308 v.AddArg(v0)
1309 return true
1310 }
1311 }
1312 func rewriteValueMIPS64_OpEq8(v *Value) bool {
1313 v_1 := v.Args[1]
1314 v_0 := v.Args[0]
1315 b := v.Block
1316 typ := &b.Func.Config.Types
1317
1318
1319 for {
1320 x := v_0
1321 y := v_1
1322 v.reset(OpMIPS64SGTU)
1323 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1324 v0.AuxInt = int64ToAuxInt(1)
1325 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1326 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1327 v2.AddArg(x)
1328 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1329 v3.AddArg(y)
1330 v1.AddArg2(v2, v3)
1331 v.AddArg2(v0, v1)
1332 return true
1333 }
1334 }
1335 func rewriteValueMIPS64_OpEqB(v *Value) bool {
1336 v_1 := v.Args[1]
1337 v_0 := v.Args[0]
1338 b := v.Block
1339 typ := &b.Func.Config.Types
1340
1341
1342 for {
1343 x := v_0
1344 y := v_1
1345 v.reset(OpMIPS64XOR)
1346 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1347 v0.AuxInt = int64ToAuxInt(1)
1348 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
1349 v1.AddArg2(x, y)
1350 v.AddArg2(v0, v1)
1351 return true
1352 }
1353 }
1354 func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
1355 v_1 := v.Args[1]
1356 v_0 := v.Args[0]
1357 b := v.Block
1358 typ := &b.Func.Config.Types
1359
1360
1361 for {
1362 x := v_0
1363 y := v_1
1364 v.reset(OpMIPS64SGTU)
1365 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1366 v0.AuxInt = int64ToAuxInt(1)
1367 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1368 v1.AddArg2(x, y)
1369 v.AddArg2(v0, v1)
1370 return true
1371 }
1372 }
1373 func rewriteValueMIPS64_OpHmul32(v *Value) bool {
1374 v_1 := v.Args[1]
1375 v_0 := v.Args[0]
1376 b := v.Block
1377 typ := &b.Func.Config.Types
1378
1379
1380 for {
1381 x := v_0
1382 y := v_1
1383 v.reset(OpMIPS64SRAVconst)
1384 v.AuxInt = int64ToAuxInt(32)
1385 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
1386 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1387 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1388 v2.AddArg(x)
1389 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1390 v3.AddArg(y)
1391 v1.AddArg2(v2, v3)
1392 v0.AddArg(v1)
1393 v.AddArg(v0)
1394 return true
1395 }
1396 }
1397 func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
1398 v_1 := v.Args[1]
1399 v_0 := v.Args[0]
1400 b := v.Block
1401 typ := &b.Func.Config.Types
1402
1403
1404 for {
1405 x := v_0
1406 y := v_1
1407 v.reset(OpMIPS64SRLVconst)
1408 v.AuxInt = int64ToAuxInt(32)
1409 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
1410 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1411 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1412 v2.AddArg(x)
1413 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1414 v3.AddArg(y)
1415 v1.AddArg2(v2, v3)
1416 v0.AddArg(v1)
1417 v.AddArg(v0)
1418 return true
1419 }
1420 }
1421 func rewriteValueMIPS64_OpHmul64(v *Value) bool {
1422 v_1 := v.Args[1]
1423 v_0 := v.Args[0]
1424 b := v.Block
1425 typ := &b.Func.Config.Types
1426
1427
1428 for {
1429 x := v_0
1430 y := v_1
1431 v.reset(OpSelect0)
1432 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1433 v0.AddArg2(x, y)
1434 v.AddArg(v0)
1435 return true
1436 }
1437 }
1438 func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
1439 v_1 := v.Args[1]
1440 v_0 := v.Args[0]
1441 b := v.Block
1442 typ := &b.Func.Config.Types
1443
1444
1445 for {
1446 x := v_0
1447 y := v_1
1448 v.reset(OpSelect0)
1449 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1450 v0.AddArg2(x, y)
1451 v.AddArg(v0)
1452 return true
1453 }
1454 }
1455 func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
1456 v_1 := v.Args[1]
1457 v_0 := v.Args[0]
1458
1459
1460 for {
1461 idx := v_0
1462 len := v_1
1463 v.reset(OpMIPS64SGTU)
1464 v.AddArg2(len, idx)
1465 return true
1466 }
1467 }
1468 func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
1469 v_0 := v.Args[0]
1470 b := v.Block
1471 typ := &b.Func.Config.Types
1472
1473
1474 for {
1475 ptr := v_0
1476 v.reset(OpMIPS64SGTU)
1477 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1478 v0.AuxInt = int64ToAuxInt(0)
1479 v.AddArg2(ptr, v0)
1480 return true
1481 }
1482 }
1483 func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
1484 v_1 := v.Args[1]
1485 v_0 := v.Args[0]
1486 b := v.Block
1487 typ := &b.Func.Config.Types
1488
1489
1490 for {
1491 idx := v_0
1492 len := v_1
1493 v.reset(OpMIPS64XOR)
1494 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1495 v0.AuxInt = int64ToAuxInt(1)
1496 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1497 v1.AddArg2(idx, len)
1498 v.AddArg2(v0, v1)
1499 return true
1500 }
1501 }
1502 func rewriteValueMIPS64_OpLeq16(v *Value) bool {
1503 v_1 := v.Args[1]
1504 v_0 := v.Args[0]
1505 b := v.Block
1506 typ := &b.Func.Config.Types
1507
1508
1509 for {
1510 x := v_0
1511 y := v_1
1512 v.reset(OpMIPS64XOR)
1513 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1514 v0.AuxInt = int64ToAuxInt(1)
1515 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1516 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1517 v2.AddArg(x)
1518 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1519 v3.AddArg(y)
1520 v1.AddArg2(v2, v3)
1521 v.AddArg2(v0, v1)
1522 return true
1523 }
1524 }
1525 func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
1526 v_1 := v.Args[1]
1527 v_0 := v.Args[0]
1528 b := v.Block
1529 typ := &b.Func.Config.Types
1530
1531
1532 for {
1533 x := v_0
1534 y := v_1
1535 v.reset(OpMIPS64XOR)
1536 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1537 v0.AuxInt = int64ToAuxInt(1)
1538 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1539 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1540 v2.AddArg(x)
1541 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1542 v3.AddArg(y)
1543 v1.AddArg2(v2, v3)
1544 v.AddArg2(v0, v1)
1545 return true
1546 }
1547 }
1548 func rewriteValueMIPS64_OpLeq32(v *Value) bool {
1549 v_1 := v.Args[1]
1550 v_0 := v.Args[0]
1551 b := v.Block
1552 typ := &b.Func.Config.Types
1553
1554
1555 for {
1556 x := v_0
1557 y := v_1
1558 v.reset(OpMIPS64XOR)
1559 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1560 v0.AuxInt = int64ToAuxInt(1)
1561 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1562 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1563 v2.AddArg(x)
1564 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1565 v3.AddArg(y)
1566 v1.AddArg2(v2, v3)
1567 v.AddArg2(v0, v1)
1568 return true
1569 }
1570 }
1571 func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
1572 v_1 := v.Args[1]
1573 v_0 := v.Args[0]
1574 b := v.Block
1575
1576
1577 for {
1578 x := v_0
1579 y := v_1
1580 v.reset(OpMIPS64FPFlagTrue)
1581 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
1582 v0.AddArg2(y, x)
1583 v.AddArg(v0)
1584 return true
1585 }
1586 }
1587 func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
1588 v_1 := v.Args[1]
1589 v_0 := v.Args[0]
1590 b := v.Block
1591 typ := &b.Func.Config.Types
1592
1593
1594 for {
1595 x := v_0
1596 y := v_1
1597 v.reset(OpMIPS64XOR)
1598 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1599 v0.AuxInt = int64ToAuxInt(1)
1600 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1601 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1602 v2.AddArg(x)
1603 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1604 v3.AddArg(y)
1605 v1.AddArg2(v2, v3)
1606 v.AddArg2(v0, v1)
1607 return true
1608 }
1609 }
1610 func rewriteValueMIPS64_OpLeq64(v *Value) bool {
1611 v_1 := v.Args[1]
1612 v_0 := v.Args[0]
1613 b := v.Block
1614 typ := &b.Func.Config.Types
1615
1616
1617 for {
1618 x := v_0
1619 y := v_1
1620 v.reset(OpMIPS64XOR)
1621 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1622 v0.AuxInt = int64ToAuxInt(1)
1623 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1624 v1.AddArg2(x, y)
1625 v.AddArg2(v0, v1)
1626 return true
1627 }
1628 }
1629 func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
1630 v_1 := v.Args[1]
1631 v_0 := v.Args[0]
1632 b := v.Block
1633
1634
1635 for {
1636 x := v_0
1637 y := v_1
1638 v.reset(OpMIPS64FPFlagTrue)
1639 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
1640 v0.AddArg2(y, x)
1641 v.AddArg(v0)
1642 return true
1643 }
1644 }
1645 func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
1646 v_1 := v.Args[1]
1647 v_0 := v.Args[0]
1648 b := v.Block
1649 typ := &b.Func.Config.Types
1650
1651
1652 for {
1653 x := v_0
1654 y := v_1
1655 v.reset(OpMIPS64XOR)
1656 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1657 v0.AuxInt = int64ToAuxInt(1)
1658 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1659 v1.AddArg2(x, y)
1660 v.AddArg2(v0, v1)
1661 return true
1662 }
1663 }
1664 func rewriteValueMIPS64_OpLeq8(v *Value) bool {
1665 v_1 := v.Args[1]
1666 v_0 := v.Args[0]
1667 b := v.Block
1668 typ := &b.Func.Config.Types
1669
1670
1671 for {
1672 x := v_0
1673 y := v_1
1674 v.reset(OpMIPS64XOR)
1675 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1676 v0.AuxInt = int64ToAuxInt(1)
1677 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1678 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1679 v2.AddArg(x)
1680 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1681 v3.AddArg(y)
1682 v1.AddArg2(v2, v3)
1683 v.AddArg2(v0, v1)
1684 return true
1685 }
1686 }
1687 func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
1688 v_1 := v.Args[1]
1689 v_0 := v.Args[0]
1690 b := v.Block
1691 typ := &b.Func.Config.Types
1692
1693
1694 for {
1695 x := v_0
1696 y := v_1
1697 v.reset(OpMIPS64XOR)
1698 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1699 v0.AuxInt = int64ToAuxInt(1)
1700 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1701 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1702 v2.AddArg(x)
1703 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1704 v3.AddArg(y)
1705 v1.AddArg2(v2, v3)
1706 v.AddArg2(v0, v1)
1707 return true
1708 }
1709 }
1710 func rewriteValueMIPS64_OpLess16(v *Value) bool {
1711 v_1 := v.Args[1]
1712 v_0 := v.Args[0]
1713 b := v.Block
1714 typ := &b.Func.Config.Types
1715
1716
1717 for {
1718 x := v_0
1719 y := v_1
1720 v.reset(OpMIPS64SGT)
1721 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1722 v0.AddArg(y)
1723 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1724 v1.AddArg(x)
1725 v.AddArg2(v0, v1)
1726 return true
1727 }
1728 }
1729 func rewriteValueMIPS64_OpLess16U(v *Value) bool {
1730 v_1 := v.Args[1]
1731 v_0 := v.Args[0]
1732 b := v.Block
1733 typ := &b.Func.Config.Types
1734
1735
1736 for {
1737 x := v_0
1738 y := v_1
1739 v.reset(OpMIPS64SGTU)
1740 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1741 v0.AddArg(y)
1742 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1743 v1.AddArg(x)
1744 v.AddArg2(v0, v1)
1745 return true
1746 }
1747 }
1748 func rewriteValueMIPS64_OpLess32(v *Value) bool {
1749 v_1 := v.Args[1]
1750 v_0 := v.Args[0]
1751 b := v.Block
1752 typ := &b.Func.Config.Types
1753
1754
1755 for {
1756 x := v_0
1757 y := v_1
1758 v.reset(OpMIPS64SGT)
1759 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1760 v0.AddArg(y)
1761 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1762 v1.AddArg(x)
1763 v.AddArg2(v0, v1)
1764 return true
1765 }
1766 }
1767 func rewriteValueMIPS64_OpLess32F(v *Value) bool {
1768 v_1 := v.Args[1]
1769 v_0 := v.Args[0]
1770 b := v.Block
1771
1772
1773 for {
1774 x := v_0
1775 y := v_1
1776 v.reset(OpMIPS64FPFlagTrue)
1777 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
1778 v0.AddArg2(y, x)
1779 v.AddArg(v0)
1780 return true
1781 }
1782 }
1783 func rewriteValueMIPS64_OpLess32U(v *Value) bool {
1784 v_1 := v.Args[1]
1785 v_0 := v.Args[0]
1786 b := v.Block
1787 typ := &b.Func.Config.Types
1788
1789
1790 for {
1791 x := v_0
1792 y := v_1
1793 v.reset(OpMIPS64SGTU)
1794 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1795 v0.AddArg(y)
1796 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1797 v1.AddArg(x)
1798 v.AddArg2(v0, v1)
1799 return true
1800 }
1801 }
1802 func rewriteValueMIPS64_OpLess64(v *Value) bool {
1803 v_1 := v.Args[1]
1804 v_0 := v.Args[0]
1805
1806
1807 for {
1808 x := v_0
1809 y := v_1
1810 v.reset(OpMIPS64SGT)
1811 v.AddArg2(y, x)
1812 return true
1813 }
1814 }
1815 func rewriteValueMIPS64_OpLess64F(v *Value) bool {
1816 v_1 := v.Args[1]
1817 v_0 := v.Args[0]
1818 b := v.Block
1819
1820
1821 for {
1822 x := v_0
1823 y := v_1
1824 v.reset(OpMIPS64FPFlagTrue)
1825 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
1826 v0.AddArg2(y, x)
1827 v.AddArg(v0)
1828 return true
1829 }
1830 }
1831 func rewriteValueMIPS64_OpLess64U(v *Value) bool {
1832 v_1 := v.Args[1]
1833 v_0 := v.Args[0]
1834
1835
1836 for {
1837 x := v_0
1838 y := v_1
1839 v.reset(OpMIPS64SGTU)
1840 v.AddArg2(y, x)
1841 return true
1842 }
1843 }
1844 func rewriteValueMIPS64_OpLess8(v *Value) bool {
1845 v_1 := v.Args[1]
1846 v_0 := v.Args[0]
1847 b := v.Block
1848 typ := &b.Func.Config.Types
1849
1850
1851 for {
1852 x := v_0
1853 y := v_1
1854 v.reset(OpMIPS64SGT)
1855 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1856 v0.AddArg(y)
1857 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1858 v1.AddArg(x)
1859 v.AddArg2(v0, v1)
1860 return true
1861 }
1862 }
1863 func rewriteValueMIPS64_OpLess8U(v *Value) bool {
1864 v_1 := v.Args[1]
1865 v_0 := v.Args[0]
1866 b := v.Block
1867 typ := &b.Func.Config.Types
1868
1869
1870 for {
1871 x := v_0
1872 y := v_1
1873 v.reset(OpMIPS64SGTU)
1874 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1875 v0.AddArg(y)
1876 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1877 v1.AddArg(x)
1878 v.AddArg2(v0, v1)
1879 return true
1880 }
1881 }
1882 func rewriteValueMIPS64_OpLoad(v *Value) bool {
1883 v_1 := v.Args[1]
1884 v_0 := v.Args[0]
1885
1886
1887
1888 for {
1889 t := v.Type
1890 ptr := v_0
1891 mem := v_1
1892 if !(t.IsBoolean()) {
1893 break
1894 }
1895 v.reset(OpMIPS64MOVBUload)
1896 v.AddArg2(ptr, mem)
1897 return true
1898 }
1899
1900
1901
1902 for {
1903 t := v.Type
1904 ptr := v_0
1905 mem := v_1
1906 if !(is8BitInt(t) && t.IsSigned()) {
1907 break
1908 }
1909 v.reset(OpMIPS64MOVBload)
1910 v.AddArg2(ptr, mem)
1911 return true
1912 }
1913
1914
1915
1916 for {
1917 t := v.Type
1918 ptr := v_0
1919 mem := v_1
1920 if !(is8BitInt(t) && !t.IsSigned()) {
1921 break
1922 }
1923 v.reset(OpMIPS64MOVBUload)
1924 v.AddArg2(ptr, mem)
1925 return true
1926 }
1927
1928
1929
1930 for {
1931 t := v.Type
1932 ptr := v_0
1933 mem := v_1
1934 if !(is16BitInt(t) && t.IsSigned()) {
1935 break
1936 }
1937 v.reset(OpMIPS64MOVHload)
1938 v.AddArg2(ptr, mem)
1939 return true
1940 }
1941
1942
1943
1944 for {
1945 t := v.Type
1946 ptr := v_0
1947 mem := v_1
1948 if !(is16BitInt(t) && !t.IsSigned()) {
1949 break
1950 }
1951 v.reset(OpMIPS64MOVHUload)
1952 v.AddArg2(ptr, mem)
1953 return true
1954 }
1955
1956
1957
1958 for {
1959 t := v.Type
1960 ptr := v_0
1961 mem := v_1
1962 if !(is32BitInt(t) && t.IsSigned()) {
1963 break
1964 }
1965 v.reset(OpMIPS64MOVWload)
1966 v.AddArg2(ptr, mem)
1967 return true
1968 }
1969
1970
1971
1972 for {
1973 t := v.Type
1974 ptr := v_0
1975 mem := v_1
1976 if !(is32BitInt(t) && !t.IsSigned()) {
1977 break
1978 }
1979 v.reset(OpMIPS64MOVWUload)
1980 v.AddArg2(ptr, mem)
1981 return true
1982 }
1983
1984
1985
1986 for {
1987 t := v.Type
1988 ptr := v_0
1989 mem := v_1
1990 if !(is64BitInt(t) || isPtr(t)) {
1991 break
1992 }
1993 v.reset(OpMIPS64MOVVload)
1994 v.AddArg2(ptr, mem)
1995 return true
1996 }
1997
1998
1999
2000 for {
2001 t := v.Type
2002 ptr := v_0
2003 mem := v_1
2004 if !(is32BitFloat(t)) {
2005 break
2006 }
2007 v.reset(OpMIPS64MOVFload)
2008 v.AddArg2(ptr, mem)
2009 return true
2010 }
2011
2012
2013
2014 for {
2015 t := v.Type
2016 ptr := v_0
2017 mem := v_1
2018 if !(is64BitFloat(t)) {
2019 break
2020 }
2021 v.reset(OpMIPS64MOVDload)
2022 v.AddArg2(ptr, mem)
2023 return true
2024 }
2025 return false
2026 }
2027 func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
2028 v_1 := v.Args[1]
2029 v_0 := v.Args[0]
2030 b := v.Block
2031 typ := &b.Func.Config.Types
2032
2033
2034
2035 for {
2036 t := v.Type
2037 sym := auxToSym(v.Aux)
2038 base := v_0
2039 mem := v_1
2040 if !(t.Elem().HasPointers()) {
2041 break
2042 }
2043 v.reset(OpMIPS64MOVVaddr)
2044 v.Aux = symToAux(sym)
2045 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
2046 v0.AddArg2(base, mem)
2047 v.AddArg(v0)
2048 return true
2049 }
2050
2051
2052
2053 for {
2054 t := v.Type
2055 sym := auxToSym(v.Aux)
2056 base := v_0
2057 if !(!t.Elem().HasPointers()) {
2058 break
2059 }
2060 v.reset(OpMIPS64MOVVaddr)
2061 v.Aux = symToAux(sym)
2062 v.AddArg(base)
2063 return true
2064 }
2065 return false
2066 }
2067 func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
2068 v_1 := v.Args[1]
2069 v_0 := v.Args[0]
2070 b := v.Block
2071 typ := &b.Func.Config.Types
2072
2073
2074 for {
2075 t := v.Type
2076 x := v_0
2077 y := v_1
2078 v.reset(OpMIPS64AND)
2079 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2080 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2081 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2082 v2.AuxInt = int64ToAuxInt(64)
2083 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2084 v3.AddArg(y)
2085 v1.AddArg2(v2, v3)
2086 v0.AddArg(v1)
2087 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2088 v4.AddArg2(x, v3)
2089 v.AddArg2(v0, v4)
2090 return true
2091 }
2092 }
2093 func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
2094 v_1 := v.Args[1]
2095 v_0 := v.Args[0]
2096 b := v.Block
2097 typ := &b.Func.Config.Types
2098
2099
2100 for {
2101 t := v.Type
2102 x := v_0
2103 y := v_1
2104 v.reset(OpMIPS64AND)
2105 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2106 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2107 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2108 v2.AuxInt = int64ToAuxInt(64)
2109 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2110 v3.AddArg(y)
2111 v1.AddArg2(v2, v3)
2112 v0.AddArg(v1)
2113 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2114 v4.AddArg2(x, v3)
2115 v.AddArg2(v0, v4)
2116 return true
2117 }
2118 }
2119 func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
2120 v_1 := v.Args[1]
2121 v_0 := v.Args[0]
2122 b := v.Block
2123 typ := &b.Func.Config.Types
2124
2125
2126 for {
2127 t := v.Type
2128 x := v_0
2129 y := v_1
2130 v.reset(OpMIPS64AND)
2131 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2132 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2133 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2134 v2.AuxInt = int64ToAuxInt(64)
2135 v1.AddArg2(v2, y)
2136 v0.AddArg(v1)
2137 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2138 v3.AddArg2(x, y)
2139 v.AddArg2(v0, v3)
2140 return true
2141 }
2142 }
2143 func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
2144 v_1 := v.Args[1]
2145 v_0 := v.Args[0]
2146 b := v.Block
2147 typ := &b.Func.Config.Types
2148
2149
2150 for {
2151 t := v.Type
2152 x := v_0
2153 y := v_1
2154 v.reset(OpMIPS64AND)
2155 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2156 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2157 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2158 v2.AuxInt = int64ToAuxInt(64)
2159 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2160 v3.AddArg(y)
2161 v1.AddArg2(v2, v3)
2162 v0.AddArg(v1)
2163 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2164 v4.AddArg2(x, v3)
2165 v.AddArg2(v0, v4)
2166 return true
2167 }
2168 }
2169 func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
2170 v_1 := v.Args[1]
2171 v_0 := v.Args[0]
2172 b := v.Block
2173 typ := &b.Func.Config.Types
2174
2175
2176 for {
2177 t := v.Type
2178 x := v_0
2179 y := v_1
2180 v.reset(OpMIPS64AND)
2181 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2182 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2183 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2184 v2.AuxInt = int64ToAuxInt(64)
2185 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2186 v3.AddArg(y)
2187 v1.AddArg2(v2, v3)
2188 v0.AddArg(v1)
2189 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2190 v4.AddArg2(x, v3)
2191 v.AddArg2(v0, v4)
2192 return true
2193 }
2194 }
2195 func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
2196 v_1 := v.Args[1]
2197 v_0 := v.Args[0]
2198 b := v.Block
2199 typ := &b.Func.Config.Types
2200
2201
2202 for {
2203 t := v.Type
2204 x := v_0
2205 y := v_1
2206 v.reset(OpMIPS64AND)
2207 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2208 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2209 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2210 v2.AuxInt = int64ToAuxInt(64)
2211 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2212 v3.AddArg(y)
2213 v1.AddArg2(v2, v3)
2214 v0.AddArg(v1)
2215 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2216 v4.AddArg2(x, v3)
2217 v.AddArg2(v0, v4)
2218 return true
2219 }
2220 }
2221 func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
2222 v_1 := v.Args[1]
2223 v_0 := v.Args[0]
2224 b := v.Block
2225 typ := &b.Func.Config.Types
2226
2227
2228 for {
2229 t := v.Type
2230 x := v_0
2231 y := v_1
2232 v.reset(OpMIPS64AND)
2233 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2234 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2235 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2236 v2.AuxInt = int64ToAuxInt(64)
2237 v1.AddArg2(v2, y)
2238 v0.AddArg(v1)
2239 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2240 v3.AddArg2(x, y)
2241 v.AddArg2(v0, v3)
2242 return true
2243 }
2244 }
2245 func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
2246 v_1 := v.Args[1]
2247 v_0 := v.Args[0]
2248 b := v.Block
2249 typ := &b.Func.Config.Types
2250
2251
2252 for {
2253 t := v.Type
2254 x := v_0
2255 y := v_1
2256 v.reset(OpMIPS64AND)
2257 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2258 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2259 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2260 v2.AuxInt = int64ToAuxInt(64)
2261 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2262 v3.AddArg(y)
2263 v1.AddArg2(v2, v3)
2264 v0.AddArg(v1)
2265 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2266 v4.AddArg2(x, v3)
2267 v.AddArg2(v0, v4)
2268 return true
2269 }
2270 }
2271 func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
2272 v_1 := v.Args[1]
2273 v_0 := v.Args[0]
2274 b := v.Block
2275 typ := &b.Func.Config.Types
2276
2277
2278 for {
2279 t := v.Type
2280 x := v_0
2281 y := v_1
2282 v.reset(OpMIPS64AND)
2283 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2284 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2285 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2286 v2.AuxInt = int64ToAuxInt(64)
2287 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2288 v3.AddArg(y)
2289 v1.AddArg2(v2, v3)
2290 v0.AddArg(v1)
2291 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2292 v4.AddArg2(x, v3)
2293 v.AddArg2(v0, v4)
2294 return true
2295 }
2296 }
2297 func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
2298 v_1 := v.Args[1]
2299 v_0 := v.Args[0]
2300 b := v.Block
2301 typ := &b.Func.Config.Types
2302 // match: (Lsh64x32 <t> x y)
2303 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2304 for {
2305 t := v.Type
2306 x := v_0
2307 y := v_1
2308 v.reset(OpMIPS64AND)
2309 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2310 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2311 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2312 v2.AuxInt = int64ToAuxInt(64)
2313 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2314 v3.AddArg(y)
2315 v1.AddArg2(v2, v3)
2316 v0.AddArg(v1)
2317 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2318 v4.AddArg2(x, v3)
2319 v.AddArg2(v0, v4)
2320 return true
2321 }
2322 }
2323 func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
2324 v_1 := v.Args[1]
2325 v_0 := v.Args[0]
2326 b := v.Block
2327 typ := &b.Func.Config.Types
2328 // match: (Lsh64x64 <t> x y)
2329 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2330 for {
2331 t := v.Type
2332 x := v_0
2333 y := v_1
2334 v.reset(OpMIPS64AND)
2335 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2336 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2337 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2338 v2.AuxInt = int64ToAuxInt(64)
2339 v1.AddArg2(v2, y)
2340 v0.AddArg(v1)
2341 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2342 v3.AddArg2(x, y)
2343 v.AddArg2(v0, v3)
2344 return true
2345 }
2346 }
2347 func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
2348 v_1 := v.Args[1]
2349 v_0 := v.Args[0]
2350 b := v.Block
2351 typ := &b.Func.Config.Types
2352 // match: (Lsh64x8 <t> x y)
2353 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2354 for {
2355 t := v.Type
2356 x := v_0
2357 y := v_1
2358 v.reset(OpMIPS64AND)
2359 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2360 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2361 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2362 v2.AuxInt = int64ToAuxInt(64)
2363 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2364 v3.AddArg(y)
2365 v1.AddArg2(v2, v3)
2366 v0.AddArg(v1)
2367 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2368 v4.AddArg2(x, v3)
2369 v.AddArg2(v0, v4)
2370 return true
2371 }
2372 }
2373 func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
2374 v_1 := v.Args[1]
2375 v_0 := v.Args[0]
2376 b := v.Block
2377 typ := &b.Func.Config.Types
2378 // match: (Lsh8x16 <t> x y)
2379 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
2380 for {
2381 t := v.Type
2382 x := v_0
2383 y := v_1
2384 v.reset(OpMIPS64AND)
2385 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2386 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2387 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2388 v2.AuxInt = int64ToAuxInt(64)
2389 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2390 v3.AddArg(y)
2391 v1.AddArg2(v2, v3)
2392 v0.AddArg(v1)
2393 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2394 v4.AddArg2(x, v3)
2395 v.AddArg2(v0, v4)
2396 return true
2397 }
2398 }
2399 func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
2400 v_1 := v.Args[1]
2401 v_0 := v.Args[0]
2402 b := v.Block
2403 typ := &b.Func.Config.Types
2404 // match: (Lsh8x32 <t> x y)
2405 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
2406 for {
2407 t := v.Type
2408 x := v_0
2409 y := v_1
2410 v.reset(OpMIPS64AND)
2411 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2412 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2413 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2414 v2.AuxInt = int64ToAuxInt(64)
2415 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2416 v3.AddArg(y)
2417 v1.AddArg2(v2, v3)
2418 v0.AddArg(v1)
2419 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2420 v4.AddArg2(x, v3)
2421 v.AddArg2(v0, v4)
2422 return true
2423 }
2424 }
2425 func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
2426 v_1 := v.Args[1]
2427 v_0 := v.Args[0]
2428 b := v.Block
2429 typ := &b.Func.Config.Types
2430 // match: (Lsh8x64 <t> x y)
2431 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
2432 for {
2433 t := v.Type
2434 x := v_0
2435 y := v_1
2436 v.reset(OpMIPS64AND)
2437 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2438 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2439 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2440 v2.AuxInt = int64ToAuxInt(64)
2441 v1.AddArg2(v2, y)
2442 v0.AddArg(v1)
2443 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2444 v3.AddArg2(x, y)
2445 v.AddArg2(v0, v3)
2446 return true
2447 }
2448 }
2449 func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
2450 v_1 := v.Args[1]
2451 v_0 := v.Args[0]
2452 b := v.Block
2453 typ := &b.Func.Config.Types
2454 // match: (Lsh8x8 <t> x y)
2455 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
2456 for {
2457 t := v.Type
2458 x := v_0
2459 y := v_1
2460 v.reset(OpMIPS64AND)
2461 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2462 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2463 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2464 v2.AuxInt = int64ToAuxInt(64)
2465 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2466 v3.AddArg(y)
2467 v1.AddArg2(v2, v3)
2468 v0.AddArg(v1)
2469 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2470 v4.AddArg2(x, v3)
2471 v.AddArg2(v0, v4)
2472 return true
2473 }
2474 }
2475 func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
2476 v_1 := v.Args[1]
2477 v_0 := v.Args[0]
2478 // match: (ADDV x (MOVVconst <t> [c]))
2479 // cond: is32Bit(c) && !t.IsPtr()
2480 // result: (ADDVconst [c] x)
2481 for {
2482 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2483 x := v_0
2484 if v_1.Op != OpMIPS64MOVVconst {
2485 continue
2486 }
2487 t := v_1.Type
2488 c := auxIntToInt64(v_1.AuxInt)
2489 if !(is32Bit(c) && !t.IsPtr()) {
2490 continue
2491 }
2492 v.reset(OpMIPS64ADDVconst)
2493 v.AuxInt = int64ToAuxInt(c)
2494 v.AddArg(x)
2495 return true
2496 }
2497 break
2498 }
2499 // match: (ADDV x (NEGV y))
2500 // result: (SUBV x y)
2501 for {
2502 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2503 x := v_0
2504 if v_1.Op != OpMIPS64NEGV {
2505 continue
2506 }
2507 y := v_1.Args[0]
2508 v.reset(OpMIPS64SUBV)
2509 v.AddArg2(x, y)
2510 return true
2511 }
2512 break
2513 }
2514 return false
2515 }
2516 func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
2517 v_0 := v.Args[0]
2518 // match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
2519 // cond: is32Bit(off1 + int64(off2))
2520 // result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
2521 for {
2522 off1 := auxIntToInt64(v.AuxInt)
2523 if v_0.Op != OpMIPS64MOVVaddr {
2524 break
2525 }
2526 off2 := auxIntToInt32(v_0.AuxInt)
2527 sym := auxToSym(v_0.Aux)
2528 ptr := v_0.Args[0]
2529 if !(is32Bit(off1 + int64(off2))) {
2530 break
2531 }
2532 v.reset(OpMIPS64MOVVaddr)
2533 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
2534 v.Aux = symToAux(sym)
2535 v.AddArg(ptr)
2536 return true
2537 }
2538 // match: (ADDVconst [0] x)
2539 // result: x
2540 for {
2541 if auxIntToInt64(v.AuxInt) != 0 {
2542 break
2543 }
2544 x := v_0
2545 v.copyOf(x)
2546 return true
2547 }
2548 // match: (ADDVconst [c] (MOVVconst [d]))
2549 // result: (MOVVconst [c+d])
2550 for {
2551 c := auxIntToInt64(v.AuxInt)
2552 if v_0.Op != OpMIPS64MOVVconst {
2553 break
2554 }
2555 d := auxIntToInt64(v_0.AuxInt)
2556 v.reset(OpMIPS64MOVVconst)
2557 v.AuxInt = int64ToAuxInt(c + d)
2558 return true
2559 }
2560 // match: (ADDVconst [c] (ADDVconst [d] x))
2561 // cond: is32Bit(c + d)
2562 // result: (ADDVconst [c+d] x)
2563 for {
2564 c := auxIntToInt64(v.AuxInt)
2565 if v_0.Op != OpMIPS64ADDVconst {
2566 break
2567 }
2568 d := auxIntToInt64(v_0.AuxInt)
2569 x := v_0.Args[0]
2570 if !(is32Bit(c + d)) {
2571 break
2572 }
2573 v.reset(OpMIPS64ADDVconst)
2574 v.AuxInt = int64ToAuxInt(c + d)
2575 v.AddArg(x)
2576 return true
2577 }
2578 // match: (ADDVconst [c] (SUBVconst [d] x))
2579 // cond: is32Bit(c - d)
2580 // result: (ADDVconst [c-d] x)
2581 for {
2582 c := auxIntToInt64(v.AuxInt)
2583 if v_0.Op != OpMIPS64SUBVconst {
2584 break
2585 }
2586 d := auxIntToInt64(v_0.AuxInt)
2587 x := v_0.Args[0]
2588 if !(is32Bit(c - d)) {
2589 break
2590 }
2591 v.reset(OpMIPS64ADDVconst)
2592 v.AuxInt = int64ToAuxInt(c - d)
2593 v.AddArg(x)
2594 return true
2595 }
2596 return false
2597 }
2598 func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
2599 v_1 := v.Args[1]
2600 v_0 := v.Args[0]
2601 // match: (AND x (MOVVconst [c]))
2602 // cond: is32Bit(c)
2603 // result: (ANDconst [c] x)
2604 for {
2605 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2606 x := v_0
2607 if v_1.Op != OpMIPS64MOVVconst {
2608 continue
2609 }
2610 c := auxIntToInt64(v_1.AuxInt)
2611 if !(is32Bit(c)) {
2612 continue
2613 }
2614 v.reset(OpMIPS64ANDconst)
2615 v.AuxInt = int64ToAuxInt(c)
2616 v.AddArg(x)
2617 return true
2618 }
2619 break
2620 }
2621 // match: (AND x x)
2622 // result: x
2623 for {
2624 x := v_0
2625 if x != v_1 {
2626 break
2627 }
2628 v.copyOf(x)
2629 return true
2630 }
2631 return false
2632 }
2633 func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
2634 v_0 := v.Args[0]
2635 // match: (ANDconst [0] _)
2636 // result: (MOVVconst [0])
2637 for {
2638 if auxIntToInt64(v.AuxInt) != 0 {
2639 break
2640 }
2641 v.reset(OpMIPS64MOVVconst)
2642 v.AuxInt = int64ToAuxInt(0)
2643 return true
2644 }
2645 // match: (ANDconst [-1] x)
2646 // result: x
2647 for {
2648 if auxIntToInt64(v.AuxInt) != -1 {
2649 break
2650 }
2651 x := v_0
2652 v.copyOf(x)
2653 return true
2654 }
2655 // match: (ANDconst [c] (MOVVconst [d]))
2656 // result: (MOVVconst [c&d])
2657 for {
2658 c := auxIntToInt64(v.AuxInt)
2659 if v_0.Op != OpMIPS64MOVVconst {
2660 break
2661 }
2662 d := auxIntToInt64(v_0.AuxInt)
2663 v.reset(OpMIPS64MOVVconst)
2664 v.AuxInt = int64ToAuxInt(c & d)
2665 return true
2666 }
2667 // match: (ANDconst [c] (ANDconst [d] x))
2668 // result: (ANDconst [c&d] x)
2669 for {
2670 c := auxIntToInt64(v.AuxInt)
2671 if v_0.Op != OpMIPS64ANDconst {
2672 break
2673 }
2674 d := auxIntToInt64(v_0.AuxInt)
2675 x := v_0.Args[0]
2676 v.reset(OpMIPS64ANDconst)
2677 v.AuxInt = int64ToAuxInt(c & d)
2678 v.AddArg(x)
2679 return true
2680 }
2681 return false
2682 }
2683 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
2684 v_2 := v.Args[2]
2685 v_1 := v.Args[1]
2686 v_0 := v.Args[0]
2687 // match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
2688 // cond: is32Bit(c)
2689 // result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
2690 for {
2691 ptr := v_0
2692 if v_1.Op != OpMIPS64MOVVconst {
2693 break
2694 }
2695 c := auxIntToInt64(v_1.AuxInt)
2696 mem := v_2
2697 if !(is32Bit(c)) {
2698 break
2699 }
2700 v.reset(OpMIPS64LoweredAtomicAddconst32)
2701 v.AuxInt = int32ToAuxInt(int32(c))
2702 v.AddArg2(ptr, mem)
2703 return true
2704 }
2705 return false
2706 }
2707 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
2708 v_2 := v.Args[2]
2709 v_1 := v.Args[1]
2710 v_0 := v.Args[0]
2711 // match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
2712 // cond: is32Bit(c)
2713 // result: (LoweredAtomicAddconst64 [c] ptr mem)
2714 for {
2715 ptr := v_0
2716 if v_1.Op != OpMIPS64MOVVconst {
2717 break
2718 }
2719 c := auxIntToInt64(v_1.AuxInt)
2720 mem := v_2
2721 if !(is32Bit(c)) {
2722 break
2723 }
2724 v.reset(OpMIPS64LoweredAtomicAddconst64)
2725 v.AuxInt = int64ToAuxInt(c)
2726 v.AddArg2(ptr, mem)
2727 return true
2728 }
2729 return false
2730 }
2731 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
2732 v_2 := v.Args[2]
2733 v_1 := v.Args[1]
2734 v_0 := v.Args[0]
2735 // match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
2736 // result: (LoweredAtomicStorezero32 ptr mem)
2737 for {
2738 ptr := v_0
2739 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2740 break
2741 }
2742 mem := v_2
2743 v.reset(OpMIPS64LoweredAtomicStorezero32)
2744 v.AddArg2(ptr, mem)
2745 return true
2746 }
2747 return false
2748 }
2749 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
2750 v_2 := v.Args[2]
2751 v_1 := v.Args[1]
2752 v_0 := v.Args[0]
2753 // match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
2754 // result: (LoweredAtomicStorezero64 ptr mem)
2755 for {
2756 ptr := v_0
2757 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2758 break
2759 }
2760 mem := v_2
2761 v.reset(OpMIPS64LoweredAtomicStorezero64)
2762 v.AddArg2(ptr, mem)
2763 return true
2764 }
2765 return false
2766 }
2767 func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsCR(v *Value) bool {
2768 v_1 := v.Args[1]
2769 v_0 := v.Args[0]
2770 // match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
2771 // result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx: p.C, Cy: c}} mem)
2772 for {
2773 kind := auxIntToInt64(v.AuxInt)
2774 p := auxToPanicBoundsC(v.Aux)
2775 if v_0.Op != OpMIPS64MOVVconst {
2776 break
2777 }
2778 c := auxIntToInt64(v_0.AuxInt)
2779 mem := v_1
2780 v.reset(OpMIPS64LoweredPanicBoundsCC)
2781 v.AuxInt = int64ToAuxInt(kind)
2782 v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
2783 v.AddArg(mem)
2784 return true
2785 }
2786 return false
2787 }
2788 func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRC(v *Value) bool {
2789 v_1 := v.Args[1]
2790 v_0 := v.Args[0]
2791 // match: (LoweredPanicBoundsRC [kind] {p} (MOVVconst [c]) mem)
2792 // result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx: c, Cy: p.C}} mem)
2793 for {
2794 kind := auxIntToInt64(v.AuxInt)
2795 p := auxToPanicBoundsC(v.Aux)
2796 if v_0.Op != OpMIPS64MOVVconst {
2797 break
2798 }
2799 c := auxIntToInt64(v_0.AuxInt)
2800 mem := v_1
2801 v.reset(OpMIPS64LoweredPanicBoundsCC)
2802 v.AuxInt = int64ToAuxInt(kind)
2803 v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
2804 v.AddArg(mem)
2805 return true
2806 }
2807 return false
2808 }
2809 func rewriteValueMIPS64_OpMIPS64LoweredPanicBoundsRR(v *Value) bool {
2810 v_2 := v.Args[2]
2811 v_1 := v.Args[1]
2812 v_0 := v.Args[0]
2813 // match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
2814 // result: (LoweredPanicBoundsRC [kind] {PanicBoundsC{C: c}} x mem)
2815 for {
2816 kind := auxIntToInt64(v.AuxInt)
2817 x := v_0
2818 if v_1.Op != OpMIPS64MOVVconst {
2819 break
2820 }
2821 c := auxIntToInt64(v_1.AuxInt)
2822 mem := v_2
2823 v.reset(OpMIPS64LoweredPanicBoundsRC)
2824 v.AuxInt = int64ToAuxInt(kind)
2825 v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
2826 v.AddArg2(x, mem)
2827 return true
2828 }
2829 // match: (LoweredPanicBoundsRR [kind] (MOVVconst [c]) y mem)
2830 // result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C: c}} y mem)
2831 for {
2832 kind := auxIntToInt64(v.AuxInt)
2833 if v_0.Op != OpMIPS64MOVVconst {
2834 break
2835 }
2836 c := auxIntToInt64(v_0.AuxInt)
2837 y := v_1
2838 mem := v_2
2839 v.reset(OpMIPS64LoweredPanicBoundsCR)
2840 v.AuxInt = int64ToAuxInt(kind)
2841 v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
2842 v.AddArg2(y, mem)
2843 return true
2844 }
2845 return false
2846 }
2847 func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
2848 v_1 := v.Args[1]
2849 v_0 := v.Args[0]
2850 b := v.Block
2851 config := b.Func.Config
2852 // match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
2853 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2854 // result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
2855 for {
2856 off1 := auxIntToInt32(v.AuxInt)
2857 sym := auxToSym(v.Aux)
2858 if v_0.Op != OpMIPS64ADDVconst {
2859 break
2860 }
2861 off2 := auxIntToInt64(v_0.AuxInt)
2862 ptr := v_0.Args[0]
2863 mem := v_1
2864 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2865 break
2866 }
2867 v.reset(OpMIPS64MOVBUload)
2868 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2869 v.Aux = symToAux(sym)
2870 v.AddArg2(ptr, mem)
2871 return true
2872 }
2873 // match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
2874 // cond: canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
2875 // result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
2876 for {
2877 off1 := auxIntToInt32(v.AuxInt)
2878 sym1 := auxToSym(v.Aux)
2879 if v_0.Op != OpMIPS64MOVVaddr {
2880 break
2881 }
2882 off2 := auxIntToInt32(v_0.AuxInt)
2883 sym2 := auxToSym(v_0.Aux)
2884 ptr := v_0.Args[0]
2885 mem := v_1
2886 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2887 break
2888 }
2889 v.reset(OpMIPS64MOVBUload)
2890 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2891 v.Aux = symToAux(mergeSym(sym1, sym2))
2892 v.AddArg2(ptr, mem)
2893 return true
2894 }
2895 // match: (MOVBUload [off] {sym} (SB) _)
2896 // cond: symIsRO(sym)
2897 // result: (MOVVconst [int64(read8(sym, int64(off)))])
2898 for {
2899 off := auxIntToInt32(v.AuxInt)
2900 sym := auxToSym(v.Aux)
2901 if v_0.Op != OpSB || !(symIsRO(sym)) {
2902 break
2903 }
2904 v.reset(OpMIPS64MOVVconst)
2905 v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
2906 return true
2907 }
2908 return false
2909 }
2910 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
2911 v_0 := v.Args[0]
2912 // match: (MOVBUreg x:(MOVBUload _ _))
2913 // result: (MOVVreg x)
2914 for {
2915 x := v_0
2916 if x.Op != OpMIPS64MOVBUload {
2917 break
2918 }
2919 v.reset(OpMIPS64MOVVreg)
2920 v.AddArg(x)
2921 return true
2922 }
2923 // match: (MOVBUreg x:(MOVBUreg _))
2924 // result: (MOVVreg x)
2925 for {
2926 x := v_0
2927 if x.Op != OpMIPS64MOVBUreg {
2928 break
2929 }
2930 v.reset(OpMIPS64MOVVreg)
2931 v.AddArg(x)
2932 return true
2933 }
2934 // match: (MOVBUreg (MOVVconst [c]))
2935 // result: (MOVVconst [int64(uint8(c))])
2936 for {
2937 if v_0.Op != OpMIPS64MOVVconst {
2938 break
2939 }
2940 c := auxIntToInt64(v_0.AuxInt)
2941 v.reset(OpMIPS64MOVVconst)
2942 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
2943 return true
2944 }
2945 return false
2946 }
2947 func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
2948 v_1 := v.Args[1]
2949 v_0 := v.Args[0]
2950 b := v.Block
2951 config := b.Func.Config
2952
2953
2954
2955 for {
2956 off1 := auxIntToInt32(v.AuxInt)
2957 sym := auxToSym(v.Aux)
2958 if v_0.Op != OpMIPS64ADDVconst {
2959 break
2960 }
2961 off2 := auxIntToInt64(v_0.AuxInt)
2962 ptr := v_0.Args[0]
2963 mem := v_1
2964 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2965 break
2966 }
2967 v.reset(OpMIPS64MOVBload)
2968 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2969 v.Aux = symToAux(sym)
2970 v.AddArg2(ptr, mem)
2971 return true
2972 }
2973
2974
2975
2976 for {
2977 off1 := auxIntToInt32(v.AuxInt)
2978 sym1 := auxToSym(v.Aux)
2979 if v_0.Op != OpMIPS64MOVVaddr {
2980 break
2981 }
2982 off2 := auxIntToInt32(v_0.AuxInt)
2983 sym2 := auxToSym(v_0.Aux)
2984 ptr := v_0.Args[0]
2985 mem := v_1
2986 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2987 break
2988 }
2989 v.reset(OpMIPS64MOVBload)
2990 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2991 v.Aux = symToAux(mergeSym(sym1, sym2))
2992 v.AddArg2(ptr, mem)
2993 return true
2994 }
2995 // match: (MOVBload [off] {sym} (SB) _)
2996 // cond: symIsRO(sym)
2997 // result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
2998 for {
2999 off := auxIntToInt32(v.AuxInt)
3000 sym := auxToSym(v.Aux)
3001 if v_0.Op != OpSB || !(symIsRO(sym)) {
3002 break
3003 }
3004 v.reset(OpMIPS64MOVVconst)
3005 v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
3006 return true
3007 }
3008 return false
3009 }
3010 func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
3011 v_0 := v.Args[0]
3012 // match: (MOVBreg x:(MOVBload _ _))
3013 // result: (MOVVreg x)
3014 for {
3015 x := v_0
3016 if x.Op != OpMIPS64MOVBload {
3017 break
3018 }
3019 v.reset(OpMIPS64MOVVreg)
3020 v.AddArg(x)
3021 return true
3022 }
3023 // match: (MOVBreg x:(MOVBreg _))
3024 // result: (MOVVreg x)
3025 for {
3026 x := v_0
3027 if x.Op != OpMIPS64MOVBreg {
3028 break
3029 }
3030 v.reset(OpMIPS64MOVVreg)
3031 v.AddArg(x)
3032 return true
3033 }
3034 // match: (MOVBreg (MOVVconst [c]))
3035 // result: (MOVVconst [int64(int8(c))])
3036 for {
3037 if v_0.Op != OpMIPS64MOVVconst {
3038 break
3039 }
3040 c := auxIntToInt64(v_0.AuxInt)
3041 v.reset(OpMIPS64MOVVconst)
3042 v.AuxInt = int64ToAuxInt(int64(int8(c)))
3043 return true
3044 }
3045 return false
3046 }
3047 func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
3048 v_2 := v.Args[2]
3049 v_1 := v.Args[1]
3050 v_0 := v.Args[0]
3051 b := v.Block
3052 config := b.Func.Config
3053 // match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
3054 // cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
3055 // result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
3056 for {
3057 off1 := auxIntToInt32(v.AuxInt)
3058 sym := auxToSym(v.Aux)
3059 if v_0.Op != OpMIPS64ADDVconst {
3060 break
3061 }
3062 off2 := auxIntToInt64(v_0.AuxInt)
3063 ptr := v_0.Args[0]
3064 val := v_1
3065 mem := v_2
3066 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3067 break
3068 }
3069 v.reset(OpMIPS64MOVBstore)
3070 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3071 v.Aux = symToAux(sym)
3072 v.AddArg3(ptr, val, mem)
3073 return true
3074 }
3075 // match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
3076 // cond: canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
3077 // result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
3078 for {
3079 off1 := auxIntToInt32(v.AuxInt)
3080 sym1 := auxToSym(v.Aux)
3081 if v_0.Op != OpMIPS64MOVVaddr {
3082 break
3083 }
3084 off2 := auxIntToInt32(v_0.AuxInt)
3085 sym2 := auxToSym(v_0.Aux)
3086 ptr := v_0.Args[0]
3087 val := v_1
3088 mem := v_2
3089 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3090 break
3091 }
3092 v.reset(OpMIPS64MOVBstore)
3093 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3094 v.Aux = symToAux(mergeSym(sym1, sym2))
3095 v.AddArg3(ptr, val, mem)
3096 return true
3097 }
3098 // match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
3099 // result: (MOVBstorezero [off] {sym} ptr mem)
3100 for {
3101 off := auxIntToInt32(v.AuxInt)
3102 sym := auxToSym(v.Aux)
3103 ptr := v_0
3104 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3105 break
3106 }
3107 mem := v_2
3108 v.reset(OpMIPS64MOVBstorezero)
3109 v.AuxInt = int32ToAuxInt(off)
3110 v.Aux = symToAux(sym)
3111 v.AddArg2(ptr, mem)
3112 return true
3113 }
3114 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
3115 // result: (MOVBstore [off] {sym} ptr x mem)
3116 for {
3117 off := auxIntToInt32(v.AuxInt)
3118 sym := auxToSym(v.Aux)
3119 ptr := v_0
3120 if v_1.Op != OpMIPS64MOVBreg {
3121 break
3122 }
3123 x := v_1.Args[0]
3124 mem := v_2
3125 v.reset(OpMIPS64MOVBstore)
3126 v.AuxInt = int32ToAuxInt(off)
3127 v.Aux = symToAux(sym)
3128 v.AddArg3(ptr, x, mem)
3129 return true
3130 }
3131 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
3132 // result: (MOVBstore [off] {sym} ptr x mem)
3133 for {
3134 off := auxIntToInt32(v.AuxInt)
3135 sym := auxToSym(v.Aux)
3136 ptr := v_0
3137 if v_1.Op != OpMIPS64MOVBUreg {
3138 break
3139 }
3140 x := v_1.Args[0]
3141 mem := v_2
3142 v.reset(OpMIPS64MOVBstore)
3143 v.AuxInt = int32ToAuxInt(off)
3144 v.Aux = symToAux(sym)
3145 v.AddArg3(ptr, x, mem)
3146 return true
3147 }
3148 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
3149 // result: (MOVBstore [off] {sym} ptr x mem)
3150 for {
3151 off := auxIntToInt32(v.AuxInt)
3152 sym := auxToSym(v.Aux)
3153 ptr := v_0
3154 if v_1.Op != OpMIPS64MOVHreg {
3155 break
3156 }
3157 x := v_1.Args[0]
3158 mem := v_2
3159 v.reset(OpMIPS64MOVBstore)
3160 v.AuxInt = int32ToAuxInt(off)
3161 v.Aux = symToAux(sym)
3162 v.AddArg3(ptr, x, mem)
3163 return true
3164 }
3165 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
3166 // result: (MOVBstore [off] {sym} ptr x mem)
3167 for {
3168 off := auxIntToInt32(v.AuxInt)
3169 sym := auxToSym(v.Aux)
3170 ptr := v_0
3171 if v_1.Op != OpMIPS64MOVHUreg {
3172 break
3173 }
3174 x := v_1.Args[0]
3175 mem := v_2
3176 v.reset(OpMIPS64MOVBstore)
3177 v.AuxInt = int32ToAuxInt(off)
3178 v.Aux = symToAux(sym)
3179 v.AddArg3(ptr, x, mem)
3180 return true
3181 }
3182 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
3183 // result: (MOVBstore [off] {sym} ptr x mem)
3184 for {
3185 off := auxIntToInt32(v.AuxInt)
3186 sym := auxToSym(v.Aux)
3187 ptr := v_0
3188 if v_1.Op != OpMIPS64MOVWreg {
3189 break
3190 }
3191 x := v_1.Args[0]
3192 mem := v_2
3193 v.reset(OpMIPS64MOVBstore)
3194 v.AuxInt = int32ToAuxInt(off)
3195 v.Aux = symToAux(sym)
3196 v.AddArg3(ptr, x, mem)
3197 return true
3198 }
3199 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
3200 // result: (MOVBstore [off] {sym} ptr x mem)
3201 for {
3202 off := auxIntToInt32(v.AuxInt)
3203 sym := auxToSym(v.Aux)
3204 ptr := v_0
3205 if v_1.Op != OpMIPS64MOVWUreg {
3206 break
3207 }
3208 x := v_1.Args[0]
3209 mem := v_2
3210 v.reset(OpMIPS64MOVBstore)
3211 v.AuxInt = int32ToAuxInt(off)
3212 v.Aux = symToAux(sym)
3213 v.AddArg3(ptr, x, mem)
3214 return true
3215 }
3216 return false
3217 }
3218 func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
3219 v_1 := v.Args[1]
3220 v_0 := v.Args[0]
3221 b := v.Block
3222 config := b.Func.Config
3223
3224
3225
3226 for {
3227 off1 := auxIntToInt32(v.AuxInt)
3228 sym := auxToSym(v.Aux)
3229 if v_0.Op != OpMIPS64ADDVconst {
3230 break
3231 }
3232 off2 := auxIntToInt64(v_0.AuxInt)
3233 ptr := v_0.Args[0]
3234 mem := v_1
3235 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3236 break
3237 }
3238 v.reset(OpMIPS64MOVBstorezero)
3239 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3240 v.Aux = symToAux(sym)
3241 v.AddArg2(ptr, mem)
3242 return true
3243 }
3244
3245
3246
3247 for {
3248 off1 := auxIntToInt32(v.AuxInt)
3249 sym1 := auxToSym(v.Aux)
3250 if v_0.Op != OpMIPS64MOVVaddr {
3251 break
3252 }
3253 off2 := auxIntToInt32(v_0.AuxInt)
3254 sym2 := auxToSym(v_0.Aux)
3255 ptr := v_0.Args[0]
3256 mem := v_1
3257 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3258 break
3259 }
3260 v.reset(OpMIPS64MOVBstorezero)
3261 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3262 v.Aux = symToAux(mergeSym(sym1, sym2))
3263 v.AddArg2(ptr, mem)
3264 return true
3265 }
3266 return false
3267 }
3268 func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
3269 v_1 := v.Args[1]
3270 v_0 := v.Args[0]
3271 b := v.Block
3272 config := b.Func.Config
3273 // match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
3274 // result: (MOVVgpfp val)
3275 for {
3276 off := auxIntToInt32(v.AuxInt)
3277 sym := auxToSym(v.Aux)
3278 ptr := v_0
3279 if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3280 break
3281 }
3282 val := v_1.Args[1]
3283 if ptr != v_1.Args[0] {
3284 break
3285 }
3286 v.reset(OpMIPS64MOVVgpfp)
3287 v.AddArg(val)
3288 return true
3289 }
3290
3291
3292
3293 for {
3294 off1 := auxIntToInt32(v.AuxInt)
3295 sym := auxToSym(v.Aux)
3296 if v_0.Op != OpMIPS64ADDVconst {
3297 break
3298 }
3299 off2 := auxIntToInt64(v_0.AuxInt)
3300 ptr := v_0.Args[0]
3301 mem := v_1
3302 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3303 break
3304 }
3305 v.reset(OpMIPS64MOVDload)
3306 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3307 v.Aux = symToAux(sym)
3308 v.AddArg2(ptr, mem)
3309 return true
3310 }
3311
3312
3313
3314 for {
3315 off1 := auxIntToInt32(v.AuxInt)
3316 sym1 := auxToSym(v.Aux)
3317 if v_0.Op != OpMIPS64MOVVaddr {
3318 break
3319 }
3320 off2 := auxIntToInt32(v_0.AuxInt)
3321 sym2 := auxToSym(v_0.Aux)
3322 ptr := v_0.Args[0]
3323 mem := v_1
3324 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3325 break
3326 }
3327 v.reset(OpMIPS64MOVDload)
3328 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3329 v.Aux = symToAux(mergeSym(sym1, sym2))
3330 v.AddArg2(ptr, mem)
3331 return true
3332 }
3333 return false
3334 }
3335 func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
3336 v_2 := v.Args[2]
3337 v_1 := v.Args[1]
3338 v_0 := v.Args[0]
3339 b := v.Block
3340 config := b.Func.Config
3341 // match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
3342 // result: (MOVVstore [off] {sym} ptr val mem)
3343 for {
3344 off := auxIntToInt32(v.AuxInt)
3345 sym := auxToSym(v.Aux)
3346 ptr := v_0
3347 if v_1.Op != OpMIPS64MOVVgpfp {
3348 break
3349 }
3350 val := v_1.Args[0]
3351 mem := v_2
3352 v.reset(OpMIPS64MOVVstore)
3353 v.AuxInt = int32ToAuxInt(off)
3354 v.Aux = symToAux(sym)
3355 v.AddArg3(ptr, val, mem)
3356 return true
3357 }
3358
3359
3360
3361 for {
3362 off1 := auxIntToInt32(v.AuxInt)
3363 sym := auxToSym(v.Aux)
3364 if v_0.Op != OpMIPS64ADDVconst {
3365 break
3366 }
3367 off2 := auxIntToInt64(v_0.AuxInt)
3368 ptr := v_0.Args[0]
3369 val := v_1
3370 mem := v_2
3371 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3372 break
3373 }
3374 v.reset(OpMIPS64MOVDstore)
3375 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3376 v.Aux = symToAux(sym)
3377 v.AddArg3(ptr, val, mem)
3378 return true
3379 }
3380
3381
3382
3383 for {
3384 off1 := auxIntToInt32(v.AuxInt)
3385 sym1 := auxToSym(v.Aux)
3386 if v_0.Op != OpMIPS64MOVVaddr {
3387 break
3388 }
3389 off2 := auxIntToInt32(v_0.AuxInt)
3390 sym2 := auxToSym(v_0.Aux)
3391 ptr := v_0.Args[0]
3392 val := v_1
3393 mem := v_2
3394 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3395 break
3396 }
3397 v.reset(OpMIPS64MOVDstore)
3398 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3399 v.Aux = symToAux(mergeSym(sym1, sym2))
3400 v.AddArg3(ptr, val, mem)
3401 return true
3402 }
3403 return false
3404 }
3405 func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
3406 v_1 := v.Args[1]
3407 v_0 := v.Args[0]
3408 b := v.Block
3409 config := b.Func.Config
3410 // match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
3411 // result: (MOVWgpfp val)
3412 for {
3413 off := auxIntToInt32(v.AuxInt)
3414 sym := auxToSym(v.Aux)
3415 ptr := v_0
3416 if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3417 break
3418 }
3419 val := v_1.Args[1]
3420 if ptr != v_1.Args[0] {
3421 break
3422 }
3423 v.reset(OpMIPS64MOVWgpfp)
3424 v.AddArg(val)
3425 return true
3426 }
3427
3428
3429
3430 for {
3431 off1 := auxIntToInt32(v.AuxInt)
3432 sym := auxToSym(v.Aux)
3433 if v_0.Op != OpMIPS64ADDVconst {
3434 break
3435 }
3436 off2 := auxIntToInt64(v_0.AuxInt)
3437 ptr := v_0.Args[0]
3438 mem := v_1
3439 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3440 break
3441 }
3442 v.reset(OpMIPS64MOVFload)
3443 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3444 v.Aux = symToAux(sym)
3445 v.AddArg2(ptr, mem)
3446 return true
3447 }
3448
3449
3450
3451 for {
3452 off1 := auxIntToInt32(v.AuxInt)
3453 sym1 := auxToSym(v.Aux)
3454 if v_0.Op != OpMIPS64MOVVaddr {
3455 break
3456 }
3457 off2 := auxIntToInt32(v_0.AuxInt)
3458 sym2 := auxToSym(v_0.Aux)
3459 ptr := v_0.Args[0]
3460 mem := v_1
3461 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3462 break
3463 }
3464 v.reset(OpMIPS64MOVFload)
3465 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3466 v.Aux = symToAux(mergeSym(sym1, sym2))
3467 v.AddArg2(ptr, mem)
3468 return true
3469 }
3470 return false
3471 }
3472 func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
3473 v_2 := v.Args[2]
3474 v_1 := v.Args[1]
3475 v_0 := v.Args[0]
3476 b := v.Block
3477 config := b.Func.Config
3478 // match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
3479 // result: (MOVWstore [off] {sym} ptr val mem)
3480 for {
3481 off := auxIntToInt32(v.AuxInt)
3482 sym := auxToSym(v.Aux)
3483 ptr := v_0
3484 if v_1.Op != OpMIPS64MOVWgpfp {
3485 break
3486 }
3487 val := v_1.Args[0]
3488 mem := v_2
3489 v.reset(OpMIPS64MOVWstore)
3490 v.AuxInt = int32ToAuxInt(off)
3491 v.Aux = symToAux(sym)
3492 v.AddArg3(ptr, val, mem)
3493 return true
3494 }
3495
3496
3497
3498 for {
3499 off1 := auxIntToInt32(v.AuxInt)
3500 sym := auxToSym(v.Aux)
3501 if v_0.Op != OpMIPS64ADDVconst {
3502 break
3503 }
3504 off2 := auxIntToInt64(v_0.AuxInt)
3505 ptr := v_0.Args[0]
3506 val := v_1
3507 mem := v_2
3508 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3509 break
3510 }
3511 v.reset(OpMIPS64MOVFstore)
3512 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3513 v.Aux = symToAux(sym)
3514 v.AddArg3(ptr, val, mem)
3515 return true
3516 }
3517
3518
3519
3520 for {
3521 off1 := auxIntToInt32(v.AuxInt)
3522 sym1 := auxToSym(v.Aux)
3523 if v_0.Op != OpMIPS64MOVVaddr {
3524 break
3525 }
3526 off2 := auxIntToInt32(v_0.AuxInt)
3527 sym2 := auxToSym(v_0.Aux)
3528 ptr := v_0.Args[0]
3529 val := v_1
3530 mem := v_2
3531 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3532 break
3533 }
3534 v.reset(OpMIPS64MOVFstore)
3535 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3536 v.Aux = symToAux(mergeSym(sym1, sym2))
3537 v.AddArg3(ptr, val, mem)
3538 return true
3539 }
3540 return false
3541 }
3542 func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
3543 v_1 := v.Args[1]
3544 v_0 := v.Args[0]
3545 b := v.Block
3546 config := b.Func.Config
3547
3548
3549
3550 for {
3551 off1 := auxIntToInt32(v.AuxInt)
3552 sym := auxToSym(v.Aux)
3553 if v_0.Op != OpMIPS64ADDVconst {
3554 break
3555 }
3556 off2 := auxIntToInt64(v_0.AuxInt)
3557 ptr := v_0.Args[0]
3558 mem := v_1
3559 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3560 break
3561 }
3562 v.reset(OpMIPS64MOVHUload)
3563 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3564 v.Aux = symToAux(sym)
3565 v.AddArg2(ptr, mem)
3566 return true
3567 }
3568
3569
3570
3571 for {
3572 off1 := auxIntToInt32(v.AuxInt)
3573 sym1 := auxToSym(v.Aux)
3574 if v_0.Op != OpMIPS64MOVVaddr {
3575 break
3576 }
3577 off2 := auxIntToInt32(v_0.AuxInt)
3578 sym2 := auxToSym(v_0.Aux)
3579 ptr := v_0.Args[0]
3580 mem := v_1
3581 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3582 break
3583 }
3584 v.reset(OpMIPS64MOVHUload)
3585 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3586 v.Aux = symToAux(mergeSym(sym1, sym2))
3587 v.AddArg2(ptr, mem)
3588 return true
3589 }
3590 // match: (MOVHUload [off] {sym} (SB) _)
3591 // cond: symIsRO(sym)
3592 // result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
3593 for {
3594 off := auxIntToInt32(v.AuxInt)
3595 sym := auxToSym(v.Aux)
3596 if v_0.Op != OpSB || !(symIsRO(sym)) {
3597 break
3598 }
3599 v.reset(OpMIPS64MOVVconst)
3600 v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3601 return true
3602 }
3603 return false
3604 }
3605 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
3606 v_0 := v.Args[0]
3607
3608
3609 for {
3610 x := v_0
3611 if x.Op != OpMIPS64MOVBUload {
3612 break
3613 }
3614 v.reset(OpMIPS64MOVVreg)
3615 v.AddArg(x)
3616 return true
3617 }
3618
3619
3620 for {
3621 x := v_0
3622 if x.Op != OpMIPS64MOVHUload {
3623 break
3624 }
3625 v.reset(OpMIPS64MOVVreg)
3626 v.AddArg(x)
3627 return true
3628 }
3629
3630
3631 for {
3632 x := v_0
3633 if x.Op != OpMIPS64MOVBUreg {
3634 break
3635 }
3636 v.reset(OpMIPS64MOVVreg)
3637 v.AddArg(x)
3638 return true
3639 }
3640
3641
3642 for {
3643 x := v_0
3644 if x.Op != OpMIPS64MOVHUreg {
3645 break
3646 }
3647 v.reset(OpMIPS64MOVVreg)
3648 v.AddArg(x)
3649 return true
3650 }
3651 // match: (MOVHUreg (MOVVconst [c]))
3652 // result: (MOVVconst [int64(uint16(c))])
3653 for {
3654 if v_0.Op != OpMIPS64MOVVconst {
3655 break
3656 }
3657 c := auxIntToInt64(v_0.AuxInt)
3658 v.reset(OpMIPS64MOVVconst)
3659 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
3660 return true
3661 }
3662 return false
3663 }
3664 func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
3665 v_1 := v.Args[1]
3666 v_0 := v.Args[0]
3667 b := v.Block
3668 config := b.Func.Config
3669
3670
3671
3672 for {
3673 off1 := auxIntToInt32(v.AuxInt)
3674 sym := auxToSym(v.Aux)
3675 if v_0.Op != OpMIPS64ADDVconst {
3676 break
3677 }
3678 off2 := auxIntToInt64(v_0.AuxInt)
3679 ptr := v_0.Args[0]
3680 mem := v_1
3681 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3682 break
3683 }
3684 v.reset(OpMIPS64MOVHload)
3685 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3686 v.Aux = symToAux(sym)
3687 v.AddArg2(ptr, mem)
3688 return true
3689 }
3690
3691
3692
3693 for {
3694 off1 := auxIntToInt32(v.AuxInt)
3695 sym1 := auxToSym(v.Aux)
3696 if v_0.Op != OpMIPS64MOVVaddr {
3697 break
3698 }
3699 off2 := auxIntToInt32(v_0.AuxInt)
3700 sym2 := auxToSym(v_0.Aux)
3701 ptr := v_0.Args[0]
3702 mem := v_1
3703 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3704 break
3705 }
3706 v.reset(OpMIPS64MOVHload)
3707 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3708 v.Aux = symToAux(mergeSym(sym1, sym2))
3709 v.AddArg2(ptr, mem)
3710 return true
3711 }
3712 // match: (MOVHload [off] {sym} (SB) _)
3713 // cond: symIsRO(sym)
3714 // result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
3715 for {
3716 off := auxIntToInt32(v.AuxInt)
3717 sym := auxToSym(v.Aux)
3718 if v_0.Op != OpSB || !(symIsRO(sym)) {
3719 break
3720 }
3721 v.reset(OpMIPS64MOVVconst)
3722 v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
3723 return true
3724 }
3725 return false
3726 }
3727 func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
3728 v_0 := v.Args[0]
3729
3730
3731 for {
3732 x := v_0
3733 if x.Op != OpMIPS64MOVBload {
3734 break
3735 }
3736 v.reset(OpMIPS64MOVVreg)
3737 v.AddArg(x)
3738 return true
3739 }
3740
3741
3742 for {
3743 x := v_0
3744 if x.Op != OpMIPS64MOVBUload {
3745 break
3746 }
3747 v.reset(OpMIPS64MOVVreg)
3748 v.AddArg(x)
3749 return true
3750 }
3751
3752
3753 for {
3754 x := v_0
3755 if x.Op != OpMIPS64MOVHload {
3756 break
3757 }
3758 v.reset(OpMIPS64MOVVreg)
3759 v.AddArg(x)
3760 return true
3761 }
3762
3763
3764 for {
3765 x := v_0
3766 if x.Op != OpMIPS64MOVBreg {
3767 break
3768 }
3769 v.reset(OpMIPS64MOVVreg)
3770 v.AddArg(x)
3771 return true
3772 }
3773
3774
3775 for {
3776 x := v_0
3777 if x.Op != OpMIPS64MOVBUreg {
3778 break
3779 }
3780 v.reset(OpMIPS64MOVVreg)
3781 v.AddArg(x)
3782 return true
3783 }
3784
3785
3786 for {
3787 x := v_0
3788 if x.Op != OpMIPS64MOVHreg {
3789 break
3790 }
3791 v.reset(OpMIPS64MOVVreg)
3792 v.AddArg(x)
3793 return true
3794 }
3795 // match: (MOVHreg (MOVVconst [c]))
3796 // result: (MOVVconst [int64(int16(c))])
3797 for {
3798 if v_0.Op != OpMIPS64MOVVconst {
3799 break
3800 }
3801 c := auxIntToInt64(v_0.AuxInt)
3802 v.reset(OpMIPS64MOVVconst)
3803 v.AuxInt = int64ToAuxInt(int64(int16(c)))
3804 return true
3805 }
3806 return false
3807 }
3808 func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
3809 v_2 := v.Args[2]
3810 v_1 := v.Args[1]
3811 v_0 := v.Args[0]
3812 b := v.Block
3813 config := b.Func.Config
3814
3815
3816
3817 for {
3818 off1 := auxIntToInt32(v.AuxInt)
3819 sym := auxToSym(v.Aux)
3820 if v_0.Op != OpMIPS64ADDVconst {
3821 break
3822 }
3823 off2 := auxIntToInt64(v_0.AuxInt)
3824 ptr := v_0.Args[0]
3825 val := v_1
3826 mem := v_2
3827 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3828 break
3829 }
3830 v.reset(OpMIPS64MOVHstore)
3831 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3832 v.Aux = symToAux(sym)
3833 v.AddArg3(ptr, val, mem)
3834 return true
3835 }
3836
3837
3838
3839 for {
3840 off1 := auxIntToInt32(v.AuxInt)
3841 sym1 := auxToSym(v.Aux)
3842 if v_0.Op != OpMIPS64MOVVaddr {
3843 break
3844 }
3845 off2 := auxIntToInt32(v_0.AuxInt)
3846 sym2 := auxToSym(v_0.Aux)
3847 ptr := v_0.Args[0]
3848 val := v_1
3849 mem := v_2
3850 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3851 break
3852 }
3853 v.reset(OpMIPS64MOVHstore)
3854 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3855 v.Aux = symToAux(mergeSym(sym1, sym2))
3856 v.AddArg3(ptr, val, mem)
3857 return true
3858 }
3859 // match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
3860 // result: (MOVHstorezero [off] {sym} ptr mem)
3861 for {
3862 off := auxIntToInt32(v.AuxInt)
3863 sym := auxToSym(v.Aux)
3864 ptr := v_0
3865 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3866 break
3867 }
3868 mem := v_2
3869 v.reset(OpMIPS64MOVHstorezero)
3870 v.AuxInt = int32ToAuxInt(off)
3871 v.Aux = symToAux(sym)
3872 v.AddArg2(ptr, mem)
3873 return true
3874 }
3875
3876
3877 for {
3878 off := auxIntToInt32(v.AuxInt)
3879 sym := auxToSym(v.Aux)
3880 ptr := v_0
3881 if v_1.Op != OpMIPS64MOVHreg {
3882 break
3883 }
3884 x := v_1.Args[0]
3885 mem := v_2
3886 v.reset(OpMIPS64MOVHstore)
3887 v.AuxInt = int32ToAuxInt(off)
3888 v.Aux = symToAux(sym)
3889 v.AddArg3(ptr, x, mem)
3890 return true
3891 }
3892
3893
3894 for {
3895 off := auxIntToInt32(v.AuxInt)
3896 sym := auxToSym(v.Aux)
3897 ptr := v_0
3898 if v_1.Op != OpMIPS64MOVHUreg {
3899 break
3900 }
3901 x := v_1.Args[0]
3902 mem := v_2
3903 v.reset(OpMIPS64MOVHstore)
3904 v.AuxInt = int32ToAuxInt(off)
3905 v.Aux = symToAux(sym)
3906 v.AddArg3(ptr, x, mem)
3907 return true
3908 }
3909
3910
3911 for {
3912 off := auxIntToInt32(v.AuxInt)
3913 sym := auxToSym(v.Aux)
3914 ptr := v_0
3915 if v_1.Op != OpMIPS64MOVWreg {
3916 break
3917 }
3918 x := v_1.Args[0]
3919 mem := v_2
3920 v.reset(OpMIPS64MOVHstore)
3921 v.AuxInt = int32ToAuxInt(off)
3922 v.Aux = symToAux(sym)
3923 v.AddArg3(ptr, x, mem)
3924 return true
3925 }
3926
3927
3928 for {
3929 off := auxIntToInt32(v.AuxInt)
3930 sym := auxToSym(v.Aux)
3931 ptr := v_0
3932 if v_1.Op != OpMIPS64MOVWUreg {
3933 break
3934 }
3935 x := v_1.Args[0]
3936 mem := v_2
3937 v.reset(OpMIPS64MOVHstore)
3938 v.AuxInt = int32ToAuxInt(off)
3939 v.Aux = symToAux(sym)
3940 v.AddArg3(ptr, x, mem)
3941 return true
3942 }
3943 return false
3944 }
3945 func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
3946 v_1 := v.Args[1]
3947 v_0 := v.Args[0]
3948 b := v.Block
3949 config := b.Func.Config
3950
3951
3952
3953 for {
3954 off1 := auxIntToInt32(v.AuxInt)
3955 sym := auxToSym(v.Aux)
3956 if v_0.Op != OpMIPS64ADDVconst {
3957 break
3958 }
3959 off2 := auxIntToInt64(v_0.AuxInt)
3960 ptr := v_0.Args[0]
3961 mem := v_1
3962 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3963 break
3964 }
3965 v.reset(OpMIPS64MOVHstorezero)
3966 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3967 v.Aux = symToAux(sym)
3968 v.AddArg2(ptr, mem)
3969 return true
3970 }
3971
3972
3973
3974 for {
3975 off1 := auxIntToInt32(v.AuxInt)
3976 sym1 := auxToSym(v.Aux)
3977 if v_0.Op != OpMIPS64MOVVaddr {
3978 break
3979 }
3980 off2 := auxIntToInt32(v_0.AuxInt)
3981 sym2 := auxToSym(v_0.Aux)
3982 ptr := v_0.Args[0]
3983 mem := v_1
3984 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3985 break
3986 }
3987 v.reset(OpMIPS64MOVHstorezero)
3988 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3989 v.Aux = symToAux(mergeSym(sym1, sym2))
3990 v.AddArg2(ptr, mem)
3991 return true
3992 }
3993 return false
3994 }
3995 func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
3996 v_1 := v.Args[1]
3997 v_0 := v.Args[0]
3998 b := v.Block
3999 config := b.Func.Config
4000 // match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
4001 // result: (MOVVfpgp val)
4002 for {
4003 off := auxIntToInt32(v.AuxInt)
4004 sym := auxToSym(v.Aux)
4005 ptr := v_0
4006 if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4007 break
4008 }
4009 val := v_1.Args[1]
4010 if ptr != v_1.Args[0] {
4011 break
4012 }
4013 v.reset(OpMIPS64MOVVfpgp)
4014 v.AddArg(val)
4015 return true
4016 }
4017
4018
4019
4020 for {
4021 off1 := auxIntToInt32(v.AuxInt)
4022 sym := auxToSym(v.Aux)
4023 if v_0.Op != OpMIPS64ADDVconst {
4024 break
4025 }
4026 off2 := auxIntToInt64(v_0.AuxInt)
4027 ptr := v_0.Args[0]
4028 mem := v_1
4029 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4030 break
4031 }
4032 v.reset(OpMIPS64MOVVload)
4033 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4034 v.Aux = symToAux(sym)
4035 v.AddArg2(ptr, mem)
4036 return true
4037 }
4038
4039
4040
4041 for {
4042 off1 := auxIntToInt32(v.AuxInt)
4043 sym1 := auxToSym(v.Aux)
4044 if v_0.Op != OpMIPS64MOVVaddr {
4045 break
4046 }
4047 off2 := auxIntToInt32(v_0.AuxInt)
4048 sym2 := auxToSym(v_0.Aux)
4049 ptr := v_0.Args[0]
4050 mem := v_1
4051 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4052 break
4053 }
4054 v.reset(OpMIPS64MOVVload)
4055 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4056 v.Aux = symToAux(mergeSym(sym1, sym2))
4057 v.AddArg2(ptr, mem)
4058 return true
4059 }
4060 // match: (MOVVload [off] {sym} (SB) _)
4061 // cond: symIsRO(sym)
4062 // result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
4063 for {
4064 off := auxIntToInt32(v.AuxInt)
4065 sym := auxToSym(v.Aux)
4066 if v_0.Op != OpSB || !(symIsRO(sym)) {
4067 break
4068 }
4069 v.reset(OpMIPS64MOVVconst)
4070 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
4071 return true
4072 }
4073 return false
4074 }
4075 func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
4076 v_0 := v.Args[0]
4077 // match: (MOVVnop (MOVVconst [c]))
4078 // result: (MOVVconst [c])
4079 for {
4080 if v_0.Op != OpMIPS64MOVVconst {
4081 break
4082 }
4083 c := auxIntToInt64(v_0.AuxInt)
4084 v.reset(OpMIPS64MOVVconst)
4085 v.AuxInt = int64ToAuxInt(c)
4086 return true
4087 }
4088 return false
4089 }
4090 func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
4091 v_0 := v.Args[0]
4092 // match: (MOVVreg x)
4093 // cond: x.Uses == 1
4094 // result: (MOVVnop x)
4095 for {
4096 x := v_0
4097 if !(x.Uses == 1) {
4098 break
4099 }
4100 v.reset(OpMIPS64MOVVnop)
4101 v.AddArg(x)
4102 return true
4103 }
4104 // match: (MOVVreg (MOVVconst [c]))
4105 // result: (MOVVconst [c])
4106 for {
4107 if v_0.Op != OpMIPS64MOVVconst {
4108 break
4109 }
4110 c := auxIntToInt64(v_0.AuxInt)
4111 v.reset(OpMIPS64MOVVconst)
4112 v.AuxInt = int64ToAuxInt(c)
4113 return true
4114 }
4115 return false
4116 }
4117 func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
4118 v_2 := v.Args[2]
4119 v_1 := v.Args[1]
4120 v_0 := v.Args[0]
4121 b := v.Block
4122 config := b.Func.Config
4123 // match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
4124 // result: (MOVDstore [off] {sym} ptr val mem)
4125 for {
4126 off := auxIntToInt32(v.AuxInt)
4127 sym := auxToSym(v.Aux)
4128 ptr := v_0
4129 if v_1.Op != OpMIPS64MOVVfpgp {
4130 break
4131 }
4132 val := v_1.Args[0]
4133 mem := v_2
4134 v.reset(OpMIPS64MOVDstore)
4135 v.AuxInt = int32ToAuxInt(off)
4136 v.Aux = symToAux(sym)
4137 v.AddArg3(ptr, val, mem)
4138 return true
4139 }
4140
4141
4142
4143 for {
4144 off1 := auxIntToInt32(v.AuxInt)
4145 sym := auxToSym(v.Aux)
4146 if v_0.Op != OpMIPS64ADDVconst {
4147 break
4148 }
4149 off2 := auxIntToInt64(v_0.AuxInt)
4150 ptr := v_0.Args[0]
4151 val := v_1
4152 mem := v_2
4153 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4154 break
4155 }
4156 v.reset(OpMIPS64MOVVstore)
4157 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4158 v.Aux = symToAux(sym)
4159 v.AddArg3(ptr, val, mem)
4160 return true
4161 }
4162
4163
4164
4165 for {
4166 off1 := auxIntToInt32(v.AuxInt)
4167 sym1 := auxToSym(v.Aux)
4168 if v_0.Op != OpMIPS64MOVVaddr {
4169 break
4170 }
4171 off2 := auxIntToInt32(v_0.AuxInt)
4172 sym2 := auxToSym(v_0.Aux)
4173 ptr := v_0.Args[0]
4174 val := v_1
4175 mem := v_2
4176 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4177 break
4178 }
4179 v.reset(OpMIPS64MOVVstore)
4180 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4181 v.Aux = symToAux(mergeSym(sym1, sym2))
4182 v.AddArg3(ptr, val, mem)
4183 return true
4184 }
4185 // match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
4186 // result: (MOVVstorezero [off] {sym} ptr mem)
4187 for {
4188 off := auxIntToInt32(v.AuxInt)
4189 sym := auxToSym(v.Aux)
4190 ptr := v_0
4191 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4192 break
4193 }
4194 mem := v_2
4195 v.reset(OpMIPS64MOVVstorezero)
4196 v.AuxInt = int32ToAuxInt(off)
4197 v.Aux = symToAux(sym)
4198 v.AddArg2(ptr, mem)
4199 return true
4200 }
4201 return false
4202 }
4203 func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
4204 v_1 := v.Args[1]
4205 v_0 := v.Args[0]
4206 b := v.Block
4207 config := b.Func.Config
4208
4209
4210
4211 for {
4212 off1 := auxIntToInt32(v.AuxInt)
4213 sym := auxToSym(v.Aux)
4214 if v_0.Op != OpMIPS64ADDVconst {
4215 break
4216 }
4217 off2 := auxIntToInt64(v_0.AuxInt)
4218 ptr := v_0.Args[0]
4219 mem := v_1
4220 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4221 break
4222 }
4223 v.reset(OpMIPS64MOVVstorezero)
4224 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4225 v.Aux = symToAux(sym)
4226 v.AddArg2(ptr, mem)
4227 return true
4228 }
4229
4230
4231
4232 for {
4233 off1 := auxIntToInt32(v.AuxInt)
4234 sym1 := auxToSym(v.Aux)
4235 if v_0.Op != OpMIPS64MOVVaddr {
4236 break
4237 }
4238 off2 := auxIntToInt32(v_0.AuxInt)
4239 sym2 := auxToSym(v_0.Aux)
4240 ptr := v_0.Args[0]
4241 mem := v_1
4242 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4243 break
4244 }
4245 v.reset(OpMIPS64MOVVstorezero)
4246 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4247 v.Aux = symToAux(mergeSym(sym1, sym2))
4248 v.AddArg2(ptr, mem)
4249 return true
4250 }
4251 return false
4252 }
4253 func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
4254 v_1 := v.Args[1]
4255 v_0 := v.Args[0]
4256 b := v.Block
4257 config := b.Func.Config
4258 typ := &b.Func.Config.Types
4259 // match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
4260 // result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
4261 for {
4262 off := auxIntToInt32(v.AuxInt)
4263 sym := auxToSym(v.Aux)
4264 ptr := v_0
4265 if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4266 break
4267 }
4268 val := v_1.Args[1]
4269 if ptr != v_1.Args[0] {
4270 break
4271 }
4272 v.reset(OpZeroExt32to64)
4273 v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
4274 v0.AddArg(val)
4275 v.AddArg(v0)
4276 return true
4277 }
4278
4279
4280
4281 for {
4282 off1 := auxIntToInt32(v.AuxInt)
4283 sym := auxToSym(v.Aux)
4284 if v_0.Op != OpMIPS64ADDVconst {
4285 break
4286 }
4287 off2 := auxIntToInt64(v_0.AuxInt)
4288 ptr := v_0.Args[0]
4289 mem := v_1
4290 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4291 break
4292 }
4293 v.reset(OpMIPS64MOVWUload)
4294 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4295 v.Aux = symToAux(sym)
4296 v.AddArg2(ptr, mem)
4297 return true
4298 }
4299
4300
4301
4302 for {
4303 off1 := auxIntToInt32(v.AuxInt)
4304 sym1 := auxToSym(v.Aux)
4305 if v_0.Op != OpMIPS64MOVVaddr {
4306 break
4307 }
4308 off2 := auxIntToInt32(v_0.AuxInt)
4309 sym2 := auxToSym(v_0.Aux)
4310 ptr := v_0.Args[0]
4311 mem := v_1
4312 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4313 break
4314 }
4315 v.reset(OpMIPS64MOVWUload)
4316 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4317 v.Aux = symToAux(mergeSym(sym1, sym2))
4318 v.AddArg2(ptr, mem)
4319 return true
4320 }
4321 // match: (MOVWUload [off] {sym} (SB) _)
4322 // cond: symIsRO(sym)
4323 // result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
4324 for {
4325 off := auxIntToInt32(v.AuxInt)
4326 sym := auxToSym(v.Aux)
4327 if v_0.Op != OpSB || !(symIsRO(sym)) {
4328 break
4329 }
4330 v.reset(OpMIPS64MOVVconst)
4331 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
4332 return true
4333 }
4334 return false
4335 }
4336 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
4337 v_0 := v.Args[0]
4338
4339
4340 for {
4341 x := v_0
4342 if x.Op != OpMIPS64MOVBUload {
4343 break
4344 }
4345 v.reset(OpMIPS64MOVVreg)
4346 v.AddArg(x)
4347 return true
4348 }
4349
4350
4351 for {
4352 x := v_0
4353 if x.Op != OpMIPS64MOVHUload {
4354 break
4355 }
4356 v.reset(OpMIPS64MOVVreg)
4357 v.AddArg(x)
4358 return true
4359 }
4360
4361
4362 for {
4363 x := v_0
4364 if x.Op != OpMIPS64MOVWUload {
4365 break
4366 }
4367 v.reset(OpMIPS64MOVVreg)
4368 v.AddArg(x)
4369 return true
4370 }
4371
4372
4373 for {
4374 x := v_0
4375 if x.Op != OpMIPS64MOVBUreg {
4376 break
4377 }
4378 v.reset(OpMIPS64MOVVreg)
4379 v.AddArg(x)
4380 return true
4381 }
4382
4383
4384 for {
4385 x := v_0
4386 if x.Op != OpMIPS64MOVHUreg {
4387 break
4388 }
4389 v.reset(OpMIPS64MOVVreg)
4390 v.AddArg(x)
4391 return true
4392 }
4393
4394
4395 for {
4396 x := v_0
4397 if x.Op != OpMIPS64MOVWUreg {
4398 break
4399 }
4400 v.reset(OpMIPS64MOVVreg)
4401 v.AddArg(x)
4402 return true
4403 }
4404 // match: (MOVWUreg (MOVVconst [c]))
4405 // result: (MOVVconst [int64(uint32(c))])
4406 for {
4407 if v_0.Op != OpMIPS64MOVVconst {
4408 break
4409 }
4410 c := auxIntToInt64(v_0.AuxInt)
4411 v.reset(OpMIPS64MOVVconst)
4412 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
4413 return true
4414 }
4415 return false
4416 }
4417 func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
4418 v_1 := v.Args[1]
4419 v_0 := v.Args[0]
4420 b := v.Block
4421 config := b.Func.Config
4422
4423
4424
4425 for {
4426 off1 := auxIntToInt32(v.AuxInt)
4427 sym := auxToSym(v.Aux)
4428 if v_0.Op != OpMIPS64ADDVconst {
4429 break
4430 }
4431 off2 := auxIntToInt64(v_0.AuxInt)
4432 ptr := v_0.Args[0]
4433 mem := v_1
4434 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4435 break
4436 }
4437 v.reset(OpMIPS64MOVWload)
4438 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4439 v.Aux = symToAux(sym)
4440 v.AddArg2(ptr, mem)
4441 return true
4442 }
4443
4444
4445
4446 for {
4447 off1 := auxIntToInt32(v.AuxInt)
4448 sym1 := auxToSym(v.Aux)
4449 if v_0.Op != OpMIPS64MOVVaddr {
4450 break
4451 }
4452 off2 := auxIntToInt32(v_0.AuxInt)
4453 sym2 := auxToSym(v_0.Aux)
4454 ptr := v_0.Args[0]
4455 mem := v_1
4456 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4457 break
4458 }
4459 v.reset(OpMIPS64MOVWload)
4460 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4461 v.Aux = symToAux(mergeSym(sym1, sym2))
4462 v.AddArg2(ptr, mem)
4463 return true
4464 }
4465 // match: (MOVWload [off] {sym} (SB) _)
4466 // cond: symIsRO(sym)
4467 // result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
4468 for {
4469 off := auxIntToInt32(v.AuxInt)
4470 sym := auxToSym(v.Aux)
4471 if v_0.Op != OpSB || !(symIsRO(sym)) {
4472 break
4473 }
4474 v.reset(OpMIPS64MOVVconst)
4475 v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
4476 return true
4477 }
4478 return false
4479 }
4480 func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
4481 v_0 := v.Args[0]
4482
4483
4484 for {
4485 x := v_0
4486 if x.Op != OpMIPS64MOVBload {
4487 break
4488 }
4489 v.reset(OpMIPS64MOVVreg)
4490 v.AddArg(x)
4491 return true
4492 }
4493
4494
4495 for {
4496 x := v_0
4497 if x.Op != OpMIPS64MOVBUload {
4498 break
4499 }
4500 v.reset(OpMIPS64MOVVreg)
4501 v.AddArg(x)
4502 return true
4503 }
4504
4505
4506 for {
4507 x := v_0
4508 if x.Op != OpMIPS64MOVHload {
4509 break
4510 }
4511 v.reset(OpMIPS64MOVVreg)
4512 v.AddArg(x)
4513 return true
4514 }
4515
4516
4517 for {
4518 x := v_0
4519 if x.Op != OpMIPS64MOVHUload {
4520 break
4521 }
4522 v.reset(OpMIPS64MOVVreg)
4523 v.AddArg(x)
4524 return true
4525 }
4526
4527
4528 for {
4529 x := v_0
4530 if x.Op != OpMIPS64MOVWload {
4531 break
4532 }
4533 v.reset(OpMIPS64MOVVreg)
4534 v.AddArg(x)
4535 return true
4536 }
4537
4538
4539 for {
4540 x := v_0
4541 if x.Op != OpMIPS64MOVBreg {
4542 break
4543 }
4544 v.reset(OpMIPS64MOVVreg)
4545 v.AddArg(x)
4546 return true
4547 }
4548
4549
4550 for {
4551 x := v_0
4552 if x.Op != OpMIPS64MOVBUreg {
4553 break
4554 }
4555 v.reset(OpMIPS64MOVVreg)
4556 v.AddArg(x)
4557 return true
4558 }
4559
4560
4561 for {
4562 x := v_0
4563 if x.Op != OpMIPS64MOVHreg {
4564 break
4565 }
4566 v.reset(OpMIPS64MOVVreg)
4567 v.AddArg(x)
4568 return true
4569 }
4570
4571
4572 for {
4573 x := v_0
4574 if x.Op != OpMIPS64MOVWreg {
4575 break
4576 }
4577 v.reset(OpMIPS64MOVVreg)
4578 v.AddArg(x)
4579 return true
4580 }
4581 // match: (MOVWreg (MOVVconst [c]))
4582 // result: (MOVVconst [int64(int32(c))])
4583 for {
4584 if v_0.Op != OpMIPS64MOVVconst {
4585 break
4586 }
4587 c := auxIntToInt64(v_0.AuxInt)
4588 v.reset(OpMIPS64MOVVconst)
4589 v.AuxInt = int64ToAuxInt(int64(int32(c)))
4590 return true
4591 }
4592 return false
4593 }
4594 func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
4595 v_2 := v.Args[2]
4596 v_1 := v.Args[1]
4597 v_0 := v.Args[0]
4598 b := v.Block
4599 config := b.Func.Config
4600 // match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
4601 // result: (MOVFstore [off] {sym} ptr val mem)
4602 for {
4603 off := auxIntToInt32(v.AuxInt)
4604 sym := auxToSym(v.Aux)
4605 ptr := v_0
4606 if v_1.Op != OpMIPS64MOVWfpgp {
4607 break
4608 }
4609 val := v_1.Args[0]
4610 mem := v_2
4611 v.reset(OpMIPS64MOVFstore)
4612 v.AuxInt = int32ToAuxInt(off)
4613 v.Aux = symToAux(sym)
4614 v.AddArg3(ptr, val, mem)
4615 return true
4616 }
4617
4618
4619
4620 for {
4621 off1 := auxIntToInt32(v.AuxInt)
4622 sym := auxToSym(v.Aux)
4623 if v_0.Op != OpMIPS64ADDVconst {
4624 break
4625 }
4626 off2 := auxIntToInt64(v_0.AuxInt)
4627 ptr := v_0.Args[0]
4628 val := v_1
4629 mem := v_2
4630 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4631 break
4632 }
4633 v.reset(OpMIPS64MOVWstore)
4634 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4635 v.Aux = symToAux(sym)
4636 v.AddArg3(ptr, val, mem)
4637 return true
4638 }
4639
4640
4641
4642 for {
4643 off1 := auxIntToInt32(v.AuxInt)
4644 sym1 := auxToSym(v.Aux)
4645 if v_0.Op != OpMIPS64MOVVaddr {
4646 break
4647 }
4648 off2 := auxIntToInt32(v_0.AuxInt)
4649 sym2 := auxToSym(v_0.Aux)
4650 ptr := v_0.Args[0]
4651 val := v_1
4652 mem := v_2
4653 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4654 break
4655 }
4656 v.reset(OpMIPS64MOVWstore)
4657 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4658 v.Aux = symToAux(mergeSym(sym1, sym2))
4659 v.AddArg3(ptr, val, mem)
4660 return true
4661 }
4662 // match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
4663 // result: (MOVWstorezero [off] {sym} ptr mem)
4664 for {
4665 off := auxIntToInt32(v.AuxInt)
4666 sym := auxToSym(v.Aux)
4667 ptr := v_0
4668 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4669 break
4670 }
4671 mem := v_2
4672 v.reset(OpMIPS64MOVWstorezero)
4673 v.AuxInt = int32ToAuxInt(off)
4674 v.Aux = symToAux(sym)
4675 v.AddArg2(ptr, mem)
4676 return true
4677 }
4678
4679
4680 for {
4681 off := auxIntToInt32(v.AuxInt)
4682 sym := auxToSym(v.Aux)
4683 ptr := v_0
4684 if v_1.Op != OpMIPS64MOVWreg {
4685 break
4686 }
4687 x := v_1.Args[0]
4688 mem := v_2
4689 v.reset(OpMIPS64MOVWstore)
4690 v.AuxInt = int32ToAuxInt(off)
4691 v.Aux = symToAux(sym)
4692 v.AddArg3(ptr, x, mem)
4693 return true
4694 }
4695
4696
4697 for {
4698 off := auxIntToInt32(v.AuxInt)
4699 sym := auxToSym(v.Aux)
4700 ptr := v_0
4701 if v_1.Op != OpMIPS64MOVWUreg {
4702 break
4703 }
4704 x := v_1.Args[0]
4705 mem := v_2
4706 v.reset(OpMIPS64MOVWstore)
4707 v.AuxInt = int32ToAuxInt(off)
4708 v.Aux = symToAux(sym)
4709 v.AddArg3(ptr, x, mem)
4710 return true
4711 }
4712 return false
4713 }
4714 func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
4715 v_1 := v.Args[1]
4716 v_0 := v.Args[0]
4717 b := v.Block
4718 config := b.Func.Config
4719
4720
4721
4722 for {
4723 off1 := auxIntToInt32(v.AuxInt)
4724 sym := auxToSym(v.Aux)
4725 if v_0.Op != OpMIPS64ADDVconst {
4726 break
4727 }
4728 off2 := auxIntToInt64(v_0.AuxInt)
4729 ptr := v_0.Args[0]
4730 mem := v_1
4731 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4732 break
4733 }
4734 v.reset(OpMIPS64MOVWstorezero)
4735 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4736 v.Aux = symToAux(sym)
4737 v.AddArg2(ptr, mem)
4738 return true
4739 }
4740
4741
4742
4743 for {
4744 off1 := auxIntToInt32(v.AuxInt)
4745 sym1 := auxToSym(v.Aux)
4746 if v_0.Op != OpMIPS64MOVVaddr {
4747 break
4748 }
4749 off2 := auxIntToInt32(v_0.AuxInt)
4750 sym2 := auxToSym(v_0.Aux)
4751 ptr := v_0.Args[0]
4752 mem := v_1
4753 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4754 break
4755 }
4756 v.reset(OpMIPS64MOVWstorezero)
4757 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4758 v.Aux = symToAux(mergeSym(sym1, sym2))
4759 v.AddArg2(ptr, mem)
4760 return true
4761 }
4762 return false
4763 }
4764 func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
4765 v_0 := v.Args[0]
4766 // match: (NEGV (SUBV x y))
4767 // result: (SUBV y x)
4768 for {
4769 if v_0.Op != OpMIPS64SUBV {
4770 break
4771 }
4772 y := v_0.Args[1]
4773 x := v_0.Args[0]
4774 v.reset(OpMIPS64SUBV)
4775 v.AddArg2(y, x)
4776 return true
4777 }
4778 // match: (NEGV (NEGV x))
4779 // result: x
4780 for {
4781 if v_0.Op != OpMIPS64NEGV {
4782 break
4783 }
4784 x := v_0.Args[0]
4785 v.copyOf(x)
4786 return true
4787 }
4788 // match: (NEGV (MOVVconst [c]))
4789 // result: (MOVVconst [-c])
4790 for {
4791 if v_0.Op != OpMIPS64MOVVconst {
4792 break
4793 }
4794 c := auxIntToInt64(v_0.AuxInt)
4795 v.reset(OpMIPS64MOVVconst)
4796 v.AuxInt = int64ToAuxInt(-c)
4797 return true
4798 }
4799 return false
4800 }
4801 func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
4802 v_1 := v.Args[1]
4803 v_0 := v.Args[0]
4804 // match: (NOR x (MOVVconst [c]))
4805 // cond: is32Bit(c)
4806 // result: (NORconst [c] x)
4807 for {
4808 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4809 x := v_0
4810 if v_1.Op != OpMIPS64MOVVconst {
4811 continue
4812 }
4813 c := auxIntToInt64(v_1.AuxInt)
4814 if !(is32Bit(c)) {
4815 continue
4816 }
4817 v.reset(OpMIPS64NORconst)
4818 v.AuxInt = int64ToAuxInt(c)
4819 v.AddArg(x)
4820 return true
4821 }
4822 break
4823 }
4824 return false
4825 }
4826 func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
4827 v_0 := v.Args[0]
4828 // match: (NORconst [c] (MOVVconst [d]))
4829 // result: (MOVVconst [^(c|d)])
4830 for {
4831 c := auxIntToInt64(v.AuxInt)
4832 if v_0.Op != OpMIPS64MOVVconst {
4833 break
4834 }
4835 d := auxIntToInt64(v_0.AuxInt)
4836 v.reset(OpMIPS64MOVVconst)
4837 v.AuxInt = int64ToAuxInt(^(c | d))
4838 return true
4839 }
4840 return false
4841 }
4842 func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
4843 v_1 := v.Args[1]
4844 v_0 := v.Args[0]
4845 // match: (OR x (MOVVconst [c]))
4846 // cond: is32Bit(c)
4847 // result: (ORconst [c] x)
4848 for {
4849 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4850 x := v_0
4851 if v_1.Op != OpMIPS64MOVVconst {
4852 continue
4853 }
4854 c := auxIntToInt64(v_1.AuxInt)
4855 if !(is32Bit(c)) {
4856 continue
4857 }
4858 v.reset(OpMIPS64ORconst)
4859 v.AuxInt = int64ToAuxInt(c)
4860 v.AddArg(x)
4861 return true
4862 }
4863 break
4864 }
4865 // match: (OR x x)
4866 // result: x
4867 for {
4868 x := v_0
4869 if x != v_1 {
4870 break
4871 }
4872 v.copyOf(x)
4873 return true
4874 }
4875 return false
4876 }
4877 func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
4878 v_0 := v.Args[0]
4879 // match: (ORconst [0] x)
4880 // result: x
4881 for {
4882 if auxIntToInt64(v.AuxInt) != 0 {
4883 break
4884 }
4885 x := v_0
4886 v.copyOf(x)
4887 return true
4888 }
4889 // match: (ORconst [-1] _)
4890 // result: (MOVVconst [-1])
4891 for {
4892 if auxIntToInt64(v.AuxInt) != -1 {
4893 break
4894 }
4895 v.reset(OpMIPS64MOVVconst)
4896 v.AuxInt = int64ToAuxInt(-1)
4897 return true
4898 }
4899 // match: (ORconst [c] (MOVVconst [d]))
4900 // result: (MOVVconst [c|d])
4901 for {
4902 c := auxIntToInt64(v.AuxInt)
4903 if v_0.Op != OpMIPS64MOVVconst {
4904 break
4905 }
4906 d := auxIntToInt64(v_0.AuxInt)
4907 v.reset(OpMIPS64MOVVconst)
4908 v.AuxInt = int64ToAuxInt(c | d)
4909 return true
4910 }
4911
4912
4913
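	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)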
4914 for {
4915 c := auxIntToInt64(v.AuxInt)
4916 if v_0.Op != OpMIPS64ORconst {
4917 break
4918 }
4919 d := auxIntToInt64(v_0.AuxInt)
4920 x := v_0.Args[0]
4921 if !(is32Bit(c | d)) {
4922 break
4923 }
4924 v.reset(OpMIPS64ORconst)
4925 v.AuxInt = int64ToAuxInt(c | d)
4926 v.AddArg(x)
4927 return true
4928 }
4929 return false
4930 }
4931 func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
4932 v_1 := v.Args[1]
4933 v_0 := v.Args[0]
4934
4935
4936
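	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)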
4937 for {
4938 if v_0.Op != OpMIPS64MOVVconst {
4939 break
4940 }
4941 c := auxIntToInt64(v_0.AuxInt)
4942 x := v_1
4943 if !(is32Bit(c)) {
4944 break
4945 }
4946 v.reset(OpMIPS64SGTconst)
4947 v.AuxInt = int64ToAuxInt(c)
4948 v.AddArg(x)
4949 return true
4950 }
4951
4952
4953 for {
4954 x := v_0
4955 if x != v_1 {
4956 break
4957 }
4958 v.reset(OpMIPS64MOVVconst)
4959 v.AuxInt = int64ToAuxInt(0)
4960 return true
4961 }
4962 return false
4963 }
4964 func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
4965 v_1 := v.Args[1]
4966 v_0 := v.Args[0]
4967
4968
4969
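	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)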
4970 for {
4971 if v_0.Op != OpMIPS64MOVVconst {
4972 break
4973 }
4974 c := auxIntToInt64(v_0.AuxInt)
4975 x := v_1
4976 if !(is32Bit(c)) {
4977 break
4978 }
4979 v.reset(OpMIPS64SGTUconst)
4980 v.AuxInt = int64ToAuxInt(c)
4981 v.AddArg(x)
4982 return true
4983 }
4984
4985
4986 for {
4987 x := v_0
4988 if x != v_1 {
4989 break
4990 }
4991 v.reset(OpMIPS64MOVVconst)
4992 v.AuxInt = int64ToAuxInt(0)
4993 return true
4994 }
4995 return false
4996 }
4997 func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
4998 v_0 := v.Args[0]
4999
5000
5001
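	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c) > uint64(d)
	// result: (MOVVconst [1])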
5002 for {
5003 c := auxIntToInt64(v.AuxInt)
5004 if v_0.Op != OpMIPS64MOVVconst {
5005 break
5006 }
5007 d := auxIntToInt64(v_0.AuxInt)
5008 if !(uint64(c) > uint64(d)) {
5009 break
5010 }
5011 v.reset(OpMIPS64MOVVconst)
5012 v.AuxInt = int64ToAuxInt(1)
5013 return true
5014 }
5015
5016
5017
5018 for {
5019 c := auxIntToInt64(v.AuxInt)
5020 if v_0.Op != OpMIPS64MOVVconst {
5021 break
5022 }
5023 d := auxIntToInt64(v_0.AuxInt)
5024 if !(uint64(c) <= uint64(d)) {
5025 break
5026 }
5027 v.reset(OpMIPS64MOVVconst)
5028 v.AuxInt = int64ToAuxInt(0)
5029 return true
5030 }
5031
5032
5033
5034 for {
5035 c := auxIntToInt64(v.AuxInt)
5036 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
5037 break
5038 }
5039 v.reset(OpMIPS64MOVVconst)
5040 v.AuxInt = int64ToAuxInt(1)
5041 return true
5042 }
5043
5044
5045
5046 for {
5047 c := auxIntToInt64(v.AuxInt)
5048 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
5049 break
5050 }
5051 v.reset(OpMIPS64MOVVconst)
5052 v.AuxInt = int64ToAuxInt(1)
5053 return true
5054 }
5055
5056
5057
5058 for {
5059 c := auxIntToInt64(v.AuxInt)
5060 if v_0.Op != OpMIPS64ANDconst {
5061 break
5062 }
5063 m := auxIntToInt64(v_0.AuxInt)
5064 if !(uint64(m) < uint64(c)) {
5065 break
5066 }
5067 v.reset(OpMIPS64MOVVconst)
5068 v.AuxInt = int64ToAuxInt(1)
5069 return true
5070 }
5071
5072
5073
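	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])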
5074 for {
5075 c := auxIntToInt64(v.AuxInt)
5076 if v_0.Op != OpMIPS64SRLVconst {
5077 break
5078 }
5079 d := auxIntToInt64(v_0.AuxInt)
5080 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
5081 break
5082 }
5083 v.reset(OpMIPS64MOVVconst)
5084 v.AuxInt = int64ToAuxInt(1)
5085 return true
5086 }
5087 return false
5088 }
5089 func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
5090 v_0 := v.Args[0]
5091
5092
5093
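	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c > d
	// result: (MOVVconst [1])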
5094 for {
5095 c := auxIntToInt64(v.AuxInt)
5096 if v_0.Op != OpMIPS64MOVVconst {
5097 break
5098 }
5099 d := auxIntToInt64(v_0.AuxInt)
5100 if !(c > d) {
5101 break
5102 }
5103 v.reset(OpMIPS64MOVVconst)
5104 v.AuxInt = int64ToAuxInt(1)
5105 return true
5106 }
5107
5108
5109
5110 for {
5111 c := auxIntToInt64(v.AuxInt)
5112 if v_0.Op != OpMIPS64MOVVconst {
5113 break
5114 }
5115 d := auxIntToInt64(v_0.AuxInt)
5116 if !(c <= d) {
5117 break
5118 }
5119 v.reset(OpMIPS64MOVVconst)
5120 v.AuxInt = int64ToAuxInt(0)
5121 return true
5122 }
5123
5124
5125
5126 for {
5127 c := auxIntToInt64(v.AuxInt)
5128 if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
5129 break
5130 }
5131 v.reset(OpMIPS64MOVVconst)
5132 v.AuxInt = int64ToAuxInt(1)
5133 return true
5134 }
5135
5136
5137
5138 for {
5139 c := auxIntToInt64(v.AuxInt)
5140 if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
5141 break
5142 }
5143 v.reset(OpMIPS64MOVVconst)
5144 v.AuxInt = int64ToAuxInt(0)
5145 return true
5146 }
5147
5148
5149
5150 for {
5151 c := auxIntToInt64(v.AuxInt)
5152 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
5153 break
5154 }
5155 v.reset(OpMIPS64MOVVconst)
5156 v.AuxInt = int64ToAuxInt(1)
5157 return true
5158 }
5159
5160
5161
5162 for {
5163 c := auxIntToInt64(v.AuxInt)
5164 if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
5165 break
5166 }
5167 v.reset(OpMIPS64MOVVconst)
5168 v.AuxInt = int64ToAuxInt(0)
5169 return true
5170 }
5171
5172
5173
5174 for {
5175 c := auxIntToInt64(v.AuxInt)
5176 if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
5177 break
5178 }
5179 v.reset(OpMIPS64MOVVconst)
5180 v.AuxInt = int64ToAuxInt(1)
5181 return true
5182 }
5183
5184
5185
5186 for {
5187 c := auxIntToInt64(v.AuxInt)
5188 if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
5189 break
5190 }
5191 v.reset(OpMIPS64MOVVconst)
5192 v.AuxInt = int64ToAuxInt(0)
5193 return true
5194 }
5195
5196
5197
5198 for {
5199 c := auxIntToInt64(v.AuxInt)
5200 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
5201 break
5202 }
5203 v.reset(OpMIPS64MOVVconst)
5204 v.AuxInt = int64ToAuxInt(1)
5205 return true
5206 }
5207
5208
5209
5210 for {
5211 c := auxIntToInt64(v.AuxInt)
5212 if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
5213 break
5214 }
5215 v.reset(OpMIPS64MOVVconst)
5216 v.AuxInt = int64ToAuxInt(0)
5217 return true
5218 }
5219
5220
5221
5222 for {
5223 c := auxIntToInt64(v.AuxInt)
5224 if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
5225 break
5226 }
5227 v.reset(OpMIPS64MOVVconst)
5228 v.AuxInt = int64ToAuxInt(0)
5229 return true
5230 }
5231
5232
5233
5234 for {
5235 c := auxIntToInt64(v.AuxInt)
5236 if v_0.Op != OpMIPS64ANDconst {
5237 break
5238 }
5239 m := auxIntToInt64(v_0.AuxInt)
5240 if !(0 <= m && m < c) {
5241 break
5242 }
5243 v.reset(OpMIPS64MOVVconst)
5244 v.AuxInt = int64ToAuxInt(1)
5245 return true
5246 }
5247
5248
5249
5250 for {
5251 c := auxIntToInt64(v.AuxInt)
5252 if v_0.Op != OpMIPS64SRLVconst {
5253 break
5254 }
5255 d := auxIntToInt64(v_0.AuxInt)
5256 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
5257 break
5258 }
5259 v.reset(OpMIPS64MOVVconst)
5260 v.AuxInt = int64ToAuxInt(1)
5261 return true
5262 }
5263 return false
5264 }
5265 func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
5266 v_1 := v.Args[1]
5267 v_0 := v.Args[0]
5268
5269
5270
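	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVVconst [0])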
5271 for {
5272 if v_1.Op != OpMIPS64MOVVconst {
5273 break
5274 }
5275 c := auxIntToInt64(v_1.AuxInt)
5276 if !(uint64(c) >= 64) {
5277 break
5278 }
5279 v.reset(OpMIPS64MOVVconst)
5280 v.AuxInt = int64ToAuxInt(0)
5281 return true
5282 }
5283
5284
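	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst [c] x)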
5285 for {
5286 x := v_0
5287 if v_1.Op != OpMIPS64MOVVconst {
5288 break
5289 }
5290 c := auxIntToInt64(v_1.AuxInt)
5291 v.reset(OpMIPS64SLLVconst)
5292 v.AuxInt = int64ToAuxInt(c)
5293 v.AddArg(x)
5294 return true
5295 }
5296 return false
5297 }
5298 func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
5299 v_0 := v.Args[0]
5300
5301
5302 for {
5303 c := auxIntToInt64(v.AuxInt)
5304 if v_0.Op != OpMIPS64MOVVconst {
5305 break
5306 }
5307 d := auxIntToInt64(v_0.AuxInt)
5308 v.reset(OpMIPS64MOVVconst)
5309 v.AuxInt = int64ToAuxInt(d << uint64(c))
5310 return true
5311 }
5312 return false
5313 }
5314 func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
5315 v_1 := v.Args[1]
5316 v_0 := v.Args[0]
5317
5318
5319
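	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c) >= 64
	// result: (SRAVconst [63] x)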
5320 for {
5321 x := v_0
5322 if v_1.Op != OpMIPS64MOVVconst {
5323 break
5324 }
5325 c := auxIntToInt64(v_1.AuxInt)
5326 if !(uint64(c) >= 64) {
5327 break
5328 }
5329 v.reset(OpMIPS64SRAVconst)
5330 v.AuxInt = int64ToAuxInt(63)
5331 v.AddArg(x)
5332 return true
5333 }
5334
5335
5336 for {
5337 x := v_0
5338 if v_1.Op != OpMIPS64MOVVconst {
5339 break
5340 }
5341 c := auxIntToInt64(v_1.AuxInt)
5342 v.reset(OpMIPS64SRAVconst)
5343 v.AuxInt = int64ToAuxInt(c)
5344 v.AddArg(x)
5345 return true
5346 }
5347 return false
5348 }
5349 func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
5350 v_0 := v.Args[0]
5351
5352
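	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])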
5353 for {
5354 c := auxIntToInt64(v.AuxInt)
5355 if v_0.Op != OpMIPS64MOVVconst {
5356 break
5357 }
5358 d := auxIntToInt64(v_0.AuxInt)
5359 v.reset(OpMIPS64MOVVconst)
5360 v.AuxInt = int64ToAuxInt(d >> uint64(c))
5361 return true
5362 }
5363 return false
5364 }
5365 func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
5366 v_1 := v.Args[1]
5367 v_0 := v.Args[0]
5368
5369
5370
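	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c) >= 64
	// result: (MOVVconst [0])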
5371 for {
5372 if v_1.Op != OpMIPS64MOVVconst {
5373 break
5374 }
5375 c := auxIntToInt64(v_1.AuxInt)
5376 if !(uint64(c) >= 64) {
5377 break
5378 }
5379 v.reset(OpMIPS64MOVVconst)
5380 v.AuxInt = int64ToAuxInt(0)
5381 return true
5382 }
5383
5384
5385 for {
5386 x := v_0
5387 if v_1.Op != OpMIPS64MOVVconst {
5388 break
5389 }
5390 c := auxIntToInt64(v_1.AuxInt)
5391 v.reset(OpMIPS64SRLVconst)
5392 v.AuxInt = int64ToAuxInt(c)
5393 v.AddArg(x)
5394 return true
5395 }
5396 return false
5397 }
5398 func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
5399 v_0 := v.Args[0]
5400
5401
5402 for {
5403 c := auxIntToInt64(v.AuxInt)
5404 if v_0.Op != OpMIPS64MOVVconst {
5405 break
5406 }
5407 d := auxIntToInt64(v_0.AuxInt)
5408 v.reset(OpMIPS64MOVVconst)
5409 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
5410 return true
5411 }
5412 return false
5413 }
5414 func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
5415 v_1 := v.Args[1]
5416 v_0 := v.Args[0]
5417
5418
5419
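	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)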
5420 for {
5421 x := v_0
5422 if v_1.Op != OpMIPS64MOVVconst {
5423 break
5424 }
5425 c := auxIntToInt64(v_1.AuxInt)
5426 if !(is32Bit(c)) {
5427 break
5428 }
5429 v.reset(OpMIPS64SUBVconst)
5430 v.AuxInt = int64ToAuxInt(c)
5431 v.AddArg(x)
5432 return true
5433 }
5434
5435
5436 for {
5437 x := v_0
5438 if v_1.Op != OpMIPS64NEGV {
5439 break
5440 }
5441 y := v_1.Args[0]
5442 v.reset(OpMIPS64ADDV)
5443 v.AddArg2(x, y)
5444 return true
5445 }
5446
5447
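	// match: (SUBV x x)
	// result: (MOVVconst [0])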
5448 for {
5449 x := v_0
5450 if x != v_1 {
5451 break
5452 }
5453 v.reset(OpMIPS64MOVVconst)
5454 v.AuxInt = int64ToAuxInt(0)
5455 return true
5456 }
5457
5458
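	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)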
5459 for {
5460 if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
5461 break
5462 }
5463 x := v_1
5464 v.reset(OpMIPS64NEGV)
5465 v.AddArg(x)
5466 return true
5467 }
5468 return false
5469 }
5470 func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
5471 v_0 := v.Args[0]
5472
5473
5474 for {
5475 if auxIntToInt64(v.AuxInt) != 0 {
5476 break
5477 }
5478 x := v_0
5479 v.copyOf(x)
5480 return true
5481 }
5482
5483
5484 for {
5485 c := auxIntToInt64(v.AuxInt)
5486 if v_0.Op != OpMIPS64MOVVconst {
5487 break
5488 }
5489 d := auxIntToInt64(v_0.AuxInt)
5490 v.reset(OpMIPS64MOVVconst)
5491 v.AuxInt = int64ToAuxInt(d - c)
5492 return true
5493 }
5494
5495
5496
5497 for {
5498 c := auxIntToInt64(v.AuxInt)
5499 if v_0.Op != OpMIPS64SUBVconst {
5500 break
5501 }
5502 d := auxIntToInt64(v_0.AuxInt)
5503 x := v_0.Args[0]
5504 if !(is32Bit(-c - d)) {
5505 break
5506 }
5507 v.reset(OpMIPS64ADDVconst)
5508 v.AuxInt = int64ToAuxInt(-c - d)
5509 v.AddArg(x)
5510 return true
5511 }
5512
5513
5514
5515 for {
5516 c := auxIntToInt64(v.AuxInt)
5517 if v_0.Op != OpMIPS64ADDVconst {
5518 break
5519 }
5520 d := auxIntToInt64(v_0.AuxInt)
5521 x := v_0.Args[0]
5522 if !(is32Bit(-c + d)) {
5523 break
5524 }
5525 v.reset(OpMIPS64ADDVconst)
5526 v.AuxInt = int64ToAuxInt(-c + d)
5527 v.AddArg(x)
5528 return true
5529 }
5530 return false
5531 }
5532 func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
5533 v_1 := v.Args[1]
5534 v_0 := v.Args[0]
5535
5536
5537
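	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)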
5538 for {
5539 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5540 x := v_0
5541 if v_1.Op != OpMIPS64MOVVconst {
5542 continue
5543 }
5544 c := auxIntToInt64(v_1.AuxInt)
5545 if !(is32Bit(c)) {
5546 continue
5547 }
5548 v.reset(OpMIPS64XORconst)
5549 v.AuxInt = int64ToAuxInt(c)
5550 v.AddArg(x)
5551 return true
5552 }
5553 break
5554 }
5555
5556
5557 for {
5558 x := v_0
5559 if x != v_1 {
5560 break
5561 }
5562 v.reset(OpMIPS64MOVVconst)
5563 v.AuxInt = int64ToAuxInt(0)
5564 return true
5565 }
5566 return false
5567 }
5568 func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
5569 v_0 := v.Args[0]
5570
5571
5572 for {
5573 if auxIntToInt64(v.AuxInt) != 0 {
5574 break
5575 }
5576 x := v_0
5577 v.copyOf(x)
5578 return true
5579 }
5580
5581
5582 for {
5583 if auxIntToInt64(v.AuxInt) != -1 {
5584 break
5585 }
5586 x := v_0
5587 v.reset(OpMIPS64NORconst)
5588 v.AuxInt = int64ToAuxInt(0)
5589 v.AddArg(x)
5590 return true
5591 }
5592
5593
5594 for {
5595 c := auxIntToInt64(v.AuxInt)
5596 if v_0.Op != OpMIPS64MOVVconst {
5597 break
5598 }
5599 d := auxIntToInt64(v_0.AuxInt)
5600 v.reset(OpMIPS64MOVVconst)
5601 v.AuxInt = int64ToAuxInt(c ^ d)
5602 return true
5603 }
5604
5605
5606
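	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)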
5607 for {
5608 c := auxIntToInt64(v.AuxInt)
5609 if v_0.Op != OpMIPS64XORconst {
5610 break
5611 }
5612 d := auxIntToInt64(v_0.AuxInt)
5613 x := v_0.Args[0]
5614 if !(is32Bit(c ^ d)) {
5615 break
5616 }
5617 v.reset(OpMIPS64XORconst)
5618 v.AuxInt = int64ToAuxInt(c ^ d)
5619 v.AddArg(x)
5620 return true
5621 }
5622 return false
5623 }
5624 func rewriteValueMIPS64_OpMod16(v *Value) bool {
5625 v_1 := v.Args[1]
5626 v_0 := v.Args[0]
5627 b := v.Block
5628 typ := &b.Func.Config.Types
5629
5630
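	// match: (Mod16 x y)
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))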
5631 for {
5632 x := v_0
5633 y := v_1
5634 v.reset(OpSelect0)
5635 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5636 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5637 v1.AddArg(x)
5638 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5639 v2.AddArg(y)
5640 v0.AddArg2(v1, v2)
5641 v.AddArg(v0)
5642 return true
5643 }
5644 }
5645 func rewriteValueMIPS64_OpMod16u(v *Value) bool {
5646 v_1 := v.Args[1]
5647 v_0 := v.Args[0]
5648 b := v.Block
5649 typ := &b.Func.Config.Types
5650
5651
5652 for {
5653 x := v_0
5654 y := v_1
5655 v.reset(OpSelect0)
5656 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5657 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5658 v1.AddArg(x)
5659 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5660 v2.AddArg(y)
5661 v0.AddArg2(v1, v2)
5662 v.AddArg(v0)
5663 return true
5664 }
5665 }
5666 func rewriteValueMIPS64_OpMod32(v *Value) bool {
5667 v_1 := v.Args[1]
5668 v_0 := v.Args[0]
5669 b := v.Block
5670 typ := &b.Func.Config.Types
5671
5672
5673 for {
5674 x := v_0
5675 y := v_1
5676 v.reset(OpSelect0)
5677 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5678 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5679 v1.AddArg(x)
5680 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5681 v2.AddArg(y)
5682 v0.AddArg2(v1, v2)
5683 v.AddArg(v0)
5684 return true
5685 }
5686 }
5687 func rewriteValueMIPS64_OpMod32u(v *Value) bool {
5688 v_1 := v.Args[1]
5689 v_0 := v.Args[0]
5690 b := v.Block
5691 typ := &b.Func.Config.Types
5692
5693
5694 for {
5695 x := v_0
5696 y := v_1
5697 v.reset(OpSelect0)
5698 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5699 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5700 v1.AddArg(x)
5701 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5702 v2.AddArg(y)
5703 v0.AddArg2(v1, v2)
5704 v.AddArg(v0)
5705 return true
5706 }
5707 }
5708 func rewriteValueMIPS64_OpMod64(v *Value) bool {
5709 v_1 := v.Args[1]
5710 v_0 := v.Args[0]
5711 b := v.Block
5712 typ := &b.Func.Config.Types
5713
5714
5715 for {
5716 x := v_0
5717 y := v_1
5718 v.reset(OpSelect0)
5719 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5720 v0.AddArg2(x, y)
5721 v.AddArg(v0)
5722 return true
5723 }
5724 }
5725 func rewriteValueMIPS64_OpMod64u(v *Value) bool {
5726 v_1 := v.Args[1]
5727 v_0 := v.Args[0]
5728 b := v.Block
5729 typ := &b.Func.Config.Types
5730
5731
5732 for {
5733 x := v_0
5734 y := v_1
5735 v.reset(OpSelect0)
5736 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5737 v0.AddArg2(x, y)
5738 v.AddArg(v0)
5739 return true
5740 }
5741 }
5742 func rewriteValueMIPS64_OpMod8(v *Value) bool {
5743 v_1 := v.Args[1]
5744 v_0 := v.Args[0]
5745 b := v.Block
5746 typ := &b.Func.Config.Types
5747
5748
5749 for {
5750 x := v_0
5751 y := v_1
5752 v.reset(OpSelect0)
5753 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5754 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5755 v1.AddArg(x)
5756 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5757 v2.AddArg(y)
5758 v0.AddArg2(v1, v2)
5759 v.AddArg(v0)
5760 return true
5761 }
5762 }
5763 func rewriteValueMIPS64_OpMod8u(v *Value) bool {
5764 v_1 := v.Args[1]
5765 v_0 := v.Args[0]
5766 b := v.Block
5767 typ := &b.Func.Config.Types
5768
5769
5770 for {
5771 x := v_0
5772 y := v_1
5773 v.reset(OpSelect0)
5774 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5775 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5776 v1.AddArg(x)
5777 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5778 v2.AddArg(y)
5779 v0.AddArg2(v1, v2)
5780 v.AddArg(v0)
5781 return true
5782 }
5783 }
5784 func rewriteValueMIPS64_OpMove(v *Value) bool {
5785 v_2 := v.Args[2]
5786 v_1 := v.Args[1]
5787 v_0 := v.Args[0]
5788 b := v.Block
5789 config := b.Func.Config
5790 typ := &b.Func.Config.Types
5791
5792
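	// match: (Move [0] _ _ mem)
	// result: mem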
5793 for {
5794 if auxIntToInt64(v.AuxInt) != 0 {
5795 break
5796 }
5797 mem := v_2
5798 v.copyOf(mem)
5799 return true
5800 }
5801
5802
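	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)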
5803 for {
5804 if auxIntToInt64(v.AuxInt) != 1 {
5805 break
5806 }
5807 dst := v_0
5808 src := v_1
5809 mem := v_2
5810 v.reset(OpMIPS64MOVBstore)
5811 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5812 v0.AddArg2(src, mem)
5813 v.AddArg3(dst, v0, mem)
5814 return true
5815 }
5816
5817
5818
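	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)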
5819 for {
5820 if auxIntToInt64(v.AuxInt) != 2 {
5821 break
5822 }
5823 t := auxToType(v.Aux)
5824 dst := v_0
5825 src := v_1
5826 mem := v_2
5827 if !(t.Alignment()%2 == 0) {
5828 break
5829 }
5830 v.reset(OpMIPS64MOVHstore)
5831 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5832 v0.AddArg2(src, mem)
5833 v.AddArg3(dst, v0, mem)
5834 return true
5835 }
5836
5837
5838 for {
5839 if auxIntToInt64(v.AuxInt) != 2 {
5840 break
5841 }
5842 dst := v_0
5843 src := v_1
5844 mem := v_2
5845 v.reset(OpMIPS64MOVBstore)
5846 v.AuxInt = int32ToAuxInt(1)
5847 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5848 v0.AuxInt = int32ToAuxInt(1)
5849 v0.AddArg2(src, mem)
5850 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5851 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5852 v2.AddArg2(src, mem)
5853 v1.AddArg3(dst, v2, mem)
5854 v.AddArg3(dst, v0, v1)
5855 return true
5856 }
5857
5858
5859
5860 for {
5861 if auxIntToInt64(v.AuxInt) != 4 {
5862 break
5863 }
5864 t := auxToType(v.Aux)
5865 dst := v_0
5866 src := v_1
5867 mem := v_2
5868 if !(t.Alignment()%4 == 0) {
5869 break
5870 }
5871 v.reset(OpMIPS64MOVWstore)
5872 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5873 v0.AddArg2(src, mem)
5874 v.AddArg3(dst, v0, mem)
5875 return true
5876 }
5877
5878
5879
5880 for {
5881 if auxIntToInt64(v.AuxInt) != 4 {
5882 break
5883 }
5884 t := auxToType(v.Aux)
5885 dst := v_0
5886 src := v_1
5887 mem := v_2
5888 if !(t.Alignment()%2 == 0) {
5889 break
5890 }
5891 v.reset(OpMIPS64MOVHstore)
5892 v.AuxInt = int32ToAuxInt(2)
5893 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5894 v0.AuxInt = int32ToAuxInt(2)
5895 v0.AddArg2(src, mem)
5896 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5897 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5898 v2.AddArg2(src, mem)
5899 v1.AddArg3(dst, v2, mem)
5900 v.AddArg3(dst, v0, v1)
5901 return true
5902 }
5903
5904
5905 for {
5906 if auxIntToInt64(v.AuxInt) != 4 {
5907 break
5908 }
5909 dst := v_0
5910 src := v_1
5911 mem := v_2
5912 v.reset(OpMIPS64MOVBstore)
5913 v.AuxInt = int32ToAuxInt(3)
5914 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5915 v0.AuxInt = int32ToAuxInt(3)
5916 v0.AddArg2(src, mem)
5917 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5918 v1.AuxInt = int32ToAuxInt(2)
5919 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5920 v2.AuxInt = int32ToAuxInt(2)
5921 v2.AddArg2(src, mem)
5922 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5923 v3.AuxInt = int32ToAuxInt(1)
5924 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5925 v4.AuxInt = int32ToAuxInt(1)
5926 v4.AddArg2(src, mem)
5927 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5928 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5929 v6.AddArg2(src, mem)
5930 v5.AddArg3(dst, v6, mem)
5931 v3.AddArg3(dst, v4, v5)
5932 v1.AddArg3(dst, v2, v3)
5933 v.AddArg3(dst, v0, v1)
5934 return true
5935 }
5936
5937
5938
5939 for {
5940 if auxIntToInt64(v.AuxInt) != 8 {
5941 break
5942 }
5943 t := auxToType(v.Aux)
5944 dst := v_0
5945 src := v_1
5946 mem := v_2
5947 if !(t.Alignment()%8 == 0) {
5948 break
5949 }
5950 v.reset(OpMIPS64MOVVstore)
5951 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5952 v0.AddArg2(src, mem)
5953 v.AddArg3(dst, v0, mem)
5954 return true
5955 }
5956
5957
5958
5959 for {
5960 if auxIntToInt64(v.AuxInt) != 8 {
5961 break
5962 }
5963 t := auxToType(v.Aux)
5964 dst := v_0
5965 src := v_1
5966 mem := v_2
5967 if !(t.Alignment()%4 == 0) {
5968 break
5969 }
5970 v.reset(OpMIPS64MOVWstore)
5971 v.AuxInt = int32ToAuxInt(4)
5972 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5973 v0.AuxInt = int32ToAuxInt(4)
5974 v0.AddArg2(src, mem)
5975 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5976 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5977 v2.AddArg2(src, mem)
5978 v1.AddArg3(dst, v2, mem)
5979 v.AddArg3(dst, v0, v1)
5980 return true
5981 }
5982
5983
5984
5985 for {
5986 if auxIntToInt64(v.AuxInt) != 8 {
5987 break
5988 }
5989 t := auxToType(v.Aux)
5990 dst := v_0
5991 src := v_1
5992 mem := v_2
5993 if !(t.Alignment()%2 == 0) {
5994 break
5995 }
5996 v.reset(OpMIPS64MOVHstore)
5997 v.AuxInt = int32ToAuxInt(6)
5998 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5999 v0.AuxInt = int32ToAuxInt(6)
6000 v0.AddArg2(src, mem)
6001 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
6002 v1.AuxInt = int32ToAuxInt(4)
6003 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
6004 v2.AuxInt = int32ToAuxInt(4)
6005 v2.AddArg2(src, mem)
6006 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
6007 v3.AuxInt = int32ToAuxInt(2)
6008 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
6009 v4.AuxInt = int32ToAuxInt(2)
6010 v4.AddArg2(src, mem)
6011 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
6012 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
6013 v6.AddArg2(src, mem)
6014 v5.AddArg3(dst, v6, mem)
6015 v3.AddArg3(dst, v4, v5)
6016 v1.AddArg3(dst, v2, v3)
6017 v.AddArg3(dst, v0, v1)
6018 return true
6019 }
6020
6021
6022 for {
6023 if auxIntToInt64(v.AuxInt) != 3 {
6024 break
6025 }
6026 dst := v_0
6027 src := v_1
6028 mem := v_2
6029 v.reset(OpMIPS64MOVBstore)
6030 v.AuxInt = int32ToAuxInt(2)
6031 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
6032 v0.AuxInt = int32ToAuxInt(2)
6033 v0.AddArg2(src, mem)
6034 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
6035 v1.AuxInt = int32ToAuxInt(1)
6036 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
6037 v2.AuxInt = int32ToAuxInt(1)
6038 v2.AddArg2(src, mem)
6039 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
6040 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
6041 v4.AddArg2(src, mem)
6042 v3.AddArg3(dst, v4, mem)
6043 v1.AddArg3(dst, v2, v3)
6044 v.AddArg3(dst, v0, v1)
6045 return true
6046 }
6047
6048
6049
6050 for {
6051 if auxIntToInt64(v.AuxInt) != 6 {
6052 break
6053 }
6054 t := auxToType(v.Aux)
6055 dst := v_0
6056 src := v_1
6057 mem := v_2
6058 if !(t.Alignment()%2 == 0) {
6059 break
6060 }
6061 v.reset(OpMIPS64MOVHstore)
6062 v.AuxInt = int32ToAuxInt(4)
6063 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
6064 v0.AuxInt = int32ToAuxInt(4)
6065 v0.AddArg2(src, mem)
6066 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
6067 v1.AuxInt = int32ToAuxInt(2)
6068 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
6069 v2.AuxInt = int32ToAuxInt(2)
6070 v2.AddArg2(src, mem)
6071 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
6072 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
6073 v4.AddArg2(src, mem)
6074 v3.AddArg3(dst, v4, mem)
6075 v1.AddArg3(dst, v2, v3)
6076 v.AddArg3(dst, v0, v1)
6077 return true
6078 }
6079
6080
6081
6082 for {
6083 if auxIntToInt64(v.AuxInt) != 12 {
6084 break
6085 }
6086 t := auxToType(v.Aux)
6087 dst := v_0
6088 src := v_1
6089 mem := v_2
6090 if !(t.Alignment()%4 == 0) {
6091 break
6092 }
6093 v.reset(OpMIPS64MOVWstore)
6094 v.AuxInt = int32ToAuxInt(8)
6095 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
6096 v0.AuxInt = int32ToAuxInt(8)
6097 v0.AddArg2(src, mem)
6098 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
6099 v1.AuxInt = int32ToAuxInt(4)
6100 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
6101 v2.AuxInt = int32ToAuxInt(4)
6102 v2.AddArg2(src, mem)
6103 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
6104 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
6105 v4.AddArg2(src, mem)
6106 v3.AddArg3(dst, v4, mem)
6107 v1.AddArg3(dst, v2, v3)
6108 v.AddArg3(dst, v0, v1)
6109 return true
6110 }
6111
6112
6113
6114 for {
6115 if auxIntToInt64(v.AuxInt) != 16 {
6116 break
6117 }
6118 t := auxToType(v.Aux)
6119 dst := v_0
6120 src := v_1
6121 mem := v_2
6122 if !(t.Alignment()%8 == 0) {
6123 break
6124 }
6125 v.reset(OpMIPS64MOVVstore)
6126 v.AuxInt = int32ToAuxInt(8)
6127 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6128 v0.AuxInt = int32ToAuxInt(8)
6129 v0.AddArg2(src, mem)
6130 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6131 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6132 v2.AddArg2(src, mem)
6133 v1.AddArg3(dst, v2, mem)
6134 v.AddArg3(dst, v0, v1)
6135 return true
6136 }
6137
6138
6139
6140 for {
6141 if auxIntToInt64(v.AuxInt) != 24 {
6142 break
6143 }
6144 t := auxToType(v.Aux)
6145 dst := v_0
6146 src := v_1
6147 mem := v_2
6148 if !(t.Alignment()%8 == 0) {
6149 break
6150 }
6151 v.reset(OpMIPS64MOVVstore)
6152 v.AuxInt = int32ToAuxInt(16)
6153 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6154 v0.AuxInt = int32ToAuxInt(16)
6155 v0.AddArg2(src, mem)
6156 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6157 v1.AuxInt = int32ToAuxInt(8)
6158 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6159 v2.AuxInt = int32ToAuxInt(8)
6160 v2.AddArg2(src, mem)
6161 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
6162 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6163 v4.AddArg2(src, mem)
6164 v3.AddArg3(dst, v4, mem)
6165 v1.AddArg3(dst, v2, v3)
6166 v.AddArg3(dst, v0, v1)
6167 return true
6168 }
6169
6170
6171
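	// Medium-size aligned moves are lowered to a Duff's device copy; the AuxInt
	// selects the starting offset within the duffcopy routine.
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)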
6172 for {
6173 s := auxIntToInt64(v.AuxInt)
6174 t := auxToType(v.Aux)
6175 dst := v_0
6176 src := v_1
6177 mem := v_2
6178 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
6179 break
6180 }
6181 v.reset(OpMIPS64DUFFCOPY)
6182 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
6183 v.AddArg3(dst, src, mem)
6184 return true
6185 }
6186
6187
6188
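	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)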
6189 for {
6190 s := auxIntToInt64(v.AuxInt)
6191 t := auxToType(v.Aux)
6192 dst := v_0
6193 src := v_1
6194 mem := v_2
6195 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
6196 break
6197 }
6198 v.reset(OpMIPS64LoweredMove)
6199 v.AuxInt = int64ToAuxInt(t.Alignment())
6200 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
6201 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
6202 v0.AddArg(src)
6203 v.AddArg4(dst, src, v0, mem)
6204 return true
6205 }
6206 return false
6207 }
6208 func rewriteValueMIPS64_OpMul16(v *Value) bool {
6209 v_1 := v.Args[1]
6210 v_0 := v.Args[0]
6211 b := v.Block
6212 typ := &b.Func.Config.Types
6213
6214
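	// match: (Mul16 x y)
	// result: (Select1 (MULVU x y))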
6215 for {
6216 x := v_0
6217 y := v_1
6218 v.reset(OpSelect1)
6219 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6220 v0.AddArg2(x, y)
6221 v.AddArg(v0)
6222 return true
6223 }
6224 }
6225 func rewriteValueMIPS64_OpMul32(v *Value) bool {
6226 v_1 := v.Args[1]
6227 v_0 := v.Args[0]
6228 b := v.Block
6229 typ := &b.Func.Config.Types
6230
6231
6232 for {
6233 x := v_0
6234 y := v_1
6235 v.reset(OpSelect1)
6236 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6237 v0.AddArg2(x, y)
6238 v.AddArg(v0)
6239 return true
6240 }
6241 }
6242 func rewriteValueMIPS64_OpMul64(v *Value) bool {
6243 v_1 := v.Args[1]
6244 v_0 := v.Args[0]
6245 b := v.Block
6246 typ := &b.Func.Config.Types
6247
6248
6249 for {
6250 x := v_0
6251 y := v_1
6252 v.reset(OpSelect1)
6253 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6254 v0.AddArg2(x, y)
6255 v.AddArg(v0)
6256 return true
6257 }
6258 }
6259 func rewriteValueMIPS64_OpMul8(v *Value) bool {
6260 v_1 := v.Args[1]
6261 v_0 := v.Args[0]
6262 b := v.Block
6263 typ := &b.Func.Config.Types
6264
6265
6266 for {
6267 x := v_0
6268 y := v_1
6269 v.reset(OpSelect1)
6270 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6271 v0.AddArg2(x, y)
6272 v.AddArg(v0)
6273 return true
6274 }
6275 }
6276 func rewriteValueMIPS64_OpNeq16(v *Value) bool {
6277 v_1 := v.Args[1]
6278 v_0 := v.Args[0]
6279 b := v.Block
6280 typ := &b.Func.Config.Types
6281
6282
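	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))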
6283 for {
6284 x := v_0
6285 y := v_1
6286 v.reset(OpMIPS64SGTU)
6287 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6288 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6289 v1.AddArg(x)
6290 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6291 v2.AddArg(y)
6292 v0.AddArg2(v1, v2)
6293 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6294 v3.AuxInt = int64ToAuxInt(0)
6295 v.AddArg2(v0, v3)
6296 return true
6297 }
6298 }
6299 func rewriteValueMIPS64_OpNeq32(v *Value) bool {
6300 v_1 := v.Args[1]
6301 v_0 := v.Args[0]
6302 b := v.Block
6303 typ := &b.Func.Config.Types
6304
6305
6306 for {
6307 x := v_0
6308 y := v_1
6309 v.reset(OpMIPS64SGTU)
6310 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6311 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6312 v1.AddArg(x)
6313 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6314 v2.AddArg(y)
6315 v0.AddArg2(v1, v2)
6316 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6317 v3.AuxInt = int64ToAuxInt(0)
6318 v.AddArg2(v0, v3)
6319 return true
6320 }
6321 }
6322 func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
6323 v_1 := v.Args[1]
6324 v_0 := v.Args[0]
6325 b := v.Block
6326
6327
6328 for {
6329 x := v_0
6330 y := v_1
6331 v.reset(OpMIPS64FPFlagFalse)
6332 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
6333 v0.AddArg2(x, y)
6334 v.AddArg(v0)
6335 return true
6336 }
6337 }
6338 func rewriteValueMIPS64_OpNeq64(v *Value) bool {
6339 v_1 := v.Args[1]
6340 v_0 := v.Args[0]
6341 b := v.Block
6342 typ := &b.Func.Config.Types
6343
6344
6345 for {
6346 x := v_0
6347 y := v_1
6348 v.reset(OpMIPS64SGTU)
6349 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6350 v0.AddArg2(x, y)
6351 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6352 v1.AuxInt = int64ToAuxInt(0)
6353 v.AddArg2(v0, v1)
6354 return true
6355 }
6356 }
6357 func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
6358 v_1 := v.Args[1]
6359 v_0 := v.Args[0]
6360 b := v.Block
6361
6362
6363 for {
6364 x := v_0
6365 y := v_1
6366 v.reset(OpMIPS64FPFlagFalse)
6367 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
6368 v0.AddArg2(x, y)
6369 v.AddArg(v0)
6370 return true
6371 }
6372 }
6373 func rewriteValueMIPS64_OpNeq8(v *Value) bool {
6374 v_1 := v.Args[1]
6375 v_0 := v.Args[0]
6376 b := v.Block
6377 typ := &b.Func.Config.Types
6378
6379
6380 for {
6381 x := v_0
6382 y := v_1
6383 v.reset(OpMIPS64SGTU)
6384 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6385 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6386 v1.AddArg(x)
6387 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6388 v2.AddArg(y)
6389 v0.AddArg2(v1, v2)
6390 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6391 v3.AuxInt = int64ToAuxInt(0)
6392 v.AddArg2(v0, v3)
6393 return true
6394 }
6395 }
6396 func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
6397 v_1 := v.Args[1]
6398 v_0 := v.Args[0]
6399 b := v.Block
6400 typ := &b.Func.Config.Types
6401
6402
6403 for {
6404 x := v_0
6405 y := v_1
6406 v.reset(OpMIPS64SGTU)
6407 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6408 v0.AddArg2(x, y)
6409 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6410 v1.AuxInt = int64ToAuxInt(0)
6411 v.AddArg2(v0, v1)
6412 return true
6413 }
6414 }
6415 func rewriteValueMIPS64_OpNot(v *Value) bool {
6416 v_0 := v.Args[0]
6417
6418
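	// match: (Not x)
	// result: (XORconst [1] x)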
6419 for {
6420 x := v_0
6421 v.reset(OpMIPS64XORconst)
6422 v.AuxInt = int64ToAuxInt(1)
6423 v.AddArg(x)
6424 return true
6425 }
6426 }
6427 func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
6428 v_0 := v.Args[0]
6429
6430
6431
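	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVVaddr [int32(off)] ptr)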
6432 for {
6433 off := auxIntToInt64(v.AuxInt)
6434 ptr := v_0
6435 if ptr.Op != OpSP || !(is32Bit(off)) {
6436 break
6437 }
6438 v.reset(OpMIPS64MOVVaddr)
6439 v.AuxInt = int32ToAuxInt(int32(off))
6440 v.AddArg(ptr)
6441 return true
6442 }
6443
6444
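	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)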
6445 for {
6446 off := auxIntToInt64(v.AuxInt)
6447 ptr := v_0
6448 v.reset(OpMIPS64ADDVconst)
6449 v.AuxInt = int64ToAuxInt(off)
6450 v.AddArg(ptr)
6451 return true
6452 }
6453 }
6454 func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
6455 v_1 := v.Args[1]
6456 v_0 := v.Args[0]
6457 b := v.Block
6458 typ := &b.Func.Config.Types
6459
6460
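	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))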
6461 for {
6462 t := v.Type
6463 x := v_0
6464 if v_1.Op != OpMIPS64MOVVconst {
6465 break
6466 }
6467 c := auxIntToInt64(v_1.AuxInt)
6468 v.reset(OpOr16)
6469 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6470 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6471 v1.AuxInt = int64ToAuxInt(c & 15)
6472 v0.AddArg2(x, v1)
6473 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6474 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6475 v3.AuxInt = int64ToAuxInt(-c & 15)
6476 v2.AddArg2(x, v3)
6477 v.AddArg2(v0, v2)
6478 return true
6479 }
6480 return false
6481 }
6482 func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
6483 v_1 := v.Args[1]
6484 v_0 := v.Args[0]
6485 b := v.Block
6486 typ := &b.Func.Config.Types
6487
6488
6489 for {
6490 t := v.Type
6491 x := v_0
6492 if v_1.Op != OpMIPS64MOVVconst {
6493 break
6494 }
6495 c := auxIntToInt64(v_1.AuxInt)
6496 v.reset(OpOr32)
6497 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6498 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6499 v1.AuxInt = int64ToAuxInt(c & 31)
6500 v0.AddArg2(x, v1)
6501 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6502 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6503 v3.AuxInt = int64ToAuxInt(-c & 31)
6504 v2.AddArg2(x, v3)
6505 v.AddArg2(v0, v2)
6506 return true
6507 }
6508 return false
6509 }
6510 func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
6511 v_1 := v.Args[1]
6512 v_0 := v.Args[0]
6513 b := v.Block
6514 typ := &b.Func.Config.Types
6515
6516
6517 for {
6518 t := v.Type
6519 x := v_0
6520 if v_1.Op != OpMIPS64MOVVconst {
6521 break
6522 }
6523 c := auxIntToInt64(v_1.AuxInt)
6524 v.reset(OpOr64)
6525 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6526 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6527 v1.AuxInt = int64ToAuxInt(c & 63)
6528 v0.AddArg2(x, v1)
6529 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6530 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6531 v3.AuxInt = int64ToAuxInt(-c & 63)
6532 v2.AddArg2(x, v3)
6533 v.AddArg2(v0, v2)
6534 return true
6535 }
6536 return false
6537 }
6538 func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
6539 v_1 := v.Args[1]
6540 v_0 := v.Args[0]
6541 b := v.Block
6542 typ := &b.Func.Config.Types
6543
6544
6545 for {
6546 t := v.Type
6547 x := v_0
6548 if v_1.Op != OpMIPS64MOVVconst {
6549 break
6550 }
6551 c := auxIntToInt64(v_1.AuxInt)
6552 v.reset(OpOr8)
6553 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6554 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6555 v1.AuxInt = int64ToAuxInt(c & 7)
6556 v0.AddArg2(x, v1)
6557 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6558 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6559 v3.AuxInt = int64ToAuxInt(-c & 7)
6560 v2.AddArg2(x, v3)
6561 v.AddArg2(v0, v2)
6562 return true
6563 }
6564 return false
6565 }
6566 func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
6567 v_1 := v.Args[1]
6568 v_0 := v.Args[0]
6569 b := v.Block
6570 typ := &b.Func.Config.Types
6571
6572
6573 for {
6574 t := v.Type
6575 x := v_0
6576 y := v_1
6577 v.reset(OpMIPS64AND)
6578 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6579 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6580 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6581 v2.AuxInt = int64ToAuxInt(64)
6582 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6583 v3.AddArg(y)
6584 v1.AddArg2(v2, v3)
6585 v0.AddArg(v1)
6586 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6587 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6588 v5.AddArg(x)
6589 v4.AddArg2(v5, v3)
6590 v.AddArg2(v0, v4)
6591 return true
6592 }
6593 }
6594 func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
6595 v_1 := v.Args[1]
6596 v_0 := v.Args[0]
6597 b := v.Block
6598 typ := &b.Func.Config.Types
6599
6600
6601 for {
6602 t := v.Type
6603 x := v_0
6604 y := v_1
6605 v.reset(OpMIPS64AND)
6606 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6607 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6608 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6609 v2.AuxInt = int64ToAuxInt(64)
6610 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6611 v3.AddArg(y)
6612 v1.AddArg2(v2, v3)
6613 v0.AddArg(v1)
6614 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6615 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6616 v5.AddArg(x)
6617 v4.AddArg2(v5, v3)
6618 v.AddArg2(v0, v4)
6619 return true
6620 }
6621 }
6622 func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
6623 v_1 := v.Args[1]
6624 v_0 := v.Args[0]
6625 b := v.Block
6626 typ := &b.Func.Config.Types
6627
6628
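	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))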
6629 for {
6630 t := v.Type
6631 x := v_0
6632 y := v_1
6633 v.reset(OpMIPS64AND)
6634 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6635 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6636 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6637 v2.AuxInt = int64ToAuxInt(64)
6638 v1.AddArg2(v2, y)
6639 v0.AddArg(v1)
6640 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6641 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6642 v4.AddArg(x)
6643 v3.AddArg2(v4, y)
6644 v.AddArg2(v0, v3)
6645 return true
6646 }
6647 }
6648 func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
6649 v_1 := v.Args[1]
6650 v_0 := v.Args[0]
6651 b := v.Block
6652 typ := &b.Func.Config.Types
6653
6654
6655 for {
6656 t := v.Type
6657 x := v_0
6658 y := v_1
6659 v.reset(OpMIPS64AND)
6660 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6661 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6662 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6663 v2.AuxInt = int64ToAuxInt(64)
6664 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6665 v3.AddArg(y)
6666 v1.AddArg2(v2, v3)
6667 v0.AddArg(v1)
6668 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6669 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6670 v5.AddArg(x)
6671 v4.AddArg2(v5, v3)
6672 v.AddArg2(v0, v4)
6673 return true
6674 }
6675 }
6676 func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
6677 v_1 := v.Args[1]
6678 v_0 := v.Args[0]
6679 b := v.Block
6680 typ := &b.Func.Config.Types
6681
6682
6683 for {
6684 t := v.Type
6685 x := v_0
6686 y := v_1
6687 v.reset(OpMIPS64SRAV)
6688 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6689 v0.AddArg(x)
6690 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6691 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6692 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6693 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6694 v4.AddArg(y)
6695 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6696 v5.AuxInt = int64ToAuxInt(63)
6697 v3.AddArg2(v4, v5)
6698 v2.AddArg(v3)
6699 v1.AddArg2(v2, v4)
6700 v.AddArg2(v0, v1)
6701 return true
6702 }
6703 }
6704 func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
6705 v_1 := v.Args[1]
6706 v_0 := v.Args[0]
6707 b := v.Block
6708 typ := &b.Func.Config.Types
6709
6710
6711 for {
6712 t := v.Type
6713 x := v_0
6714 y := v_1
6715 v.reset(OpMIPS64SRAV)
6716 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6717 v0.AddArg(x)
6718 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6719 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6720 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6721 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6722 v4.AddArg(y)
6723 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6724 v5.AuxInt = int64ToAuxInt(63)
6725 v3.AddArg2(v4, v5)
6726 v2.AddArg(v3)
6727 v1.AddArg2(v2, v4)
6728 v.AddArg2(v0, v1)
6729 return true
6730 }
6731 }
6732 func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
6733 v_1 := v.Args[1]
6734 v_0 := v.Args[0]
6735 b := v.Block
6736 typ := &b.Func.Config.Types
6737
6738
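	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst [63]))) y))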
6739 for {
6740 t := v.Type
6741 x := v_0
6742 y := v_1
6743 v.reset(OpMIPS64SRAV)
6744 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6745 v0.AddArg(x)
6746 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6747 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6748 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6749 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6750 v4.AuxInt = int64ToAuxInt(63)
6751 v3.AddArg2(y, v4)
6752 v2.AddArg(v3)
6753 v1.AddArg2(v2, y)
6754 v.AddArg2(v0, v1)
6755 return true
6756 }
6757 }
6758 func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
6759 v_1 := v.Args[1]
6760 v_0 := v.Args[0]
6761 b := v.Block
6762 typ := &b.Func.Config.Types
6763
6764
6765 for {
6766 t := v.Type
6767 x := v_0
6768 y := v_1
6769 v.reset(OpMIPS64SRAV)
6770 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6771 v0.AddArg(x)
6772 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6773 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6774 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6775 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6776 v4.AddArg(y)
6777 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6778 v5.AuxInt = int64ToAuxInt(63)
6779 v3.AddArg2(v4, v5)
6780 v2.AddArg(v3)
6781 v1.AddArg2(v2, v4)
6782 v.AddArg2(v0, v1)
6783 return true
6784 }
6785 }
6786 func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
6787 v_1 := v.Args[1]
6788 v_0 := v.Args[0]
6789 b := v.Block
6790 typ := &b.Func.Config.Types
6791
6792
6793 for {
6794 t := v.Type
6795 x := v_0
6796 y := v_1
6797 v.reset(OpMIPS64AND)
6798 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6799 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6800 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6801 v2.AuxInt = int64ToAuxInt(64)
6802 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6803 v3.AddArg(y)
6804 v1.AddArg2(v2, v3)
6805 v0.AddArg(v1)
6806 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6807 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6808 v5.AddArg(x)
6809 v4.AddArg2(v5, v3)
6810 v.AddArg2(v0, v4)
6811 return true
6812 }
6813 }
6814 func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
6815 v_1 := v.Args[1]
6816 v_0 := v.Args[0]
6817 b := v.Block
6818 typ := &b.Func.Config.Types
6819
6820
6821 for {
6822 t := v.Type
6823 x := v_0
6824 y := v_1
6825 v.reset(OpMIPS64AND)
6826 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6827 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6828 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6829 v2.AuxInt = int64ToAuxInt(64)
6830 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6831 v3.AddArg(y)
6832 v1.AddArg2(v2, v3)
6833 v0.AddArg(v1)
6834 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6835 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6836 v5.AddArg(x)
6837 v4.AddArg2(v5, v3)
6838 v.AddArg2(v0, v4)
6839 return true
6840 }
6841 }
6842 func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
6843 v_1 := v.Args[1]
6844 v_0 := v.Args[0]
6845 b := v.Block
6846 typ := &b.Func.Config.Types
6847
6848
6849 for {
6850 t := v.Type
6851 x := v_0
6852 y := v_1
6853 v.reset(OpMIPS64AND)
6854 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6855 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6856 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6857 v2.AuxInt = int64ToAuxInt(64)
6858 v1.AddArg2(v2, y)
6859 v0.AddArg(v1)
6860 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6861 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6862 v4.AddArg(x)
6863 v3.AddArg2(v4, y)
6864 v.AddArg2(v0, v3)
6865 return true
6866 }
6867 }
6868 func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
6869 v_1 := v.Args[1]
6870 v_0 := v.Args[0]
6871 b := v.Block
6872 typ := &b.Func.Config.Types
6873
6874
6875 for {
6876 t := v.Type
6877 x := v_0
6878 y := v_1
6879 v.reset(OpMIPS64AND)
6880 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6881 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6882 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6883 v2.AuxInt = int64ToAuxInt(64)
6884 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6885 v3.AddArg(y)
6886 v1.AddArg2(v2, v3)
6887 v0.AddArg(v1)
6888 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6889 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6890 v5.AddArg(x)
6891 v4.AddArg2(v5, v3)
6892 v.AddArg2(v0, v4)
6893 return true
6894 }
6895 }
6896 func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
6897 v_1 := v.Args[1]
6898 v_0 := v.Args[0]
6899 b := v.Block
6900 typ := &b.Func.Config.Types
6901
6902
6903 for {
6904 t := v.Type
6905 x := v_0
6906 y := v_1
6907 v.reset(OpMIPS64SRAV)
6908 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6909 v0.AddArg(x)
6910 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6911 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6912 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6913 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6914 v4.AddArg(y)
6915 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6916 v5.AuxInt = int64ToAuxInt(63)
6917 v3.AddArg2(v4, v5)
6918 v2.AddArg(v3)
6919 v1.AddArg2(v2, v4)
6920 v.AddArg2(v0, v1)
6921 return true
6922 }
6923 }
6924 func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
6925 v_1 := v.Args[1]
6926 v_0 := v.Args[0]
6927 b := v.Block
6928 typ := &b.Func.Config.Types
6929
6930
6931 for {
6932 t := v.Type
6933 x := v_0
6934 y := v_1
6935 v.reset(OpMIPS64SRAV)
6936 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6937 v0.AddArg(x)
6938 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6939 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6940 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6941 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6942 v4.AddArg(y)
6943 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6944 v5.AuxInt = int64ToAuxInt(63)
6945 v3.AddArg2(v4, v5)
6946 v2.AddArg(v3)
6947 v1.AddArg2(v2, v4)
6948 v.AddArg2(v0, v1)
6949 return true
6950 }
6951 }
6952 func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
6953 v_1 := v.Args[1]
6954 v_0 := v.Args[0]
6955 b := v.Block
6956 typ := &b.Func.Config.Types
6957
6958
6959 for {
6960 t := v.Type
6961 x := v_0
6962 y := v_1
6963 v.reset(OpMIPS64SRAV)
6964 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6965 v0.AddArg(x)
6966 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6967 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6968 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6969 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6970 v4.AuxInt = int64ToAuxInt(63)
6971 v3.AddArg2(y, v4)
6972 v2.AddArg(v3)
6973 v1.AddArg2(v2, y)
6974 v.AddArg2(v0, v1)
6975 return true
6976 }
6977 }
6978 func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
6979 v_1 := v.Args[1]
6980 v_0 := v.Args[0]
6981 b := v.Block
6982 typ := &b.Func.Config.Types
6983
6984
6985 for {
6986 t := v.Type
6987 x := v_0
6988 y := v_1
6989 v.reset(OpMIPS64SRAV)
6990 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6991 v0.AddArg(x)
6992 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6993 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6994 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6995 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6996 v4.AddArg(y)
6997 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6998 v5.AuxInt = int64ToAuxInt(63)
6999 v3.AddArg2(v4, v5)
7000 v2.AddArg(v3)
7001 v1.AddArg2(v2, v4)
7002 v.AddArg2(v0, v1)
7003 return true
7004 }
7005 }
7006 func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
7007 v_1 := v.Args[1]
7008 v_0 := v.Args[0]
7009 b := v.Block
7010 typ := &b.Func.Config.Types
7011
7012
7013 for {
7014 t := v.Type
7015 x := v_0
7016 y := v_1
7017 v.reset(OpMIPS64AND)
7018 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7019 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7020 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7021 v2.AuxInt = int64ToAuxInt(64)
7022 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7023 v3.AddArg(y)
7024 v1.AddArg2(v2, v3)
7025 v0.AddArg(v1)
7026 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7027 v4.AddArg2(x, v3)
7028 v.AddArg2(v0, v4)
7029 return true
7030 }
7031 }
7032 func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
7033 v_1 := v.Args[1]
7034 v_0 := v.Args[0]
7035 b := v.Block
7036 typ := &b.Func.Config.Types
7037
7038
7039 for {
7040 t := v.Type
7041 x := v_0
7042 y := v_1
7043 v.reset(OpMIPS64AND)
7044 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7045 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7046 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7047 v2.AuxInt = int64ToAuxInt(64)
7048 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7049 v3.AddArg(y)
7050 v1.AddArg2(v2, v3)
7051 v0.AddArg(v1)
7052 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7053 v4.AddArg2(x, v3)
7054 v.AddArg2(v0, v4)
7055 return true
7056 }
7057 }
7058 func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
7059 v_1 := v.Args[1]
7060 v_0 := v.Args[0]
7061 b := v.Block
7062 typ := &b.Func.Config.Types
7063
7064
7065 for {
7066 t := v.Type
7067 x := v_0
7068 y := v_1
7069 v.reset(OpMIPS64AND)
7070 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7071 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7072 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7073 v2.AuxInt = int64ToAuxInt(64)
7074 v1.AddArg2(v2, y)
7075 v0.AddArg(v1)
7076 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7077 v3.AddArg2(x, y)
7078 v.AddArg2(v0, v3)
7079 return true
7080 }
7081 }
7082 func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
7083 v_1 := v.Args[1]
7084 v_0 := v.Args[0]
7085 b := v.Block
7086 typ := &b.Func.Config.Types
7087
7088
7089 for {
7090 t := v.Type
7091 x := v_0
7092 y := v_1
7093 v.reset(OpMIPS64AND)
7094 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7095 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7096 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7097 v2.AuxInt = int64ToAuxInt(64)
7098 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7099 v3.AddArg(y)
7100 v1.AddArg2(v2, v3)
7101 v0.AddArg(v1)
7102 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7103 v4.AddArg2(x, v3)
7104 v.AddArg2(v0, v4)
7105 return true
7106 }
7107 }
7108 func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
7109 v_1 := v.Args[1]
7110 v_0 := v.Args[0]
7111 b := v.Block
7112 typ := &b.Func.Config.Types
7113
7114
7115 for {
7116 t := v.Type
7117 x := v_0
7118 y := v_1
7119 v.reset(OpMIPS64SRAV)
7120 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7121 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7122 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7123 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7124 v3.AddArg(y)
7125 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7126 v4.AuxInt = int64ToAuxInt(63)
7127 v2.AddArg2(v3, v4)
7128 v1.AddArg(v2)
7129 v0.AddArg2(v1, v3)
7130 v.AddArg2(x, v0)
7131 return true
7132 }
7133 }
7134 func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
7135 v_1 := v.Args[1]
7136 v_0 := v.Args[0]
7137 b := v.Block
7138 typ := &b.Func.Config.Types
7139
7140
7141 for {
7142 t := v.Type
7143 x := v_0
7144 y := v_1
7145 v.reset(OpMIPS64SRAV)
7146 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7147 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7148 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7149 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7150 v3.AddArg(y)
7151 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7152 v4.AuxInt = int64ToAuxInt(63)
7153 v2.AddArg2(v3, v4)
7154 v1.AddArg(v2)
7155 v0.AddArg2(v1, v3)
7156 v.AddArg2(x, v0)
7157 return true
7158 }
7159 }
7160 func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
7161 v_1 := v.Args[1]
7162 v_0 := v.Args[0]
7163 b := v.Block
7164 typ := &b.Func.Config.Types
7165
7166
7167 for {
7168 t := v.Type
7169 x := v_0
7170 y := v_1
7171 v.reset(OpMIPS64SRAV)
7172 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7173 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7174 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7175 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7176 v3.AuxInt = int64ToAuxInt(63)
7177 v2.AddArg2(y, v3)
7178 v1.AddArg(v2)
7179 v0.AddArg2(v1, y)
7180 v.AddArg2(x, v0)
7181 return true
7182 }
7183 }
7184 func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
7185 v_1 := v.Args[1]
7186 v_0 := v.Args[0]
7187 b := v.Block
7188 typ := &b.Func.Config.Types
7189
7190
7191 for {
7192 t := v.Type
7193 x := v_0
7194 y := v_1
7195 v.reset(OpMIPS64SRAV)
7196 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7197 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7198 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7199 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7200 v3.AddArg(y)
7201 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7202 v4.AuxInt = int64ToAuxInt(63)
7203 v2.AddArg2(v3, v4)
7204 v1.AddArg(v2)
7205 v0.AddArg2(v1, v3)
7206 v.AddArg2(x, v0)
7207 return true
7208 }
7209 }
7210 func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
7211 v_1 := v.Args[1]
7212 v_0 := v.Args[0]
7213 b := v.Block
7214 typ := &b.Func.Config.Types
7215
7216
7217 for {
7218 t := v.Type
7219 x := v_0
7220 y := v_1
7221 v.reset(OpMIPS64AND)
7222 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7223 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7224 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7225 v2.AuxInt = int64ToAuxInt(64)
7226 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7227 v3.AddArg(y)
7228 v1.AddArg2(v2, v3)
7229 v0.AddArg(v1)
7230 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7231 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7232 v5.AddArg(x)
7233 v4.AddArg2(v5, v3)
7234 v.AddArg2(v0, v4)
7235 return true
7236 }
7237 }
7238 func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
7239 v_1 := v.Args[1]
7240 v_0 := v.Args[0]
7241 b := v.Block
7242 typ := &b.Func.Config.Types
7243
7244
7245 for {
7246 t := v.Type
7247 x := v_0
7248 y := v_1
7249 v.reset(OpMIPS64AND)
7250 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7251 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7252 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7253 v2.AuxInt = int64ToAuxInt(64)
7254 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7255 v3.AddArg(y)
7256 v1.AddArg2(v2, v3)
7257 v0.AddArg(v1)
7258 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7259 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7260 v5.AddArg(x)
7261 v4.AddArg2(v5, v3)
7262 v.AddArg2(v0, v4)
7263 return true
7264 }
7265 }
7266 func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
7267 v_1 := v.Args[1]
7268 v_0 := v.Args[0]
7269 b := v.Block
7270 typ := &b.Func.Config.Types
7271
7272
7273 for {
7274 t := v.Type
7275 x := v_0
7276 y := v_1
7277 v.reset(OpMIPS64AND)
7278 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7279 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7280 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7281 v2.AuxInt = int64ToAuxInt(64)
7282 v1.AddArg2(v2, y)
7283 v0.AddArg(v1)
7284 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7285 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7286 v4.AddArg(x)
7287 v3.AddArg2(v4, y)
7288 v.AddArg2(v0, v3)
7289 return true
7290 }
7291 }
7292 func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
7293 v_1 := v.Args[1]
7294 v_0 := v.Args[0]
7295 b := v.Block
7296 typ := &b.Func.Config.Types
7297
7298
7299 for {
7300 t := v.Type
7301 x := v_0
7302 y := v_1
7303 v.reset(OpMIPS64AND)
7304 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7305 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7306 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7307 v2.AuxInt = int64ToAuxInt(64)
7308 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7309 v3.AddArg(y)
7310 v1.AddArg2(v2, v3)
7311 v0.AddArg(v1)
7312 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7313 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7314 v5.AddArg(x)
7315 v4.AddArg2(v5, v3)
7316 v.AddArg2(v0, v4)
7317 return true
7318 }
7319 }
7320 func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
7321 v_1 := v.Args[1]
7322 v_0 := v.Args[0]
7323 b := v.Block
7324 typ := &b.Func.Config.Types
7325
7326
7327 for {
7328 t := v.Type
7329 x := v_0
7330 y := v_1
7331 v.reset(OpMIPS64SRAV)
7332 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7333 v0.AddArg(x)
7334 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7335 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7336 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7337 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7338 v4.AddArg(y)
7339 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7340 v5.AuxInt = int64ToAuxInt(63)
7341 v3.AddArg2(v4, v5)
7342 v2.AddArg(v3)
7343 v1.AddArg2(v2, v4)
7344 v.AddArg2(v0, v1)
7345 return true
7346 }
7347 }
7348 func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
7349 v_1 := v.Args[1]
7350 v_0 := v.Args[0]
7351 b := v.Block
7352 typ := &b.Func.Config.Types
7353
7354
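// match: (Rsh8x32 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))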
7355 for {
7356 t := v.Type
7357 x := v_0
7358 y := v_1
7359 v.reset(OpMIPS64SRAV)
7360 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7361 v0.AddArg(x)
7362 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7363 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7364 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7365 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7366 v4.AddArg(y)
7367 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7368 v5.AuxInt = int64ToAuxInt(63)
7369 v3.AddArg2(v4, v5)
7370 v2.AddArg(v3)
7371 v1.AddArg2(v2, v4)
7372 v.AddArg2(v0, v1)
7373 return true
7374 }
7375 }
7376 func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
7377 v_1 := v.Args[1]
7378 v_0 := v.Args[0]
7379 b := v.Block
7380 typ := &b.Func.Config.Types
7381
7382
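// match: (Rsh8x64 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))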
7383 for {
7384 t := v.Type
7385 x := v_0
7386 y := v_1
7387 v.reset(OpMIPS64SRAV)
7388 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7389 v0.AddArg(x)
7390 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7391 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7392 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7393 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7394 v4.AuxInt = int64ToAuxInt(63)
7395 v3.AddArg2(y, v4)
7396 v2.AddArg(v3)
7397 v1.AddArg2(v2, y)
7398 v.AddArg2(v0, v1)
7399 return true
7400 }
7401 }
7402 func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
7403 v_1 := v.Args[1]
7404 v_0 := v.Args[0]
7405 b := v.Block
7406 typ := &b.Func.Config.Types
7407
7408
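// match: (Rsh8x8 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))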
7409 for {
7410 t := v.Type
7411 x := v_0
7412 y := v_1
7413 v.reset(OpMIPS64SRAV)
7414 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7415 v0.AddArg(x)
7416 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7417 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7418 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7419 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7420 v4.AddArg(y)
7421 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7422 v5.AuxInt = int64ToAuxInt(63)
7423 v3.AddArg2(v4, v5)
7424 v2.AddArg(v3)
7425 v1.AddArg2(v2, v4)
7426 v.AddArg2(v0, v1)
7427 return true
7428 }
7429 }
7430 func rewriteValueMIPS64_OpSelect0(v *Value) bool {
7431 v_0 := v.Args[0]
7432 b := v.Block
7433 typ := &b.Func.Config.Types
7434
7435
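// match: (Select0 (Mul64uover x y))
// result: (Select1 <typ.UInt64> (MULVU x y))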
7436 for {
7437 if v_0.Op != OpMul64uover {
7438 break
7439 }
7440 y := v_0.Args[1]
7441 x := v_0.Args[0]
7442 v.reset(OpSelect1)
7443 v.Type = typ.UInt64
7444 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7445 v0.AddArg2(x, y)
7446 v.AddArg(v0)
7447 return true
7448 }
7449
7450
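// match: (Select0 <t> (Add64carry x y c))
// result: (ADDV (ADDV <t> x y) c)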
7451 for {
7452 t := v.Type
7453 if v_0.Op != OpAdd64carry {
7454 break
7455 }
7456 c := v_0.Args[2]
7457 x := v_0.Args[0]
7458 y := v_0.Args[1]
7459 v.reset(OpMIPS64ADDV)
7460 v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7461 v0.AddArg2(x, y)
7462 v.AddArg2(v0, c)
7463 return true
7464 }
7465
7466
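// match: (Select0 <t> (Sub64borrow x y c))
// result: (SUBV (SUBV <t> x y) c)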
7467 for {
7468 t := v.Type
7469 if v_0.Op != OpSub64borrow {
7470 break
7471 }
7472 c := v_0.Args[2]
7473 x := v_0.Args[0]
7474 y := v_0.Args[1]
7475 v.reset(OpMIPS64SUBV)
7476 v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7477 v0.AddArg2(x, y)
7478 v.AddArg2(v0, c)
7479 return true
7480 }
7481
7482
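// match: (Select0 (DIVVU _ (MOVVconst [1])))
// result: (MOVVconst [0])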
7483 for {
7484 if v_0.Op != OpMIPS64DIVVU {
7485 break
7486 }
7487 _ = v_0.Args[1]
7488 v_0_1 := v_0.Args[1]
7489 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7490 break
7491 }
7492 v.reset(OpMIPS64MOVVconst)
7493 v.AuxInt = int64ToAuxInt(0)
7494 return true
7495 }
7496
7497
7498
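// match: (Select0 (DIVVU x (MOVVconst [c])))
// cond: isPowerOfTwo(c)
// result: (ANDconst [c-1] x)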
7499 for {
7500 if v_0.Op != OpMIPS64DIVVU {
7501 break
7502 }
7503 _ = v_0.Args[1]
7504 x := v_0.Args[0]
7505 v_0_1 := v_0.Args[1]
7506 if v_0_1.Op != OpMIPS64MOVVconst {
7507 break
7508 }
7509 c := auxIntToInt64(v_0_1.AuxInt)
7510 if !(isPowerOfTwo(c)) {
7511 break
7512 }
7513 v.reset(OpMIPS64ANDconst)
7514 v.AuxInt = int64ToAuxInt(c - 1)
7515 v.AddArg(x)
7516 return true
7517 }
7518
7519
7520
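// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [c%d])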
7521 for {
7522 if v_0.Op != OpMIPS64DIVV {
7523 break
7524 }
7525 _ = v_0.Args[1]
7526 v_0_0 := v_0.Args[0]
7527 if v_0_0.Op != OpMIPS64MOVVconst {
7528 break
7529 }
7530 c := auxIntToInt64(v_0_0.AuxInt)
7531 v_0_1 := v_0.Args[1]
7532 if v_0_1.Op != OpMIPS64MOVVconst {
7533 break
7534 }
7535 d := auxIntToInt64(v_0_1.AuxInt)
7536 if !(d != 0) {
7537 break
7538 }
7539 v.reset(OpMIPS64MOVVconst)
7540 v.AuxInt = int64ToAuxInt(c % d)
7541 return true
7542 }
7543
7544
7545
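// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [int64(uint64(c)%uint64(d))])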
7546 for {
7547 if v_0.Op != OpMIPS64DIVVU {
7548 break
7549 }
7550 _ = v_0.Args[1]
7551 v_0_0 := v_0.Args[0]
7552 if v_0_0.Op != OpMIPS64MOVVconst {
7553 break
7554 }
7555 c := auxIntToInt64(v_0_0.AuxInt)
7556 v_0_1 := v_0.Args[1]
7557 if v_0_1.Op != OpMIPS64MOVVconst {
7558 break
7559 }
7560 d := auxIntToInt64(v_0_1.AuxInt)
7561 if !(d != 0) {
7562 break
7563 }
7564 v.reset(OpMIPS64MOVVconst)
7565 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
7566 return true
7567 }
7568 return false
7569 }
7570 func rewriteValueMIPS64_OpSelect1(v *Value) bool {
7571 v_0 := v.Args[0]
7572 b := v.Block
7573 typ := &b.Func.Config.Types
7574
7575
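// match: (Select1 (Mul64uover x y))
// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))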
7576 for {
7577 if v_0.Op != OpMul64uover {
7578 break
7579 }
7580 y := v_0.Args[1]
7581 x := v_0.Args[0]
7582 v.reset(OpMIPS64SGTU)
7583 v.Type = typ.Bool
7584 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
7585 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7586 v1.AddArg2(x, y)
7587 v0.AddArg(v1)
7588 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7589 v2.AuxInt = int64ToAuxInt(0)
7590 v.AddArg2(v0, v2)
7591 return true
7592 }
7593
7594
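// match: (Select1 <t> (Add64carry x y c))
// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))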
7595 for {
7596 t := v.Type
7597 if v_0.Op != OpAdd64carry {
7598 break
7599 }
7600 c := v_0.Args[2]
7601 x := v_0.Args[0]
7602 y := v_0.Args[1]
7603 v.reset(OpMIPS64OR)
7604 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7605 s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7606 s.AddArg2(x, y)
7607 v0.AddArg2(x, s)
7608 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7609 v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7610 v3.AddArg2(s, c)
7611 v2.AddArg2(s, v3)
7612 v.AddArg2(v0, v2)
7613 return true
7614 }
7615
7616
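// match: (Select1 <t> (Sub64borrow x y c))
// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))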
7617 for {
7618 t := v.Type
7619 if v_0.Op != OpSub64borrow {
7620 break
7621 }
7622 c := v_0.Args[2]
7623 x := v_0.Args[0]
7624 y := v_0.Args[1]
7625 v.reset(OpMIPS64OR)
7626 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7627 s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7628 s.AddArg2(x, y)
7629 v0.AddArg2(s, x)
7630 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7631 v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7632 v3.AddArg2(s, c)
7633 v2.AddArg2(v3, s)
7634 v.AddArg2(v0, v2)
7635 return true
7636 }
7637
7638
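// match: (Select1 (MULVU x (MOVVconst [-1])))
// result: (NEGV x)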
7639 for {
7640 if v_0.Op != OpMIPS64MULVU {
7641 break
7642 }
7643 _ = v_0.Args[1]
7644 v_0_0 := v_0.Args[0]
7645 v_0_1 := v_0.Args[1]
7646 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7647 x := v_0_0
7648 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
7649 continue
7650 }
7651 v.reset(OpMIPS64NEGV)
7652 v.AddArg(x)
7653 return true
7654 }
7655 break
7656 }
7657
7658
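// match: (Select1 (MULVU _ (MOVVconst [0])))
// result: (MOVVconst [0])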
7659 for {
7660 if v_0.Op != OpMIPS64MULVU {
7661 break
7662 }
7663 _ = v_0.Args[1]
7664 v_0_0 := v_0.Args[0]
7665 v_0_1 := v_0.Args[1]
7666 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7667 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7668 continue
7669 }
7670 v.reset(OpMIPS64MOVVconst)
7671 v.AuxInt = int64ToAuxInt(0)
7672 return true
7673 }
7674 break
7675 }
7676
7677
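// match: (Select1 (MULVU x (MOVVconst [1])))
// result: x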
7678 for {
7679 if v_0.Op != OpMIPS64MULVU {
7680 break
7681 }
7682 _ = v_0.Args[1]
7683 v_0_0 := v_0.Args[0]
7684 v_0_1 := v_0.Args[1]
7685 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7686 x := v_0_0
7687 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7688 continue
7689 }
7690 v.copyOf(x)
7691 return true
7692 }
7693 break
7694 }
7695
7696
7697
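// match: (Select1 (MULVU x (MOVVconst [c])))
// cond: isPowerOfTwo(c)
// result: (SLLVconst [log64(c)] x)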
7698 for {
7699 if v_0.Op != OpMIPS64MULVU {
7700 break
7701 }
7702 _ = v_0.Args[1]
7703 v_0_0 := v_0.Args[0]
7704 v_0_1 := v_0.Args[1]
7705 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7706 x := v_0_0
7707 if v_0_1.Op != OpMIPS64MOVVconst {
7708 continue
7709 }
7710 c := auxIntToInt64(v_0_1.AuxInt)
7711 if !(isPowerOfTwo(c)) {
7712 continue
7713 }
7714 v.reset(OpMIPS64SLLVconst)
7715 v.AuxInt = int64ToAuxInt(log64(c))
7716 v.AddArg(x)
7717 return true
7718 }
7719 break
7720 }
7721
7722
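// match: (Select1 (DIVVU x (MOVVconst [1])))
// result: x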
7723 for {
7724 if v_0.Op != OpMIPS64DIVVU {
7725 break
7726 }
7727 _ = v_0.Args[1]
7728 x := v_0.Args[0]
7729 v_0_1 := v_0.Args[1]
7730 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7731 break
7732 }
7733 v.copyOf(x)
7734 return true
7735 }
7736
7737
7738
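// match: (Select1 (DIVVU x (MOVVconst [c])))
// cond: isPowerOfTwo(c)
// result: (SRLVconst [log64(c)] x)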
7739 for {
7740 if v_0.Op != OpMIPS64DIVVU {
7741 break
7742 }
7743 _ = v_0.Args[1]
7744 x := v_0.Args[0]
7745 v_0_1 := v_0.Args[1]
7746 if v_0_1.Op != OpMIPS64MOVVconst {
7747 break
7748 }
7749 c := auxIntToInt64(v_0_1.AuxInt)
7750 if !(isPowerOfTwo(c)) {
7751 break
7752 }
7753 v.reset(OpMIPS64SRLVconst)
7754 v.AuxInt = int64ToAuxInt(log64(c))
7755 v.AddArg(x)
7756 return true
7757 }
7758
7759
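// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
// result: (MOVVconst [c*d])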
7760 for {
7761 if v_0.Op != OpMIPS64MULVU {
7762 break
7763 }
7764 _ = v_0.Args[1]
7765 v_0_0 := v_0.Args[0]
7766 v_0_1 := v_0.Args[1]
7767 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7768 if v_0_0.Op != OpMIPS64MOVVconst {
7769 continue
7770 }
7771 c := auxIntToInt64(v_0_0.AuxInt)
7772 if v_0_1.Op != OpMIPS64MOVVconst {
7773 continue
7774 }
7775 d := auxIntToInt64(v_0_1.AuxInt)
7776 v.reset(OpMIPS64MOVVconst)
7777 v.AuxInt = int64ToAuxInt(c * d)
7778 return true
7779 }
7780 break
7781 }
7782
7783
7784
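// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [c/d])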
7785 for {
7786 if v_0.Op != OpMIPS64DIVV {
7787 break
7788 }
7789 _ = v_0.Args[1]
7790 v_0_0 := v_0.Args[0]
7791 if v_0_0.Op != OpMIPS64MOVVconst {
7792 break
7793 }
7794 c := auxIntToInt64(v_0_0.AuxInt)
7795 v_0_1 := v_0.Args[1]
7796 if v_0_1.Op != OpMIPS64MOVVconst {
7797 break
7798 }
7799 d := auxIntToInt64(v_0_1.AuxInt)
7800 if !(d != 0) {
7801 break
7802 }
7803 v.reset(OpMIPS64MOVVconst)
7804 v.AuxInt = int64ToAuxInt(c / d)
7805 return true
7806 }
7807
7808
7809
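// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [int64(uint64(c)/uint64(d))])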
7810 for {
7811 if v_0.Op != OpMIPS64DIVVU {
7812 break
7813 }
7814 _ = v_0.Args[1]
7815 v_0_0 := v_0.Args[0]
7816 if v_0_0.Op != OpMIPS64MOVVconst {
7817 break
7818 }
7819 c := auxIntToInt64(v_0_0.AuxInt)
7820 v_0_1 := v_0.Args[1]
7821 if v_0_1.Op != OpMIPS64MOVVconst {
7822 break
7823 }
7824 d := auxIntToInt64(v_0_1.AuxInt)
7825 if !(d != 0) {
7826 break
7827 }
7828 v.reset(OpMIPS64MOVVconst)
7829 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7830 return true
7831 }
7832 return false
7833 }
7834 func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
7835 v_0 := v.Args[0]
7836 b := v.Block
7837
7838
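// match: (Slicemask <t> x)
// result: (SRAVconst (NEGV <t> x) [63])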
7839 for {
7840 t := v.Type
7841 x := v_0
7842 v.reset(OpMIPS64SRAVconst)
7843 v.AuxInt = int64ToAuxInt(63)
7844 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7845 v0.AddArg(x)
7846 v.AddArg(v0)
7847 return true
7848 }
7849 }
7850 func rewriteValueMIPS64_OpStore(v *Value) bool {
7851 v_2 := v.Args[2]
7852 v_1 := v.Args[1]
7853 v_0 := v.Args[0]
7854
7855
7856
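// match: (Store {t} ptr val mem)
// cond: t.Size() == 1
// result: (MOVBstore ptr val mem)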
7857 for {
7858 t := auxToType(v.Aux)
7859 ptr := v_0
7860 val := v_1
7861 mem := v_2
7862 if !(t.Size() == 1) {
7863 break
7864 }
7865 v.reset(OpMIPS64MOVBstore)
7866 v.AddArg3(ptr, val, mem)
7867 return true
7868 }
7869
7870
7871
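// match: (Store {t} ptr val mem)
// cond: t.Size() == 2
// result: (MOVHstore ptr val mem)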
7872 for {
7873 t := auxToType(v.Aux)
7874 ptr := v_0
7875 val := v_1
7876 mem := v_2
7877 if !(t.Size() == 2) {
7878 break
7879 }
7880 v.reset(OpMIPS64MOVHstore)
7881 v.AddArg3(ptr, val, mem)
7882 return true
7883 }
7884
7885
7886
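// match: (Store {t} ptr val mem)
// cond: t.Size() == 4 && !t.IsFloat()
// result: (MOVWstore ptr val mem)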
7887 for {
7888 t := auxToType(v.Aux)
7889 ptr := v_0
7890 val := v_1
7891 mem := v_2
7892 if !(t.Size() == 4 && !t.IsFloat()) {
7893 break
7894 }
7895 v.reset(OpMIPS64MOVWstore)
7896 v.AddArg3(ptr, val, mem)
7897 return true
7898 }
7899
7900
7901
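// match: (Store {t} ptr val mem)
// cond: t.Size() == 8 && !t.IsFloat()
// result: (MOVVstore ptr val mem)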
7902 for {
7903 t := auxToType(v.Aux)
7904 ptr := v_0
7905 val := v_1
7906 mem := v_2
7907 if !(t.Size() == 8 && !t.IsFloat()) {
7908 break
7909 }
7910 v.reset(OpMIPS64MOVVstore)
7911 v.AddArg3(ptr, val, mem)
7912 return true
7913 }
7914
7915
7916
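// match: (Store {t} ptr val mem)
// cond: t.Size() == 4 && t.IsFloat()
// result: (MOVFstore ptr val mem)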
7917 for {
7918 t := auxToType(v.Aux)
7919 ptr := v_0
7920 val := v_1
7921 mem := v_2
7922 if !(t.Size() == 4 && t.IsFloat()) {
7923 break
7924 }
7925 v.reset(OpMIPS64MOVFstore)
7926 v.AddArg3(ptr, val, mem)
7927 return true
7928 }
7929
7930
7931
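// match: (Store {t} ptr val mem)
// cond: t.Size() == 8 && t.IsFloat()
// result: (MOVDstore ptr val mem)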
7932 for {
7933 t := auxToType(v.Aux)
7934 ptr := v_0
7935 val := v_1
7936 mem := v_2
7937 if !(t.Size() == 8 && t.IsFloat()) {
7938 break
7939 }
7940 v.reset(OpMIPS64MOVDstore)
7941 v.AddArg3(ptr, val, mem)
7942 return true
7943 }
7944 return false
7945 }
7946 func rewriteValueMIPS64_OpZero(v *Value) bool {
7947 v_1 := v.Args[1]
7948 v_0 := v.Args[0]
7949 b := v.Block
7950 config := b.Func.Config
7951 typ := &b.Func.Config.Types
7952
7953
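// match: (Zero [0] _ mem)
// result: mem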
7954 for {
7955 if auxIntToInt64(v.AuxInt) != 0 {
7956 break
7957 }
7958 mem := v_1
7959 v.copyOf(mem)
7960 return true
7961 }
7962
7963
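// match: (Zero [1] ptr mem)
// result: (MOVBstore ptr (MOVVconst [0]) mem)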
7964 for {
7965 if auxIntToInt64(v.AuxInt) != 1 {
7966 break
7967 }
7968 ptr := v_0
7969 mem := v_1
7970 v.reset(OpMIPS64MOVBstore)
7971 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7972 v0.AuxInt = int64ToAuxInt(0)
7973 v.AddArg3(ptr, v0, mem)
7974 return true
7975 }
7976
7977
7978
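// match: (Zero [2] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore ptr (MOVVconst [0]) mem)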
7979 for {
7980 if auxIntToInt64(v.AuxInt) != 2 {
7981 break
7982 }
7983 t := auxToType(v.Aux)
7984 ptr := v_0
7985 mem := v_1
7986 if !(t.Alignment()%2 == 0) {
7987 break
7988 }
7989 v.reset(OpMIPS64MOVHstore)
7990 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7991 v0.AuxInt = int64ToAuxInt(0)
7992 v.AddArg3(ptr, v0, mem)
7993 return true
7994 }
7995
7996
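// match: (Zero [2] ptr mem)
// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))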
7997 for {
7998 if auxIntToInt64(v.AuxInt) != 2 {
7999 break
8000 }
8001 ptr := v_0
8002 mem := v_1
8003 v.reset(OpMIPS64MOVBstore)
8004 v.AuxInt = int32ToAuxInt(1)
8005 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8006 v0.AuxInt = int64ToAuxInt(0)
8007 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8008 v1.AuxInt = int32ToAuxInt(0)
8009 v1.AddArg3(ptr, v0, mem)
8010 v.AddArg3(ptr, v0, v1)
8011 return true
8012 }
8013
8014
8015
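// match: (Zero [4] {t} ptr mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore ptr (MOVVconst [0]) mem)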
8016 for {
8017 if auxIntToInt64(v.AuxInt) != 4 {
8018 break
8019 }
8020 t := auxToType(v.Aux)
8021 ptr := v_0
8022 mem := v_1
8023 if !(t.Alignment()%4 == 0) {
8024 break
8025 }
8026 v.reset(OpMIPS64MOVWstore)
8027 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8028 v0.AuxInt = int64ToAuxInt(0)
8029 v.AddArg3(ptr, v0, mem)
8030 return true
8031 }
8032
8033
8034
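// match: (Zero [4] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))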
8035 for {
8036 if auxIntToInt64(v.AuxInt) != 4 {
8037 break
8038 }
8039 t := auxToType(v.Aux)
8040 ptr := v_0
8041 mem := v_1
8042 if !(t.Alignment()%2 == 0) {
8043 break
8044 }
8045 v.reset(OpMIPS64MOVHstore)
8046 v.AuxInt = int32ToAuxInt(2)
8047 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8048 v0.AuxInt = int64ToAuxInt(0)
8049 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8050 v1.AuxInt = int32ToAuxInt(0)
8051 v1.AddArg3(ptr, v0, mem)
8052 v.AddArg3(ptr, v0, v1)
8053 return true
8054 }
8055
8056
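// match: (Zero [4] ptr mem)
// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))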
8057 for {
8058 if auxIntToInt64(v.AuxInt) != 4 {
8059 break
8060 }
8061 ptr := v_0
8062 mem := v_1
8063 v.reset(OpMIPS64MOVBstore)
8064 v.AuxInt = int32ToAuxInt(3)
8065 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8066 v0.AuxInt = int64ToAuxInt(0)
8067 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8068 v1.AuxInt = int32ToAuxInt(2)
8069 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8070 v2.AuxInt = int32ToAuxInt(1)
8071 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8072 v3.AuxInt = int32ToAuxInt(0)
8073 v3.AddArg3(ptr, v0, mem)
8074 v2.AddArg3(ptr, v0, v3)
8075 v1.AddArg3(ptr, v0, v2)
8076 v.AddArg3(ptr, v0, v1)
8077 return true
8078 }
8079
8080
8081
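// match: (Zero [8] {t} ptr mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore ptr (MOVVconst [0]) mem)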
8082 for {
8083 if auxIntToInt64(v.AuxInt) != 8 {
8084 break
8085 }
8086 t := auxToType(v.Aux)
8087 ptr := v_0
8088 mem := v_1
8089 if !(t.Alignment()%8 == 0) {
8090 break
8091 }
8092 v.reset(OpMIPS64MOVVstore)
8093 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8094 v0.AuxInt = int64ToAuxInt(0)
8095 v.AddArg3(ptr, v0, mem)
8096 return true
8097 }
8098
8099
8100
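// match: (Zero [8] {t} ptr mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))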
8101 for {
8102 if auxIntToInt64(v.AuxInt) != 8 {
8103 break
8104 }
8105 t := auxToType(v.Aux)
8106 ptr := v_0
8107 mem := v_1
8108 if !(t.Alignment()%4 == 0) {
8109 break
8110 }
8111 v.reset(OpMIPS64MOVWstore)
8112 v.AuxInt = int32ToAuxInt(4)
8113 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8114 v0.AuxInt = int64ToAuxInt(0)
8115 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8116 v1.AuxInt = int32ToAuxInt(0)
8117 v1.AddArg3(ptr, v0, mem)
8118 v.AddArg3(ptr, v0, v1)
8119 return true
8120 }
8121
8122
8123
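// match: (Zero [8] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))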
8124 for {
8125 if auxIntToInt64(v.AuxInt) != 8 {
8126 break
8127 }
8128 t := auxToType(v.Aux)
8129 ptr := v_0
8130 mem := v_1
8131 if !(t.Alignment()%2 == 0) {
8132 break
8133 }
8134 v.reset(OpMIPS64MOVHstore)
8135 v.AuxInt = int32ToAuxInt(6)
8136 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8137 v0.AuxInt = int64ToAuxInt(0)
8138 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8139 v1.AuxInt = int32ToAuxInt(4)
8140 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8141 v2.AuxInt = int32ToAuxInt(2)
8142 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8143 v3.AuxInt = int32ToAuxInt(0)
8144 v3.AddArg3(ptr, v0, mem)
8145 v2.AddArg3(ptr, v0, v3)
8146 v1.AddArg3(ptr, v0, v2)
8147 v.AddArg3(ptr, v0, v1)
8148 return true
8149 }
8150
8151
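// match: (Zero [3] ptr mem)
// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))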
8152 for {
8153 if auxIntToInt64(v.AuxInt) != 3 {
8154 break
8155 }
8156 ptr := v_0
8157 mem := v_1
8158 v.reset(OpMIPS64MOVBstore)
8159 v.AuxInt = int32ToAuxInt(2)
8160 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8161 v0.AuxInt = int64ToAuxInt(0)
8162 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8163 v1.AuxInt = int32ToAuxInt(1)
8164 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8165 v2.AuxInt = int32ToAuxInt(0)
8166 v2.AddArg3(ptr, v0, mem)
8167 v1.AddArg3(ptr, v0, v2)
8168 v.AddArg3(ptr, v0, v1)
8169 return true
8170 }
8171
8172
8173
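// match: (Zero [6] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))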
8174 for {
8175 if auxIntToInt64(v.AuxInt) != 6 {
8176 break
8177 }
8178 t := auxToType(v.Aux)
8179 ptr := v_0
8180 mem := v_1
8181 if !(t.Alignment()%2 == 0) {
8182 break
8183 }
8184 v.reset(OpMIPS64MOVHstore)
8185 v.AuxInt = int32ToAuxInt(4)
8186 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8187 v0.AuxInt = int64ToAuxInt(0)
8188 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8189 v1.AuxInt = int32ToAuxInt(2)
8190 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8191 v2.AuxInt = int32ToAuxInt(0)
8192 v2.AddArg3(ptr, v0, mem)
8193 v1.AddArg3(ptr, v0, v2)
8194 v.AddArg3(ptr, v0, v1)
8195 return true
8196 }
8197
8198
8199
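// match: (Zero [12] {t} ptr mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))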
8200 for {
8201 if auxIntToInt64(v.AuxInt) != 12 {
8202 break
8203 }
8204 t := auxToType(v.Aux)
8205 ptr := v_0
8206 mem := v_1
8207 if !(t.Alignment()%4 == 0) {
8208 break
8209 }
8210 v.reset(OpMIPS64MOVWstore)
8211 v.AuxInt = int32ToAuxInt(8)
8212 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8213 v0.AuxInt = int64ToAuxInt(0)
8214 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8215 v1.AuxInt = int32ToAuxInt(4)
8216 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8217 v2.AuxInt = int32ToAuxInt(0)
8218 v2.AddArg3(ptr, v0, mem)
8219 v1.AddArg3(ptr, v0, v2)
8220 v.AddArg3(ptr, v0, v1)
8221 return true
8222 }
8223
8224
8225
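// match: (Zero [16] {t} ptr mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))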
8226 for {
8227 if auxIntToInt64(v.AuxInt) != 16 {
8228 break
8229 }
8230 t := auxToType(v.Aux)
8231 ptr := v_0
8232 mem := v_1
8233 if !(t.Alignment()%8 == 0) {
8234 break
8235 }
8236 v.reset(OpMIPS64MOVVstore)
8237 v.AuxInt = int32ToAuxInt(8)
8238 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8239 v0.AuxInt = int64ToAuxInt(0)
8240 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8241 v1.AuxInt = int32ToAuxInt(0)
8242 v1.AddArg3(ptr, v0, mem)
8243 v.AddArg3(ptr, v0, v1)
8244 return true
8245 }
8246
8247
8248
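// match: (Zero [24] {t} ptr mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))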
8249 for {
8250 if auxIntToInt64(v.AuxInt) != 24 {
8251 break
8252 }
8253 t := auxToType(v.Aux)
8254 ptr := v_0
8255 mem := v_1
8256 if !(t.Alignment()%8 == 0) {
8257 break
8258 }
8259 v.reset(OpMIPS64MOVVstore)
8260 v.AuxInt = int32ToAuxInt(16)
8261 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8262 v0.AuxInt = int64ToAuxInt(0)
8263 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8264 v1.AuxInt = int32ToAuxInt(8)
8265 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8266 v2.AuxInt = int32ToAuxInt(0)
8267 v2.AddArg3(ptr, v0, mem)
8268 v1.AddArg3(ptr, v0, v2)
8269 v.AddArg3(ptr, v0, v1)
8270 return true
8271 }
8272
8273
8274
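// match: (Zero [s] {t} ptr mem)
// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0
// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)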
8275 for {
8276 s := auxIntToInt64(v.AuxInt)
8277 t := auxToType(v.Aux)
8278 ptr := v_0
8279 mem := v_1
8280 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0) {
8281 break
8282 }
8283 v.reset(OpMIPS64DUFFZERO)
8284 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8285 v.AddArg2(ptr, mem)
8286 return true
8287 }
8288
8289
8290
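// match: (Zero [s] {t} ptr mem)
// cond: s > 8*128 || t.Alignment()%8 != 0
// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)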
8291 for {
8292 s := auxIntToInt64(v.AuxInt)
8293 t := auxToType(v.Aux)
8294 ptr := v_0
8295 mem := v_1
8296 if !(s > 8*128 || t.Alignment()%8 != 0) {
8297 break
8298 }
8299 v.reset(OpMIPS64LoweredZero)
8300 v.AuxInt = int64ToAuxInt(t.Alignment())
8301 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
8302 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8303 v0.AddArg(ptr)
8304 v.AddArg3(ptr, v0, mem)
8305 return true
8306 }
8307 return false
8308 }
8309 func rewriteBlockMIPS64(b *Block) bool {
8310 switch b.Kind {
8311 case BlockMIPS64EQ:
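// Rules for EQ blocks:
// (EQ (FPFlagTrue cmp) yes no) => (FPF cmp yes no)
// (EQ (FPFlagFalse cmp) yes no) => (FPT cmp yes no)
// (EQ (XORconst [1] cmp:(SGT|SGTU|SGTconst|SGTUconst ...)) yes no) => (NE cmp yes no)
// (EQ (SGTUconst [1] x) yes no) => (NE x yes no)
// (EQ (SGTU x (MOVVconst [0])) yes no) => (EQ x yes no)
// (EQ (SGTconst [0] x) yes no) => (GEZ x yes no)
// (EQ (SGT x (MOVVconst [0])) yes no) => (LEZ x yes no)
// (EQ (MOVVconst [0]) yes no) => (First yes no)
// (EQ (MOVVconst [c]) yes no) && c != 0 => (First no yes)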
8312
8313
8314 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8315 v_0 := b.Controls[0]
8316 cmp := v_0.Args[0]
8317 b.resetWithControl(BlockMIPS64FPF, cmp)
8318 return true
8319 }
8320
8321
8322 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8323 v_0 := b.Controls[0]
8324 cmp := v_0.Args[0]
8325 b.resetWithControl(BlockMIPS64FPT, cmp)
8326 return true
8327 }
8328
8329
8330 for b.Controls[0].Op == OpMIPS64XORconst {
8331 v_0 := b.Controls[0]
8332 if auxIntToInt64(v_0.AuxInt) != 1 {
8333 break
8334 }
8335 cmp := v_0.Args[0]
8336 if cmp.Op != OpMIPS64SGT {
8337 break
8338 }
8339 b.resetWithControl(BlockMIPS64NE, cmp)
8340 return true
8341 }
8342
8343
8344 for b.Controls[0].Op == OpMIPS64XORconst {
8345 v_0 := b.Controls[0]
8346 if auxIntToInt64(v_0.AuxInt) != 1 {
8347 break
8348 }
8349 cmp := v_0.Args[0]
8350 if cmp.Op != OpMIPS64SGTU {
8351 break
8352 }
8353 b.resetWithControl(BlockMIPS64NE, cmp)
8354 return true
8355 }
8356
8357
8358 for b.Controls[0].Op == OpMIPS64XORconst {
8359 v_0 := b.Controls[0]
8360 if auxIntToInt64(v_0.AuxInt) != 1 {
8361 break
8362 }
8363 cmp := v_0.Args[0]
8364 if cmp.Op != OpMIPS64SGTconst {
8365 break
8366 }
8367 b.resetWithControl(BlockMIPS64NE, cmp)
8368 return true
8369 }
8370
8371
8372 for b.Controls[0].Op == OpMIPS64XORconst {
8373 v_0 := b.Controls[0]
8374 if auxIntToInt64(v_0.AuxInt) != 1 {
8375 break
8376 }
8377 cmp := v_0.Args[0]
8378 if cmp.Op != OpMIPS64SGTUconst {
8379 break
8380 }
8381 b.resetWithControl(BlockMIPS64NE, cmp)
8382 return true
8383 }
8384
8385
8386 for b.Controls[0].Op == OpMIPS64SGTUconst {
8387 v_0 := b.Controls[0]
8388 if auxIntToInt64(v_0.AuxInt) != 1 {
8389 break
8390 }
8391 x := v_0.Args[0]
8392 b.resetWithControl(BlockMIPS64NE, x)
8393 return true
8394 }
8395
8396
8397 for b.Controls[0].Op == OpMIPS64SGTU {
8398 v_0 := b.Controls[0]
8399 _ = v_0.Args[1]
8400 x := v_0.Args[0]
8401 v_0_1 := v_0.Args[1]
8402 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8403 break
8404 }
8405 b.resetWithControl(BlockMIPS64EQ, x)
8406 return true
8407 }
8408
8409
8410 for b.Controls[0].Op == OpMIPS64SGTconst {
8411 v_0 := b.Controls[0]
8412 if auxIntToInt64(v_0.AuxInt) != 0 {
8413 break
8414 }
8415 x := v_0.Args[0]
8416 b.resetWithControl(BlockMIPS64GEZ, x)
8417 return true
8418 }
8419
8420
8421 for b.Controls[0].Op == OpMIPS64SGT {
8422 v_0 := b.Controls[0]
8423 _ = v_0.Args[1]
8424 x := v_0.Args[0]
8425 v_0_1 := v_0.Args[1]
8426 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8427 break
8428 }
8429 b.resetWithControl(BlockMIPS64LEZ, x)
8430 return true
8431 }
8432
8433
8434 for b.Controls[0].Op == OpMIPS64MOVVconst {
8435 v_0 := b.Controls[0]
8436 if auxIntToInt64(v_0.AuxInt) != 0 {
8437 break
8438 }
8439 b.Reset(BlockFirst)
8440 return true
8441 }
8442
8443
8444
8445 for b.Controls[0].Op == OpMIPS64MOVVconst {
8446 v_0 := b.Controls[0]
8447 c := auxIntToInt64(v_0.AuxInt)
8448 if !(c != 0) {
8449 break
8450 }
8451 b.Reset(BlockFirst)
8452 b.swapSuccessors()
8453 return true
8454 }
8455 case BlockMIPS64GEZ:
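// Rules for GEZ blocks: a constant control resolves the branch.
// (GEZ (MOVVconst [c]) yes no) && c >= 0 => (First yes no)
// (GEZ (MOVVconst [c]) yes no) && c < 0 => (First no yes)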
8456
8457
8458
8459 for b.Controls[0].Op == OpMIPS64MOVVconst {
8460 v_0 := b.Controls[0]
8461 c := auxIntToInt64(v_0.AuxInt)
8462 if !(c >= 0) {
8463 break
8464 }
8465 b.Reset(BlockFirst)
8466 return true
8467 }
8468
8469
8470
8471 for b.Controls[0].Op == OpMIPS64MOVVconst {
8472 v_0 := b.Controls[0]
8473 c := auxIntToInt64(v_0.AuxInt)
8474 if !(c < 0) {
8475 break
8476 }
8477 b.Reset(BlockFirst)
8478 b.swapSuccessors()
8479 return true
8480 }
8481 case BlockMIPS64GTZ:
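// Rules for GTZ blocks: a constant control resolves the branch.
// (GTZ (MOVVconst [c]) yes no) && c > 0 => (First yes no)
// (GTZ (MOVVconst [c]) yes no) && c <= 0 => (First no yes)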
8482
8483
8484
8485 for b.Controls[0].Op == OpMIPS64MOVVconst {
8486 v_0 := b.Controls[0]
8487 c := auxIntToInt64(v_0.AuxInt)
8488 if !(c > 0) {
8489 break
8490 }
8491 b.Reset(BlockFirst)
8492 return true
8493 }
8494
8495
8496
8497 for b.Controls[0].Op == OpMIPS64MOVVconst {
8498 v_0 := b.Controls[0]
8499 c := auxIntToInt64(v_0.AuxInt)
8500 if !(c <= 0) {
8501 break
8502 }
8503 b.Reset(BlockFirst)
8504 b.swapSuccessors()
8505 return true
8506 }
8507 case BlockIf:
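// Rules for If blocks:
// (If cond yes no) => (NE cond yes no)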
8508
8509
8510 for {
8511 cond := b.Controls[0]
8512 b.resetWithControl(BlockMIPS64NE, cond)
8513 return true
8514 }
8515 case BlockMIPS64LEZ:
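// Rules for LEZ blocks: a constant control resolves the branch.
// (LEZ (MOVVconst [c]) yes no) && c <= 0 => (First yes no)
// (LEZ (MOVVconst [c]) yes no) && c > 0 => (First no yes)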
8516
8517
8518
8519 for b.Controls[0].Op == OpMIPS64MOVVconst {
8520 v_0 := b.Controls[0]
8521 c := auxIntToInt64(v_0.AuxInt)
8522 if !(c <= 0) {
8523 break
8524 }
8525 b.Reset(BlockFirst)
8526 return true
8527 }
8528
8529
8530
8531 for b.Controls[0].Op == OpMIPS64MOVVconst {
8532 v_0 := b.Controls[0]
8533 c := auxIntToInt64(v_0.AuxInt)
8534 if !(c > 0) {
8535 break
8536 }
8537 b.Reset(BlockFirst)
8538 b.swapSuccessors()
8539 return true
8540 }
8541 case BlockMIPS64LTZ:
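// Rules for LTZ blocks: a constant control resolves the branch.
// (LTZ (MOVVconst [c]) yes no) && c < 0 => (First yes no)
// (LTZ (MOVVconst [c]) yes no) && c >= 0 => (First no yes)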
8542
8543
8544
8545 for b.Controls[0].Op == OpMIPS64MOVVconst {
8546 v_0 := b.Controls[0]
8547 c := auxIntToInt64(v_0.AuxInt)
8548 if !(c < 0) {
8549 break
8550 }
8551 b.Reset(BlockFirst)
8552 return true
8553 }
8554
8555
8556
8557 for b.Controls[0].Op == OpMIPS64MOVVconst {
8558 v_0 := b.Controls[0]
8559 c := auxIntToInt64(v_0.AuxInt)
8560 if !(c >= 0) {
8561 break
8562 }
8563 b.Reset(BlockFirst)
8564 b.swapSuccessors()
8565 return true
8566 }
8567 case BlockMIPS64NE:
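// Rules for NE blocks:
// (NE (FPFlagTrue cmp) yes no) => (FPT cmp yes no)
// (NE (FPFlagFalse cmp) yes no) => (FPF cmp yes no)
// (NE (XORconst [1] cmp:(SGT|SGTU|SGTconst|SGTUconst ...)) yes no) => (EQ cmp yes no)
// (NE (SGTUconst [1] x) yes no) => (EQ x yes no)
// (NE (SGTU x (MOVVconst [0])) yes no) => (NE x yes no)
// (NE (SGTconst [0] x) yes no) => (LTZ x yes no)
// (NE (SGT x (MOVVconst [0])) yes no) => (GTZ x yes no)
// (NE (MOVVconst [0]) yes no) => (First no yes)
// (NE (MOVVconst [c]) yes no) && c != 0 => (First yes no)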
8568
8569
8570 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8571 v_0 := b.Controls[0]
8572 cmp := v_0.Args[0]
8573 b.resetWithControl(BlockMIPS64FPT, cmp)
8574 return true
8575 }
8576
8577
8578 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8579 v_0 := b.Controls[0]
8580 cmp := v_0.Args[0]
8581 b.resetWithControl(BlockMIPS64FPF, cmp)
8582 return true
8583 }
8584
8585
8586 for b.Controls[0].Op == OpMIPS64XORconst {
8587 v_0 := b.Controls[0]
8588 if auxIntToInt64(v_0.AuxInt) != 1 {
8589 break
8590 }
8591 cmp := v_0.Args[0]
8592 if cmp.Op != OpMIPS64SGT {
8593 break
8594 }
8595 b.resetWithControl(BlockMIPS64EQ, cmp)
8596 return true
8597 }
8598
8599
8600 for b.Controls[0].Op == OpMIPS64XORconst {
8601 v_0 := b.Controls[0]
8602 if auxIntToInt64(v_0.AuxInt) != 1 {
8603 break
8604 }
8605 cmp := v_0.Args[0]
8606 if cmp.Op != OpMIPS64SGTU {
8607 break
8608 }
8609 b.resetWithControl(BlockMIPS64EQ, cmp)
8610 return true
8611 }
8612
8613
8614 for b.Controls[0].Op == OpMIPS64XORconst {
8615 v_0 := b.Controls[0]
8616 if auxIntToInt64(v_0.AuxInt) != 1 {
8617 break
8618 }
8619 cmp := v_0.Args[0]
8620 if cmp.Op != OpMIPS64SGTconst {
8621 break
8622 }
8623 b.resetWithControl(BlockMIPS64EQ, cmp)
8624 return true
8625 }
8626
8627
8628 for b.Controls[0].Op == OpMIPS64XORconst {
8629 v_0 := b.Controls[0]
8630 if auxIntToInt64(v_0.AuxInt) != 1 {
8631 break
8632 }
8633 cmp := v_0.Args[0]
8634 if cmp.Op != OpMIPS64SGTUconst {
8635 break
8636 }
8637 b.resetWithControl(BlockMIPS64EQ, cmp)
8638 return true
8639 }
8640
8641
8642 for b.Controls[0].Op == OpMIPS64SGTUconst {
8643 v_0 := b.Controls[0]
8644 if auxIntToInt64(v_0.AuxInt) != 1 {
8645 break
8646 }
8647 x := v_0.Args[0]
8648 b.resetWithControl(BlockMIPS64EQ, x)
8649 return true
8650 }
8651
8652
8653 for b.Controls[0].Op == OpMIPS64SGTU {
8654 v_0 := b.Controls[0]
8655 _ = v_0.Args[1]
8656 x := v_0.Args[0]
8657 v_0_1 := v_0.Args[1]
8658 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8659 break
8660 }
8661 b.resetWithControl(BlockMIPS64NE, x)
8662 return true
8663 }
8664
8665
8666 for b.Controls[0].Op == OpMIPS64SGTconst {
8667 v_0 := b.Controls[0]
8668 if auxIntToInt64(v_0.AuxInt) != 0 {
8669 break
8670 }
8671 x := v_0.Args[0]
8672 b.resetWithControl(BlockMIPS64LTZ, x)
8673 return true
8674 }
8675
8676
8677 for b.Controls[0].Op == OpMIPS64SGT {
8678 v_0 := b.Controls[0]
8679 _ = v_0.Args[1]
8680 x := v_0.Args[0]
8681 v_0_1 := v_0.Args[1]
8682 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8683 break
8684 }
8685 b.resetWithControl(BlockMIPS64GTZ, x)
8686 return true
8687 }
8688
8689
8690 for b.Controls[0].Op == OpMIPS64MOVVconst {
8691 v_0 := b.Controls[0]
8692 if auxIntToInt64(v_0.AuxInt) != 0 {
8693 break
8694 }
8695 b.Reset(BlockFirst)
8696 b.swapSuccessors()
8697 return true
8698 }
8699
8700
8701
8702 for b.Controls[0].Op == OpMIPS64MOVVconst {
8703 v_0 := b.Controls[0]
8704 c := auxIntToInt64(v_0.AuxInt)
8705 if !(c != 0) {
8706 break
8707 }
8708 b.Reset(BlockFirst)
8709 return true
8710 }
8711 }
8712 return false
8713 }
8714