package mips64

import (
	"math"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
	"internal/abi"
)
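
// isFPreg reports whether r is an FP register.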
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}
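
// isHILO reports whether r is the HI or LO register.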
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}
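
// loadByType returns the load instruction of the given type.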
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 {
			return mips.AMOVF
		} else {
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}
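
// storeByType returns the store instruction of the given type.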
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 {
			return mips.AMOVF
		} else {
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
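			// cannot move between special registers, use TMP as intermediate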
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
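		// nothing to do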
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
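			// cannot directly load to HI/LO, load to TMP and move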
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
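			// cannot directly store from HI/LO, move to TMP and store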
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
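		// result in hi,lo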
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
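			// cannot move into FP or special registers, use TMP as intermediate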
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
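		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation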
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
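			// No sym, just MOVV $off(SP), R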
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg && mips.REG_R0 <= a.Reg() && a.Reg() <= mips.REG_R31 {
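			// LoadReg from a narrower type does an extension, except loading
			// to a floating point register. So only eliminate the extension
			// if it is loaded to an integer register.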
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
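				// arg is a proper-typed load, already zero/sign-extended, don't extend again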
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64MOVWfpgp,
		ssa.OpMIPS64MOVWgpfp,
		ssa.OpMIPS64MOVVfpgp,
		ssa.OpMIPS64MOVVgpfp,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64ABSD,
		ssa.OpMIPS64SQRTF,
		ssa.OpMIPS64SQRTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
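		// SUB from REGZERO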
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
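		// runtime.duffzero expects start address - 8 in R1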
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
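		// SUBV	$sz, R1
		// MOVV	R0, sz(R1)
		// ADDV	$sz, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero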
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpMIPS64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredMove:
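		// SUBV	$sz, R1
		// MOVV	sz(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$sz, R1
		// ADDV	$sz, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src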
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64CALLtail:
		s.TailCall(v)
	case ssa.OpMIPS64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
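		// AuxInt encodes how many buffer entries we need.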
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpMIPS64LoweredPanicBoundsRR, ssa.OpMIPS64LoweredPanicBoundsRC, ssa.OpMIPS64LoweredPanicBoundsCR, ssa.OpMIPS64LoweredPanicBoundsCC:
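		// Compute the constant we put in the PCData entry for this call.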
		code, signed := ssa.BoundsKind(v.AuxInt).Code()
		xIsReg := false
		yIsReg := false
		xVal := 0
		yVal := 0
		switch v.Op {
		case ssa.OpMIPS64LoweredPanicBoundsRR:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - mips.REG_R1)
			yIsReg = true
			yVal = int(v.Args[1].Reg() - mips.REG_R1)
		case ssa.OpMIPS64LoweredPanicBoundsRC:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - mips.REG_R1)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
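				// Move constant to a register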
				yIsReg = true
				if yVal == xVal {
					yVal = 1
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(yVal)
			}
		case ssa.OpMIPS64LoweredPanicBoundsCR:
			yIsReg = true
			yVal = int(v.Args[0].Reg() - mips.REG_R1)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
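				// Move constant to a register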
				xIsReg = true
				if xVal == yVal {
					xVal = 1
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(xVal)
			}
		case ssa.OpMIPS64LoweredPanicBoundsCC:
			c := v.Aux.(ssa.PanicBoundsCC).Cx
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
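				// Move constant to a register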
				xIsReg = true
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(xVal)
			}
			c = v.Aux.(ssa.PanicBoundsCC).Cy
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
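				// Move constant to a register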
				yIsReg = true
				yVal = 1
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = mips.REG_R1 + int16(yVal)
			}
		}
		c := abi.BoundsEncode(code, signed, xIsReg, yIsReg, xVal, yVal)

		p := s.Prog(obj.APCDATA)
		p.From.SetConst(abi.PCDATA_PanicBounds)
		p.To.SetConst(int64(c))
		p = s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.PanicBounds

	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicLoad8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicLoad32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicStore8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicStore32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
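		// SYNC
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC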
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
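		// SYNC
		// LL	(Rarg0), Rout
		// ADDV Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV Rarg1, Rout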
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
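		// SYNC
		// LL	(Rarg0), Rout
		// ADDV $auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV $auxint, Rout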
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAnd32,
		ssa.OpMIPS64LoweredAtomicOr32:
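		// SYNC
		// LL	(Rarg0), Rtmp
		// AND/OR	Rarg1, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC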
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
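		// MOVV $0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC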
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1)
		p6 := s.Prog(mips.ASYNC)
		p2.To.SetTarget(p6)
	case ssa.OpMIPS64LoweredNilCheck:
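		// Issue a load which will fault if arg is nil.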
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 {
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
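		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r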
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP)
		p2.To.SetTarget(p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
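		// Closure pointer is R22 (mips.REGCTXT).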
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
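		// caller's SP is FixedFrameSize below the address of the first arg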
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredPubBarrier:
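		// SYNC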
		s.Prog(v.Op.Asm())
	case ssa.OpClobber, ssa.OpClobberReg:
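		// TODO: implement for clobberdead experiment. Nop is ok for now.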
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}