1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/logopt"
10 "cmd/compile/internal/reflectdata"
11 "cmd/compile/internal/types"
12 "cmd/internal/obj"
13 "cmd/internal/obj/s390x"
14 "cmd/internal/objabi"
15 "cmd/internal/src"
16 "encoding/binary"
17 "fmt"
18 "internal/buildcfg"
19 "io"
20 "math"
21 "math/bits"
22 "os"
23 "path/filepath"
24 "strings"
25 )
26
// deadValueChoice tells applyRewrite whether it may delete values that
// have no remaining uses while it rewrites.
type deadValueChoice bool

const (
	leaveDeadValues  deadValueChoice = false
	removeDeadValues                 = true

	// repZeroThreshold is a byte-size threshold consulted by rewrite rules
	// when lowering zeroing operations. NOTE(review): presumably the cutoff
	// above which a REP-style zeroing sequence is preferred — confirm
	// against the rules that reference it.
	repZeroThreshold = 1408
)
35
36
// applyRewrite repeatedly applies the block rewriter rb and the value
// rewriter rv to f until no further changes occur (a fixed point).
// If deadcode is removeDeadValues, values with no uses are invalidated
// as rewriting proceeds and freed at the end. Statement boundary marks
// displaced by rewrites are parked in pendingLines and re-homed on
// surviving values/blocks in the final cleanup pass.
func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValueChoice) {
	// repeat rewrites until we find no more rewrites
	pendingLines := f.cachedLineStarts // Holds statement boundaries that need to find new homes.
	pendingLines.clear()
	debug := f.pass.debug
	if debug > 1 {
		fmt.Printf("%s: rewriting for %s\n", f.pass.name, f.Name)
	}

	// Iteration budget scales with function size, with a floor for tiny
	// functions. Beyond the budget (or in debug mode) we start hashing the
	// function state to detect rewrite cycles — see below.
	itersLimit := f.NumBlocks()
	if itersLimit < 20 {
		itersLimit = 20
	}
	var iters int
	var states map[string]bool
	for {
		change := false
		deadChange := false
		for _, b := range f.Blocks {
			var b0 *Block
			if debug > 1 {
				// Keep a pre-rewrite snapshot of the block for logging.
				b0 = new(Block)
				*b0 = *b
				b0.Succs = append([]Edge{}, b.Succs...) // make a new copy, not aliasing
			}
			for i, c := range b.ControlValues() {
				// Chase copy chains on control values.
				for c.Op == OpCopy {
					c = c.Args[0]
					b.ReplaceControl(i, c)
				}
			}
			if rb(b) {
				change = true
				if debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", b0.LongString(), b.LongString())
				}
			}
			for j, v := range b.Values {
				var v0 *Value
				if debug > 1 {
					// Keep a pre-rewrite snapshot of the value for logging.
					v0 = new(Value)
					*v0 = *v
					v0.Args = append([]*Value{}, v.Args...) // make a new copy, not aliasing
				}
				if v.Uses == 0 && v.removeable() {
					if v.Op != OpInvalid && deadcode == removeDeadValues {
						// Reset any values that are now unused, so that we decrement
						// the use count of all of their arguments.
						// Not quite a deadcode pass, because it does not handle cycles,
						// but it should help Uses==1 rules to fire.
						v.reset(OpInvalid)
						deadChange = true
					}
					// No point rewriting values which aren't used.
					continue
				}

				vchange := phielimValue(v)
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				// Eliminate copy inputs.
				// If any copy input becomes unused, mark it
				// as invalid and discard its argument. Repeat
				// recursively on the discarded argument.
				// This phase helps remove phantom "dead copy" uses
				// of a value so that an x.Uses==1 rule condition
				// fires reliably.
				for i, a := range v.Args {
					if a.Op != OpCopy {
						continue
					}
					aa := copySource(a)
					v.SetArg(i, aa)
					// If a, a copy, carries a statement boundary, try to find a new
					// home for it: first aa (the copy's source, earlier in the data
					// flow), then v (the user), provided block and line match.
					if a.Pos.IsStmt() == src.PosIsStmt {
						if aa.Block == a.Block && aa.Pos.Line() == a.Pos.Line() && aa.Pos.IsStmt() != src.PosNotStmt {
							aa.Pos = aa.Pos.WithIsStmt()
						} else if v.Block == a.Block && v.Pos.Line() == a.Pos.Line() && v.Pos.IsStmt() != src.PosNotStmt {
							v.Pos = v.Pos.WithIsStmt()
						} else {
							// Record the lost boundary and look for a new home after
							// all rewrites are complete. Only one block is stored per
							// position, so a boundary duplicated across blocks may be
							// lost here.
							pendingLines.set(a.Pos, int32(a.Block.ID))
						}
						a.Pos = a.Pos.WithNotStmt()
					}
					vchange = true
					// Free the whole chain of now-unused copies.
					for a.Uses == 0 {
						b := a.Args[0]
						a.reset(OpInvalid)
						a = b
					}
				}
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				// apply rewrite function
				if rv(v) {
					vchange = true
					// If the rewritten value is a poor choice for a statement
					// boundary, move the boundary to a better neighbor.
					if v.Pos.IsStmt() == src.PosIsStmt {
						if k := nextGoodStatementIndex(v, j, b); k != j {
							v.Pos = v.Pos.WithNotStmt()
							b.Values[k].Pos = b.Values[k].Pos.WithIsStmt()
						}
					}
				}

				change = change || vchange
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}
			}
		}
		if !change && !deadChange {
			break
		}
		iters++
		if (iters > itersLimit || debug >= 2) && change {
			// Suspiciously many iterations (or debug mode): start checking
			// for rewrite cycles by hashing the whole function state. This
			// is too expensive to do routinely. Note that deadChange-only
			// iterations deliberately skip this path.
			if states == nil {
				states = make(map[string]bool)
			}
			h := f.rewriteHash()
			if _, ok := states[h]; ok {
				// We've found a cycle.
				// To diagnose it, raise debug to 2 and go around again so that
				// every applied rule is printed until the cycle repeats; if
				// debug is already >= 2 we've done that, so crash.
				if debug < 2 {
					debug = 2
					states = make(map[string]bool)
				} else {
					f.Fatalf("rewrite cycle detected")
				}
			}
			states[h] = true
		}
	}
	// Remove clobbered values and re-home any pending statement boundaries.
	for _, b := range f.Blocks {
		j := 0
		for i, v := range b.Values {
			vl := v.Pos
			if v.Op == OpInvalid {
				// Park the boundary of a deleted value for reassignment.
				if v.Pos.IsStmt() == src.PosIsStmt {
					pendingLines.set(vl, int32(b.ID))
				}
				f.freeValue(v)
				continue
			}
			if v.Pos.IsStmt() != src.PosNotStmt && !notStmtBoundary(v.Op) {
				// Adopt a pending boundary recorded for this position/block.
				if pl, ok := pendingLines.get(vl); ok && pl == int32(b.ID) {
					pendingLines.remove(vl)
					v.Pos = v.Pos.WithIsStmt()
				}
			}
			if i != j {
				b.Values[j] = v
			}
			j++
		}
		// The block itself can also carry a rescued boundary.
		if pl, ok := pendingLines.get(b.Pos); ok && pl == int32(b.ID) {
			b.Pos = b.Pos.WithIsStmt()
			pendingLines.remove(b.Pos)
		}
		b.truncateValues(j)
	}
}
222
223
224
// is64BitFloat reports whether t is a 64-bit float type.
func is64BitFloat(t *types.Type) bool {
	return t.Size() == 8 && t.IsFloat()
}

// is32BitFloat reports whether t is a 32-bit float type.
func is32BitFloat(t *types.Type) bool {
	return t.Size() == 4 && t.IsFloat()
}

// is64BitInt reports whether t is a 64-bit integer type.
func is64BitInt(t *types.Type) bool {
	return t.Size() == 8 && t.IsInteger()
}

// is32BitInt reports whether t is a 32-bit integer type.
func is32BitInt(t *types.Type) bool {
	return t.Size() == 4 && t.IsInteger()
}

// is16BitInt reports whether t is a 16-bit integer type.
func is16BitInt(t *types.Type) bool {
	return t.Size() == 2 && t.IsInteger()
}

// is8BitInt reports whether t is an 8-bit integer type.
func is8BitInt(t *types.Type) bool {
	return t.Size() == 1 && t.IsInteger()
}

// isPtr reports whether t is a pointer-shaped type.
func isPtr(t *types.Type) bool {
	return t.IsPtrShaped()
}
252
253 func copyCompatibleType(t1, t2 *types.Type) bool {
254 if t1.Size() != t2.Size() {
255 return false
256 }
257 if t1.IsInteger() {
258 return t2.IsInteger()
259 }
260 if isPtr(t1) {
261 return isPtr(t2)
262 }
263 return t1.Compare(t2) == types.CMPeq
264 }
265
266
267
// mergeSym merges two symbolic offsets. There is no real merging of
// offsets; we just pick the non-nil one.
func mergeSym(x, y Sym) Sym {
	if x == nil {
		return y
	}
	if y == nil {
		return x
	}
	panic(fmt.Sprintf("mergeSym with two non-nil syms %v %v", x, y))
}

// canMergeSym reports whether mergeSym(x, y) would succeed:
// at most one of the two syms may be non-nil.
func canMergeSym(x, y Sym) bool {
	return x == nil || y == nil
}
281
282
283
284
285
// canMergeLoadClobber reports whether the load can be merged into target
// without invalidating the schedule. It also checks that the other,
// non-load argument x is something we are ok with clobbering.
func canMergeLoadClobber(target, load, x *Value) bool {
	// The register containing x is going to get clobbered.
	// Don't merge if we still need the value of x.
	// We don't have liveness information here, but we can
	// approximate x dying with:
	//  1) target is x's only use.
	//  2) target is not in a deeper loop than x.
	switch {
	case x.Uses == 2 && x.Op == OpPhi && len(x.Args) == 2 && (x.Args[0] == target || x.Args[1] == target) && target.Uses == 1:
		// This is a simple detector to determine that x is probably
		// not live after target. (It does not need to be perfect,
		// regalloc will issue a reg-reg move to save it if we are wrong.)
		// We have:
		//   x = Phi(?, target)
		//   target = Op(load, x)
		// Because target is the only use of x, x is not live after target.
		// So x is safe to clobber.
	case x.Uses > 1:
		return false
	}
	loopnest := x.Block.Func.loopnest()
	if loopnest.depth(target.Block.ID) > loopnest.depth(x.Block.ID) {
		return false
	}
	return canMergeLoad(target, load)
}
313
314
315
// canMergeLoad reports whether the load can be merged into target without
// invalidating the schedule.
func canMergeLoad(target, load *Value) bool {
	if target.Block.ID != load.Block.ID {
		// If the load is in a different block do not merge it.
		return false
	}

	// We can't merge the load into the target if the load
	// has more than one use.
	if load.Uses != 1 {
		return false
	}

	mem := load.MemoryArg()

	// We need the load's memory arg to still be alive at target. That
	// can't be the case if one of target's args depends on a memory
	// state that is a successor of load's memory arg.
	//
	// For example, it would be invalid to merge load into target in
	// the following situation because newmem has killed oldmem
	// before target is reached:
	//     load = read ... oldmem
	//   newmem = write ... oldmem
	//     arg0 = read ... newmem
	//   target = add arg0 load
	//
	// Start the search from target's non-load arguments. If an argument
	// comes from a different block it can be excluded immediately, because
	// it must dominate load (which is in the same block as target).
	var args []*Value
	for _, a := range target.Args {
		if a != load && a.Block.ID == target.Block.ID {
			args = append(args, a)
		}
	}

	// memPreds contains memory states known to be predecessors of load's
	// memory state. It is lazily initialized.
	var memPreds map[*Value]bool
	for i := 0; len(args) > 0; i++ {
		const limit = 100
		if i >= limit {
			// Give up if we have done a lot of iterations.
			return false
		}
		v := args[len(args)-1]
		args = args[:len(args)-1]
		if target.Block.ID != v.Block.ID {
			// Since target and load are in the same block
			// we can stop searching when we leave the block.
			continue
		}
		if v.Op == OpPhi {
			// A Phi implies we have reached the top of the block.
			// The memory phi, if it exists, is always
			// the first logical store in the block.
			continue
		}
		if v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
			// We could handle this situation however it is likely
			// to be very rare.
			return false
		}
		if v.Op.SymEffect()&SymAddr != 0 {
			// This case prevents an operation that calculates the
			// address of a local variable from being forced to schedule
			// before its corresponding VarDef.
			//   v1 = LOAD ...
			//   v2 = VARDEF
			//   v3 = LEAQ
			//   v4 = CMPQ v1 v3
			// We don't want to combine the CMPQ with the load, because
			// that would force the CMPQ to schedule before the VARDEF,
			// which in turn requires the LEAQ to schedule before the VARDEF.
			return false
		}
		if v.Type.IsMemory() {
			if memPreds == nil {
				// Initialise a map containing memory states
				// known to be predecessors of load's memory
				// state.
				memPreds = make(map[*Value]bool)
				m := mem
				const limit = 50
				for i := 0; i < limit; i++ {
					if m.Op == OpPhi {
						// The memory phi, if it exists, is always
						// the first logical store in the block.
						break
					}
					if m.Block.ID != target.Block.ID {
						break
					}
					if !m.Type.IsMemory() {
						break
					}
					memPreds[m] = true
					if len(m.Args) == 0 {
						break
					}
					m = m.MemoryArg()
				}
			}

			// We can merge if v is a predecessor of load's memory state:
			// target then depends on a memory state that load's memory arg
			// supersedes, which is fine.
			if memPreds[v] {
				continue
			}
			return false
		}
		if len(v.Args) > 0 && v.Args[len(v.Args)-1] == mem {
			// If v takes mem as an input then we know mem
			// is valid at this point.
			continue
		}
		for _, a := range v.Args {
			if target.Block.ID == a.Block.ID {
				args = append(args, a)
			}
		}
	}

	return true
}
448
449
// isSameCall reports whether aux is a call to the function with the given name.
func isSameCall(aux Aux, name string) bool {
	fn := aux.(*AuxCall).Fn
	return fn != nil && fn.String() == name
}

// canLoadUnaligned reports whether the architecture supports unaligned
// load operations (alignment requirement of 1 byte).
func canLoadUnaligned(c *Config) bool {
	return c.ctxt.Arch.Alignment == 1
}
459
460
// nlzX returns the number of leading zeros in x.
func nlz64(x int64) int { return bits.LeadingZeros64(uint64(x)) }
func nlz32(x int32) int { return bits.LeadingZeros32(uint32(x)) }
func nlz16(x int16) int { return bits.LeadingZeros16(uint16(x)) }
func nlz8(x int8) int   { return bits.LeadingZeros8(uint8(x)) }

// ntzX returns the number of trailing zeros in x.
func ntz64(x int64) int { return bits.TrailingZeros64(uint64(x)) }
func ntz32(x int32) int { return bits.TrailingZeros32(uint32(x)) }
func ntz16(x int16) int { return bits.TrailingZeros16(uint16(x)) }
func ntz8(x int8) int   { return bits.TrailingZeros8(uint8(x)) }

// oneBitX reports whether exactly one bit of x is set.
func oneBit(x int64) bool   { return x&(x-1) == 0 && x != 0 }
func oneBit8(x int8) bool   { return x&(x-1) == 0 && x != 0 }
func oneBit16(x int16) bool { return x&(x-1) == 0 && x != 0 }
func oneBit32(x int32) bool { return x&(x-1) == 0 && x != 0 }
func oneBit64(x int64) bool { return x&(x-1) == 0 && x != 0 }

// nto returns the number of trailing ones in x.
func nto(x int64) int64 {
	return int64(ntz64(^x))
}
482
483
484
// logX returns the base-2 logarithm of n.
// n must be a positive power of two.
func log8(n int8) int64   { return log8u(uint8(n)) }
func log16(n int16) int64 { return log16u(uint16(n)) }
func log32(n int32) int64 { return log32u(uint32(n)) }
func log64(n int64) int64 { return log64u(uint64(n)) }

// logXu returns the base-2 logarithm of n.
// n must be a power of two.
func log8u(n uint8) int64   { return int64(bits.Len8(n)) - 1 }
func log16u(n uint16) int64 { return int64(bits.Len16(n)) - 1 }
func log32u(n uint32) int64 { return int64(bits.Len32(n)) - 1 }
func log64u(n uint64) int64 { return int64(bits.Len64(n)) - 1 }
496
497
// isPowerOfTwo reports whether n is an exact power of two.
// Zero and negative values are not powers of two.
func isPowerOfTwo[T int8 | int16 | int32 | int64](n T) bool {
	if n <= 0 {
		return false
	}
	// A power of two has exactly one bit set.
	return n&(n-1) == 0
}

// isUnsignedPowerOfTwo reports whether n has exactly one bit set.
func isUnsignedPowerOfTwo[T uint8 | uint16 | uint32 | uint64](n T) bool {
	if n == 0 {
		return false
	}
	return n&(n-1) == 0
}
506
507
508 func is32Bit(n int64) bool {
509 return n == int64(int32(n))
510 }
511
512
513 func is16Bit(n int64) bool {
514 return n == int64(int16(n))
515 }
516
517
518 func is8Bit(n int64) bool {
519 return n == int64(int8(n))
520 }
521
522
523 func isU8Bit(n int64) bool {
524 return n == int64(uint8(n))
525 }
526
527
528 func is12Bit(n int64) bool {
529 return -(1<<11) <= n && n < (1<<11)
530 }
531
532
533 func isU12Bit(n int64) bool {
534 return 0 <= n && n < (1<<12)
535 }
536
537
538 func isU16Bit(n int64) bool {
539 return n == int64(uint16(n))
540 }
541
542
543 func isU32Bit(n int64) bool {
544 return n == int64(uint32(n))
545 }
546
547
548 func is20Bit(n int64) bool {
549 return -(1<<19) <= n && n < (1<<19)
550 }
551
552
// b2i translates a boolean value to 0 or 1 for assigning to auxInt.
func b2i(b bool) int64 {
	var r int64
	if b {
		r = 1
	}
	return r
}

// b2i32 translates a boolean value to 0 or 1.
func b2i32(b bool) int32 {
	var r int32
	if b {
		r = 1
	}
	return r
}
567
// canMulStrengthReduce reports whether multiplication by the constant x
// can be strength-reduced on this configuration (a recipe exists for x).
func canMulStrengthReduce(config *Config, x int64) bool {
	_, ok := config.mulRecipes[x]
	return ok
}

// canMulStrengthReduce32 is the 32-bit variant of canMulStrengthReduce.
func canMulStrengthReduce32(config *Config, x int32) bool {
	_, ok := config.mulRecipes[int64(x)]
	return ok
}

// mulStrengthReduce returns v*x built via the configuration's recipe,
// placed at m's location. canMulStrengthReduce must have returned true.
func mulStrengthReduce(m *Value, v *Value, x int64) *Value {
	return v.Block.Func.Config.mulRecipes[x].build(m, v)
}

// mulStrengthReduce32 is the 32-bit variant of mulStrengthReduce.
// canMulStrengthReduce32 must have returned true.
func mulStrengthReduce32(m *Value, v *Value, x int32) *Value {
	return v.Block.Func.Config.mulRecipes[int64(x)].build(m, v)
}
591
592
593
// shiftIsBounded reports whether (left/right) shift Value v is known to
// be bounded, i.e. shifting by less than the width of the shifted value.
// The bound flag is stored in AuxInt.
func shiftIsBounded(v *Value) bool {
	return v.AuxInt != 0
}

// canonLessThan returns whether x is "ordered" less than y, for purposes
// of normalizing generated code. Ordering is by Op, then source position,
// then value ID.
func canonLessThan(x, y *Value) bool {
	if x.Op != y.Op {
		return x.Op < y.Op
	}
	if !x.Pos.SameFileAndLine(y.Pos) {
		return x.Pos.Before(y.Pos)
	}
	return x.ID < y.ID
}
609
610
611
// truncate64Fto32F converts a float64 to a float32 value,
// panicking if the conversion would not be exact.
func truncate64Fto32F(f float64) float32 {
	if !isExactFloat32(f) {
		panic("truncate64Fto32F: truncation is not exact")
	}
	if !math.IsNaN(f) {
		return float32(f)
	}
	// NaN bit patterns aren't necessarily preserved across conversion
	// instructions, so do the conversion manually: keep the sign bit,
	// force the float32 NaN exponent, and truncate the mantissa.
	b := math.Float64bits(f)
	m := b & ((1 << 52) - 1) // mantissa (a.k.a. significand)
	//          | sign                  | exponent   | mantissa       |
	r := uint32(((b >> 32) & (1 << 31)) | 0x7f800000 | (m >> (52 - 23)))
	return math.Float32frombits(r)
}

// DivisionNeedsFixUp reports whether the division needs fix-up code
// (the "known OK" flag in AuxInt is unset).
func DivisionNeedsFixUp(v *Value) bool {
	return v.AuxInt == 0
}

// auxTo32F decodes a float32 from the AuxInt value provided.
func auxTo32F(i int64) float32 {
	return truncate64Fto32F(math.Float64frombits(uint64(i)))
}
637
// The auxIntToX functions decode an AuxInt (always stored as an int64)
// back into the typed value the rewrite rules operate on.
func auxIntToBool(i int64) bool {
	if i == 0 {
		return false
	}
	return true
}
func auxIntToInt8(i int64) int8 {
	return int8(i)
}
func auxIntToInt16(i int64) int16 {
	return int16(i)
}
func auxIntToInt32(i int64) int32 {
	return int32(i)
}
func auxIntToInt64(i int64) int64 {
	return i
}
func auxIntToUint8(i int64) uint8 {
	return uint8(i)
}
func auxIntToFloat32(i int64) float32 {
	return float32(math.Float64frombits(uint64(i)))
}
func auxIntToFloat64(i int64) float64 {
	return math.Float64frombits(uint64(i))
}
func auxIntToValAndOff(i int64) ValAndOff {
	return ValAndOff(i)
}
func auxIntToArm64BitField(i int64) arm64BitField {
	return arm64BitField(i)
}
func auxIntToFlagConstant(x int64) flagConstant {
	return flagConstant(x)
}

func auxIntToOp(cc int64) Op {
	return Op(cc)
}
678
// The xToAuxInt functions encode typed values into the int64 AuxInt field.
func boolToAuxInt(b bool) int64 {
	if b {
		return 1
	}
	return 0
}
func int8ToAuxInt(i int8) int64 {
	return int64(i)
}
func int16ToAuxInt(i int16) int64 {
	return int64(i)
}
func int32ToAuxInt(i int32) int64 {
	return int64(i)
}
func int64ToAuxInt(i int64) int64 {
	return int64(i)
}
func uint8ToAuxInt(i uint8) int64 {
	// Sign-extend through int8 so that 8-bit constants are stored the same
	// way regardless of whether they came from a signed or unsigned source.
	return int64(int8(i))
}
func float32ToAuxInt(f float32) int64 {
	return int64(math.Float64bits(float64(f)))
}
func float64ToAuxInt(f float64) int64 {
	return int64(math.Float64bits(f))
}
func valAndOffToAuxInt(v ValAndOff) int64 {
	return int64(v)
}
func arm64BitFieldToAuxInt(v arm64BitField) int64 {
	return int64(v)
}
func flagConstantToAuxInt(x flagConstant) int64 {
	return int64(x)
}

func opToAuxInt(o Op) int64 {
	return int64(o)
}
719
720
// Aux is an interface to hold miscellaneous data in Blocks and Values.
type Aux interface {
	CanBeAnSSAAux()
}

// auxMark is a distinguished marker Aux value with no payload.
type auxMark bool

func (auxMark) CanBeAnSSAAux() {}

// AuxMark is the canonical auxMark instance used by rewrite rules.
var AuxMark auxMark

// stringAux wraps string values for use in Aux.
type stringAux string

func (stringAux) CanBeAnSSAAux() {}

// auxToString extracts the string payload from an Aux.
func auxToString(i Aux) string {
	return string(i.(stringAux))
}

// auxToSym extracts a Sym from an Aux, or nil if it isn't one.
func auxToSym(i Aux) Sym {
	// TODO: kind of a hack - allows nil interface through
	s, _ := i.(Sym)
	return s
}
func auxToType(i Aux) *types.Type {
	return i.(*types.Type)
}
func auxToCall(i Aux) *AuxCall {
	return i.(*AuxCall)
}
func auxToS390xCCMask(i Aux) s390x.CCMask {
	return i.(s390x.CCMask)
}
func auxToS390xRotateParams(i Aux) s390x.RotateParams {
	return i.(s390x.RotateParams)
}

// StringToAux wraps s so it can be stored in an Aux field.
func StringToAux(s string) Aux {
	return stringAux(s)
}
func symToAux(s Sym) Aux {
	return s
}
func callToAux(s *AuxCall) Aux {
	return s
}
func typeToAux(t *types.Type) Aux {
	return t
}
func s390xCCMaskToAux(c s390x.CCMask) Aux {
	return c
}
func s390xRotateParamsToAux(r s390x.RotateParams) Aux {
	return r
}
776
777
// uaddOvf reports whether unsigned a+b would overflow
// (wrap around) when computed in 64 bits.
func uaddOvf(a, b int64) bool {
	// The sum wraps exactly when the add produces a carry out.
	_, carry := bits.Add64(uint64(a), uint64(b), 0)
	return carry != 0
}
781
// devirtLECall de-virtualizes an InterLECall: it turns v into a
// StaticLECall to sym and drops the interface's closure argument.
func devirtLECall(v *Value, sym *obj.LSym) *Value {
	v.Op = OpStaticLECall
	auxcall := v.Aux.(*AuxCall)
	auxcall.Fn = sym
	// Remove first arg
	v.Args[0].Uses--
	copy(v.Args[0:], v.Args[1:])
	v.Args[len(v.Args)-1] = nil // aid GC
	v.Args = v.Args[:len(v.Args)-1]
	if f := v.Block.Func; f.pass.debug > 0 {
		f.Warnl(v.Pos, "de-virtualizing call")
	}
	return v
}
796
797
// isSamePtr reports whether p1 and p2 point to the same address.
func isSamePtr(p1, p2 *Value) bool {
	if p1 == p2 {
		return true
	}
	if p1.Op != p2.Op {
		// Try harder: strip zero-offset OpOffPtr wrappers and retry.
		for p1.Op == OpOffPtr && p1.AuxInt == 0 {
			p1 = p1.Args[0]
		}
		for p2.Op == OpOffPtr && p2.AuxInt == 0 {
			p2 = p2.Args[0]
		}
		if p1 == p2 {
			return true
		}
		if p1.Op != p2.Op {
			return false
		}
	}
	switch p1.Op {
	case OpOffPtr:
		return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
	case OpAddr, OpLocalAddr:
		return p1.Aux == p2.Aux
	case OpAddPtr:
		return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
	}
	return false
}

// isStackPtr reports whether v is known to point into the stack frame:
// after stripping offsets, it is the stack pointer or a local address.
func isStackPtr(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr {
		v = v.Args[0]
	}
	return v.Op == OpSP || v.Op == OpLocalAddr
}
833
834
835
836
// disjoint reports whether the memory region specified by [p1:p1+n1)
// does not overlap with [p2:p2+n2).
// A return value of false does not imply the regions overlap.
func disjoint(p1 *Value, n1 int64, p2 *Value, n2 int64) bool {
	if n1 == 0 || n2 == 0 {
		return true
	}
	if p1 == p2 {
		return false
	}
	// baseAndOffset strips OpOffPtr wrappers (accumulating the offset) and
	// looks through nil checks to find the underlying base pointer.
	baseAndOffset := func(ptr *Value) (base *Value, offset int64) {
		base, offset = ptr, 0
		for base.Op == OpOffPtr {
			offset += base.AuxInt
			base = base.Args[0]
		}
		if opcodeTable[base.Op].nilCheck {
			base = base.Args[0]
		}
		return base, offset
	}

	// Run type-based alias analysis first.
	if disjointTypes(p1.Type, p2.Type) {
		return true
	}

	p1, off1 := baseAndOffset(p1)
	p2, off2 := baseAndOffset(p2)
	if isSamePtr(p1, p2) {
		// Same base: disjoint iff the offset ranges don't overlap.
		return !overlap(off1, n1, off2, n2)
	}
	// p1 and p2 are not the same, so if they are both OpAddrs then
	// they point to different variables.
	// If one pointer is on the stack and the other is an argument
	// then they can't overlap.
	switch p1.Op {
	case OpAddr, OpLocalAddr:
		if p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpSP {
			return true
		}
		return (p2.Op == OpArg || p2.Op == OpArgIntReg) && p1.Args[0].Op == OpSP
	case OpArg, OpArgIntReg:
		if p2.Op == OpSP || p2.Op == OpLocalAddr {
			return true
		}
	case OpSP:
		return p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpArg || p2.Op == OpArgIntReg || p2.Op == OpSP
	}
	return false
}

// disjointTypes reports whether a memory region pointed to by a pointer of
// type t1 cannot overlap a memory region pointed to by a pointer of type t2,
// based on type aliasing rules.
func disjointTypes(t1 *types.Type, t2 *types.Type) bool {
	// Unsafe pointers can alias with anything.
	if t1.IsUnsafePtr() || t2.IsUnsafePtr() {
		return false
	}

	if !t1.IsPtr() || !t2.IsPtr() {
		panic("disjointTypes: one of arguments is not a pointer")
	}

	t1 = t1.Elem()
	t2 = t2.Elem()

	// Not-in-heap types are not supported: HasPointers does not work
	// correctly for them.
	if t1.NotInHeap() || t2.NotInHeap() {
		return false
	}

	isPtrShaped := func(t *types.Type) bool { return int(t.Size()) == types.PtrSize && t.HasPointers() }

	// A pointer-shaped value and a pointer-free value cannot alias.
	if (isPtrShaped(t1) && !t2.HasPointers()) ||
		(isPtrShaped(t2) && !t1.HasPointers()) {
		return true
	}

	return false
}
918
919
// moveSize returns the number of bytes an aligned MOV instruction moves
// for the given alignment.
func moveSize(align int64, c *Config) int64 {
	switch {
	case align%8 == 0 && c.PtrSize == 8:
		return 8
	case align%4 == 0:
		return 4
	case align%2 == 0:
		return 2
	}
	return 1
}
931
932
933
934
// mergePoint finds a block among a's blocks that dominates b and is itself
// dominated by all of a's blocks. Returns nil if it can't find one.
// Might return nil even if one exists (it is a best-effort search).
func mergePoint(b *Block, a ...*Value) *Block {
	// Walk backward from b looking for one of the a's blocks.

	// Limit the walk to 100 steps to bound the work.
	d := 100

	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				goto found
			}
		}
		if len(b.Preds) > 1 {
			// Don't know which way to go back. Abort.
			return nil
		}
		b = b.Preds[0].b
		d--
	}
	return nil // too far away
found:
	// At this point, b is the first of a's blocks found by walking backward.
	// If we return anything, r will be it.
	r := b

	// Keep walking; all of a's blocks must be found (so they dominate r).
	na := 0
	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				na++
			}
		}
		if na == len(a) {
			// Found all of a in a backwards walk. We can return r.
			return r
		}
		if len(b.Preds) > 1 {
			return nil
		}
		b = b.Preds[0].b
		d--

	}
	return nil // too far away
}
981
982
983
984
985
986
// clobber invalidates values. Always returns true.
// It is used by rewrite rules to:
//   - make sure the values are really dead and never used again, and
//   - decrement the use counts of the values' args.
func clobber(vv ...*Value) bool {
	for _, v := range vv {
		v.reset(OpInvalid)
		// Note: leave v.Block intact. The Block field is used after clobber.
	}
	return true
}

// resetCopy resets v to be a copy of arg. Always returns true.
func resetCopy(v *Value, arg *Value) bool {
	v.reset(OpCopy)
	v.AddArg(arg)
	return true
}

// clobberIfDead resets v when its use count is 1. Always returns true.
// It is used by rewrite rules to decrement use counts of v's args
// when v is dead and never used.
func clobberIfDead(v *Value) bool {
	if v.Uses == 1 {
		v.reset(OpInvalid)
	}
	// Note: leave v.Block intact. The Block field is used after clobberIfDead.
	return true
}
1013
1014
1015
1016
1017
1018
1019
// noteRule is an easy way to track if a rule is matched when writing new
// ones. Make the rule of interest also conditional on
//
//	noteRule("note to self: rule of interest matched")
//
// and the message will print when the rule matches.
func noteRule(s string) bool {
	fmt.Println(s)
	return true
}

// countRule increments Func.ruleMatches[key].
// This makes it easier to find functions that contain lots of rule
// matches when developing new rules.
func countRule(v *Value, key string) bool {
	f := v.Block.Func
	if f.ruleMatches == nil {
		f.ruleMatches = make(map[string]int)
	}
	f.ruleMatches[key]++
	return true
}

// warnRule generates compiler debug output with string s when cond is true,
// the rule has fired, and v is not in autogenerated code (line > 1).
func warnRule(cond bool, v *Value, s string) bool {
	if pos := v.Pos; pos.Line() > 1 && cond {
		v.Block.Func.Warnl(pos, s)
	}
	return true
}

// flagArg extracts x from a pseudo-op like (LessThan x);
// it returns nil if v's single argument is not a flags value.
func flagArg(v *Value) *Value {
	if len(v.Args) != 1 || !v.Args[0].Type.IsFlags() {
		return nil
	}
	return v.Args[0]
}
1055
1056
1057
1058
1059
1060
// arm64Negate finds the complement to an ARM64 condition code,
// for example !Equal -> NotEqual or !LessThan -> GreaterEqual.
// Note that floating-point comparisons are negated to their "Not"
// counterparts (not the reversed comparison) because of NaN semantics.
func arm64Negate(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterEqual
	case OpARM64LessThanU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterThan:
		return OpARM64LessEqual
	case OpARM64GreaterThanU:
		return OpARM64LessEqualU
	case OpARM64LessEqual:
		return OpARM64GreaterThan
	case OpARM64LessEqualU:
		return OpARM64GreaterThanU
	case OpARM64GreaterEqual:
		return OpARM64LessThan
	case OpARM64GreaterEqualU:
		return OpARM64LessThanU
	case OpARM64Equal:
		return OpARM64NotEqual
	case OpARM64NotEqual:
		return OpARM64Equal
	case OpARM64LessThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64NotLessEqualF
	case OpARM64NotLessEqualF:
		return OpARM64LessEqualF
	case OpARM64GreaterThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64GreaterEqualF
	default:
		panic("unreachable")
	}
}
1103
1104
1105
1106
1107
1108
// arm64Invert evaluates (InvertFlags op), which is the same as altering
// the condition codes such that the same result would be produced if the
// arguments to the flag-generating instruction were reversed, e.g.
// (InvertFlags (CMP x y)) -> (CMP y x).
func arm64Invert(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterThan
	case OpARM64LessThanU:
		return OpARM64GreaterThanU
	case OpARM64GreaterThan:
		return OpARM64LessThan
	case OpARM64GreaterThanU:
		return OpARM64LessThanU
	case OpARM64LessEqual:
		return OpARM64GreaterEqual
	case OpARM64LessEqualU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterEqual:
		return OpARM64LessEqual
	case OpARM64GreaterEqualU:
		return OpARM64LessEqualU
	case OpARM64Equal, OpARM64NotEqual:
		// Equality is symmetric in its operands.
		return op
	case OpARM64LessThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64GreaterEqualF
	case OpARM64GreaterEqualF:
		return OpARM64LessEqualF
	case OpARM64NotLessThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64NotLessEqualF
	default:
		panic("unreachable")
	}
}
1149
1150
1151
1152
// ccARM64Eval evaluates an ARM64 condition op against a flags value
// that is potentially constant; it returns 1 for true, -1 for false,
// and 0 for not constant.
func ccARM64Eval(op Op, flags *Value) int {
	fop := flags.Op
	if fop == OpARM64InvertFlags {
		// Inverted flags flip the result.
		return -ccARM64Eval(op, flags.Args[0])
	}
	if fop != OpARM64FlagConstant {
		return 0
	}
	fc := flagConstant(flags.AuxInt)
	// b2i maps a known boolean outcome to the 1/-1 encoding.
	b2i := func(b bool) int {
		if b {
			return 1
		}
		return -1
	}
	switch op {
	case OpARM64Equal:
		return b2i(fc.eq())
	case OpARM64NotEqual:
		return b2i(fc.ne())
	case OpARM64LessThan:
		return b2i(fc.lt())
	case OpARM64LessThanU:
		return b2i(fc.ult())
	case OpARM64GreaterThan:
		return b2i(fc.gt())
	case OpARM64GreaterThanU:
		return b2i(fc.ugt())
	case OpARM64LessEqual:
		return b2i(fc.le())
	case OpARM64LessEqualU:
		return b2i(fc.ule())
	case OpARM64GreaterEqual:
		return b2i(fc.ge())
	case OpARM64GreaterEqualU:
		return b2i(fc.uge())
	}
	return 0
}
1192
1193
1194
// logRule logs the use of the rule s. This is only called if rewrite
// rules were generated with logging enabled.
func logRule(s string) {
	if ruleFile == nil {
		// Open a log file to write to. We open in append mode because the
		// compiler runs many times during a build and we want the
		// concatenation of all those logs; users must remove the old log
		// to get fresh data. NOTE(review): concurrent compiler processes
		// append to this file without synchronization.
		w, err := os.OpenFile(filepath.Join(os.Getenv("GOROOT"), "src", "rulelog"),
			os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
		if err != nil {
			panic(err)
		}
		ruleFile = w
	}
	_, err := fmt.Fprintln(ruleFile, s)
	if err != nil {
		panic(err)
	}
}

// ruleFile is the lazily-opened destination for logRule output.
var ruleFile io.Writer
1217
// isConstZero reports whether v is known to be a zero constant: a nil
// pointer, a numeric/bool constant equal to 0, or a composite
// (string/interface/complex/slice) built entirely from zero parts.
func isConstZero(v *Value) bool {
	switch v.Op {
	case OpConstNil:
		return true
	case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConst32F, OpConst64F:
		return v.AuxInt == 0
	case OpStringMake, OpIMake, OpComplexMake:
		return isConstZero(v.Args[0]) && isConstZero(v.Args[1])
	case OpSliceMake:
		return isConstZero(v.Args[0]) && isConstZero(v.Args[1]) && isConstZero(v.Args[2])
	case OpStringPtr, OpStringLen, OpSlicePtr, OpSliceLen, OpSliceCap, OpITab, OpIData, OpComplexReal, OpComplexImag:
		// Projections of a zero composite are zero.
		return isConstZero(v.Args[0])
	}
	return false
}
1233
1234
1235 func reciprocalExact64(c float64) bool {
1236 b := math.Float64bits(c)
1237 man := b & (1<<52 - 1)
1238 if man != 0 {
1239 return false
1240 }
1241 exp := b >> 52 & (1<<11 - 1)
1242
1243
1244 switch exp {
1245 case 0:
1246 return false
1247 case 0x7ff:
1248 return false
1249 case 0x7fe:
1250 return false
1251 default:
1252 return true
1253 }
1254 }
1255
1256
1257 func reciprocalExact32(c float32) bool {
1258 b := math.Float32bits(c)
1259 man := b & (1<<23 - 1)
1260 if man != 0 {
1261 return false
1262 }
1263 exp := b >> 23 & (1<<8 - 1)
1264
1265
1266 switch exp {
1267 case 0:
1268 return false
1269 case 0xff:
1270 return false
1271 case 0xfe:
1272 return false
1273 default:
1274 return true
1275 }
1276 }
1277
1278
// isARMImmRot reports whether v can be encoded as an ARM immediate:
// an 8-bit value rotated right by an even number of bits.
func isARMImmRot(v uint32) bool {
	for i := 0; i < 16; i++ {
		// Undo a rotate-right of 2*i bits by rotating left, then check
		// whether the result fits in the low 8 bits.
		if bits.RotateLeft32(v, 2*i)&^0xff == 0 {
			return true
		}
	}

	return false
}
1289
1290
1291
// overlap reports whether the ranges given by the given offset and
// size pairs overlap.
func overlap(offset1, size1, offset2, size2 int64) bool {
	// Range 1 starts inside range 2?
	if offset2 <= offset1 && offset1 < offset2+size2 {
		return true
	}
	// Range 2 starts inside range 1?
	if offset1 <= offset2 && offset2 < offset1+size1 {
		return true
	}
	return false
}
1301
1302
1303
1304
// zeroUpper32Bits reports whether the upper 32 bits of x's register are
// known to be zero, based on a list of ops that zero-extend their result.
// For Phi/Select ops it recurses into arguments, bounded by depth.
func zeroUpper32Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		// If the value is signed, it might get re-sign-extended
		// during spill and restore, so the upper bits are not reliable.
		return false
	}
	switch x.Op {
	case OpAMD64MOVLconst, OpAMD64MOVLload, OpAMD64MOVLQZX, OpAMD64MOVLloadidx1,
		OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVBload, OpAMD64MOVBloadidx1,
		OpAMD64MOVLloadidx4, OpAMD64ADDLload, OpAMD64SUBLload, OpAMD64ANDLload,
		OpAMD64ORLload, OpAMD64XORLload, OpAMD64CVTTSD2SL,
		OpAMD64ADDL, OpAMD64ADDLconst, OpAMD64SUBL, OpAMD64SUBLconst,
		OpAMD64ANDL, OpAMD64ANDLconst, OpAMD64ORL, OpAMD64ORLconst,
		OpAMD64XORL, OpAMD64XORLconst, OpAMD64NEGL, OpAMD64NOTL,
		OpAMD64SHRL, OpAMD64SHRLconst, OpAMD64SARL, OpAMD64SARLconst,
		OpAMD64SHLL, OpAMD64SHLLconst:
		return true
	case OpARM64REV16W, OpARM64REVW, OpARM64RBITW, OpARM64CLZW, OpARM64EXTRWconst,
		OpARM64MULW, OpARM64MNEGW, OpARM64UDIVW, OpARM64DIVW, OpARM64UMODW,
		OpARM64MADDW, OpARM64MSUBW, OpARM64RORW, OpARM64RORWconst:
		return true
	case OpArg:
		// amd64 is the only architecture in this check for which 32-bit
		// args are known to be zero-extended in the register.
		return x.Type.Size() == 4 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		// Phis can use each other as arguments; instead of tracking
		// visited values, just limit recursion depth.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper32Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1346
1347
// zeroUpper48Bits reports whether the upper 48 bits of x's register are
// known to be zero; see zeroUpper32Bits for the overall approach.
func zeroUpper48Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		// Signed values may be re-sign-extended during spill/restore.
		return false
	}
	switch x.Op {
	case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2:
		return true
	case OpArg:
		// amd64 loads 16-bit args zero-extended.
		return x.Type.Size() == 2 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		// Bound recursion instead of tracking visited values.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper48Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1373
1374
// zeroUpper56Bits reports whether the upper 56 bits of x's register are
// known to be zero; see zeroUpper32Bits for the overall approach.
func zeroUpper56Bits(x *Value, depth int) bool {
	if x.Type.IsSigned() && x.Type.Size() < 8 {
		// Signed values may be re-sign-extended during spill/restore.
		return false
	}
	switch x.Op {
	case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1:
		return true
	case OpArg:
		// amd64 loads 8-bit args zero-extended.
		return x.Type.Size() == 1 && x.Block.Func.Config.arch == "amd64"
	case OpPhi, OpSelect0, OpSelect1:
		// Bound recursion instead of tracking visited values.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper56Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1400
// isInlinableMemclr reports whether the function to clear sz bytes of
// memory can be inlined on the given architecture.
func isInlinableMemclr(c *Config, sz int64) bool {
	if sz < 0 {
		return false
	}
	// TODO: expand this check to allow other architectures.
	switch c.arch {
	case "amd64", "arm64":
		return true
	case "ppc64le", "ppc64", "loong64":
		return sz < 512
	}
	return false
}
1415
1416
1417
1418
1419
1420
// isInlinableMemmove reports whether the given arch performs a Move of the
// given size faster than memmove. It only returns true if replacing the
// memmove with a Move is safe: either the Move does all of its loads before
// any of its stores, or the arguments are known to be disjoint.
func isInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	// It is always safe to convert memmove into Move when its arguments
	// are disjoint. Moves may or may not be faster for large sizes
	// depending on how the platform lowers them, so only do this on
	// platforms known to have fast Move ops.
	switch c.arch {
	case "amd64":
		return sz <= 16 || (sz < 1024 && disjoint(dst, sz, src, sz))
	case "arm64":
		return sz <= 64 || (sz <= 1024 && disjoint(dst, sz, src, sz))
	case "386":
		return sz <= 8
	case "s390x", "ppc64", "ppc64le":
		return sz <= 8 || disjoint(dst, sz, src, sz)
	case "arm", "loong64", "mips", "mips64", "mipsle", "mips64le":
		return sz <= 4
	}
	return false
}

// IsInlinableMemmove is the exported wrapper of isInlinableMemmove.
func IsInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	return isInlinableMemmove(dst, src, sz, c)
}
1443
1444
1445
1446
// logLargeCopy logs the occurrence of a large copy.
// The best place to do this is in the rewrite rules where the size of the
// move is easy to find. "Large" is arbitrarily chosen to be 128 bytes.
// Always returns true so it can be used as a rule condition.
func logLargeCopy(v *Value, s int64) bool {
	if s < 128 {
		return true
	}
	if logopt.Enabled() {
		logopt.LogOpt(v.Pos, "copy", "lower", v.Block.Func.Name, fmt.Sprintf("%d bytes", s))
	}
	return true
}

// LogLargeCopy logs the occurrence of a large copy, for callers that have
// only a function name and position rather than a *Value.
func LogLargeCopy(funcName string, pos src.XPos, s int64) {
	if s < 128 {
		return
	}
	if logopt.Enabled() {
		logopt.LogOpt(pos, "copy", "lower", funcName, fmt.Sprintf("%d bytes", s))
	}
}
1464
1465
1466
// hasSmallRotate reports whether the architecture has rotate instructions
// for sizes smaller than 32 bits.
func hasSmallRotate(c *Config) bool {
	switch c.arch {
	case "amd64", "386":
		return true
	default:
		return false
	}
}

// supportsPPC64PCRel reports whether PC-relative addressing may be used:
// it requires GOPPC64 >= 10 (POWER10) and is only enabled on linux.
func supportsPPC64PCRel() bool {
	// PCRel is currently supported for >= power10, linux only.
	return buildcfg.GOPPC64 >= 10 && buildcfg.GOOS == "linux"
}
1481
// newPPC64ShiftAuxInt packs a shift amount sh and mask bounds mb/me into a
// single auxint of the form sh<<16 | mb<<8 | me. Each field must lie in
// [0, sz) or the function panics.
func newPPC64ShiftAuxInt(sh, mb, me, sz int64) int32 {
	if sh < 0 || sh >= sz {
		panic("PPC64 shift arg sh out of range")
	}
	if mb < 0 || mb >= sz {
		panic("PPC64 shift arg mb out of range")
	}
	if me < 0 || me >= sz {
		panic("PPC64 shift arg me out of range")
	}
	return int32(sh<<16 | mb<<8 | me)
}

// GetPPC64Shiftsh extracts the sh field from an auxint built by
// newPPC64ShiftAuxInt (sign-extended through int8).
func GetPPC64Shiftsh(auxint int64) int64 {
	return int64(int8(auxint >> 16))
}

// GetPPC64Shiftmb extracts the mb field from an auxint built by
// newPPC64ShiftAuxInt (sign-extended through int8).
func GetPPC64Shiftmb(auxint int64) int64 {
	return int64(int8(auxint >> 8))
}
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
// isPPC64WordRotateMask reports whether v64, viewed as a 32-bit word, can
// be encoded as a mask for an rlwinm-like operation. Valid masks are a
// single contiguous run of ones, which may also wrap from the msb to the
// lsb; that is, 32-bit strings of the form 0..01..10..0 or 1..10..01..1.
// Zero is not a valid mask.
func isPPC64WordRotateMask(v64 int64) bool {
	// Adding the isolated rightmost 1 bit clears the run iff the ones are
	// contiguous; do the same on the complement to catch wrapping masks.
	v := uint32(v64)
	vp := (v & -v) + v
	// Check for the wrapping case as well.
	vn := ^v
	vpn := (vn & -vn) + vn
	return (v&vp == 0 || vn&vpn == 0) && v != 0
}

// isPPC64WordRotateMaskNonWrapping is like isPPC64WordRotateMask but
// rejects masks that wrap around bit 31, and additionally requires the
// upper 32 bits of v64 to be zero.
func isPPC64WordRotateMaskNonWrapping(v64 int64) bool {
	// Isolate rightmost 1 bit; contiguity check as above, no wrap allowed.
	v := uint32(v64)
	vp := (v & -v) + v
	return (v&vp == 0) && v != 0 && uint64(uint32(v64)) == uint64(v64)
}
1531
1532
1533
1534
// encodePPC64RotateMask compresses a rotate amount and mask into a single
// int64 of the form me | mb<<8 | rotate<<16 | nbits<<24, from which the
// mask can be regenerated (see DecodePPC64RotateMask). nbits selects
// between 32- and 64-bit masks. Panics on an all-zero/all-one mask or an
// out-of-range rotate.
func encodePPC64RotateMask(rotate, mask, nbits int64) int64 {
	var mb, me, mbn, men int

	// Determine the boundaries of the mask and of its complement.
	if mask == 0 || ^mask == 0 || rotate >= nbits {
		panic(fmt.Sprintf("invalid PPC64 rotate mask: %x %d %d", uint64(mask), rotate, nbits))
	} else if nbits == 32 {
		mb = bits.LeadingZeros32(uint32(mask))
		me = 32 - bits.TrailingZeros32(uint32(mask))
		mbn = bits.LeadingZeros32(^uint32(mask))
		men = 32 - bits.TrailingZeros32(^uint32(mask))
	} else {
		mb = bits.LeadingZeros64(uint64(mask))
		me = 64 - bits.TrailingZeros64(uint64(mask))
		mbn = bits.LeadingZeros64(^uint64(mask))
		men = 64 - bits.TrailingZeros64(^uint64(mask))
	}
	// Check for a wrapping mask (e.g. bits set at both ends).
	if mb == 0 && me == int(nbits) {
		// Swap in the boundaries of the inverted mask.
		mb, me = men, mbn
	}

	return int64(me) | int64(mb<<8) | int64(rotate<<16) | int64(nbits<<24)
}
1560
1561
1562
1563
1564
1565
// mergePPC64RLDICLandSRDconst merges the auxint of an RLDICL that wraps an
// (SRDconst [s] x) into a single RLDICL auxint applied directly to x: the
// shift becomes rotate 64-s, and the mask-begin is raised to at least s.
// The encoded value must not already contain a rotate.
func mergePPC64RLDICLandSRDconst(encoded, s int64) int64 {
	mb := s
	r := 64 - s
	// A larger mb denotes a smaller mask; keep the more restrictive one.
	if (encoded>>8)&0xFF < mb {
		encoded = (encoded &^ 0xFF00) | mb<<8
	}
	// The rotate field is expected to be 0.
	if (encoded & 0xFF0000) != 0 {
		panic("non-zero rotate")
	}
	return encoded | r<<16
}
1579
1580
1581
// DecodePPC64RotateMask is the inverse operation of encodePPC64RotateMask.
// The values returned as mb and me satisfy the POWER ISA definition of
// MASK(x,y) where MASK(mb,me) == mask.
func DecodePPC64RotateMask(sauxint int64) (rotate, mb, me int64, mask uint64) {
	auxint := uint64(sauxint)
	rotate = int64((auxint >> 16) & 0xFF)
	mb = int64((auxint >> 8) & 0xFF)
	me = int64((auxint >> 0) & 0xFF)
	nbits := int64((auxint >> 24) & 0xFF)
	// Rebuild the contiguous mask from its boundaries; mb > me denotes a
	// wrapping mask, stored as the complement.
	mask = ((1 << uint(nbits-mb)) - 1) ^ ((1 << uint(nbits-me)) - 1)
	if mb > me {
		mask = ^mask
	}
	if nbits == 32 {
		mask = uint64(uint32(mask))
	}

	// Fixup ME to match the ISA definition: the second argument of
	// MASK(.., me) is inclusive.
	me = (me - 1) & (nbits - 1)
	return
}
1601
1602
1603
1604
// isPPC64ValidShiftMask reports whether v is a non-zero mask of ones
// filling the least significant bits (i.e. of the form 2^n-1).
func isPPC64ValidShiftMask(v int64) bool {
	return v != 0 && (v+1)&v == 0
}
1611
// getPPC64ShiftMaskLength returns the number of bits in the low-order
// run of ones in v (v should satisfy isPPC64ValidShiftMask).
func getPPC64ShiftMaskLength(v int64) int64 {
	n := bits.Len64(uint64(v))
	return int64(n)
}
1615
1616
1617
// mergePPC64RShiftMask restricts mask m to the bits that can survive a
// logical right shift by s within an nbits-wide word.
func mergePPC64RShiftMask(m, s, nbits int64) int64 {
	smask := (uint64(1)<<uint(nbits) - 1) >> uint(s)
	return m & int64(smask)
}
1622
1623
// mergePPC64AndSrwi encodes the merged (AND m (SRWconst [s] x)) as a
// RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64AndSrwi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 32)
	if !isPPC64WordRotateMask(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}
1631
1632
// mergePPC64AndSrdi encodes the merged (AND m (SRDconst [s] x)) as a
// RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64AndSrdi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 64)

	// Verify the result only selects bits shifted in from the low word;
	// a 32-bit rotate cannot reproduce bits from the upper word.
	rv := bits.RotateLeft64(0xFFFFFFFF00000000, -int(s))
	if rv&uint64(mask) != 0 {
		return 0
	}
	if !isPPC64WordRotateMaskNonWrapping(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}
1646
1647
// mergePPC64AndSldi encodes the merged (AND m (SLDconst [s] x)) as a
// RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64AndSldi(m, s int64) int64 {
	mask := -1 << s & m

	// Verify the result only selects bits rotated from the low word;
	// a 32-bit rotate cannot reproduce bits from the upper word.
	rv := bits.RotateLeft64(0xFFFFFFFF00000000, int(s))
	if rv&uint64(mask) != 0 {
		return 0
	}
	if !isPPC64WordRotateMaskNonWrapping(mask) {
		return 0
	}
	return encodePPC64RotateMask(s&31, mask, 32)
}
1661
1662
1663
// mergePPC64ClrlsldiSrw encodes the merged (CLRLSLDI [sld] (SRWconst [srw] x))
// as a RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64ClrlsldiSrw(sld, srw int64) int64 {
	mask_1 := uint64(0xFFFFFFFF >> uint(srw))
	// For CLRLSLDI, it is more convenient to think of it as masking the
	// left bits, then rotating left by sh.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))

	// Rewrite the combined mask to apply after the final left shift.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	r_1 := 32 - srw
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 31 // This can wrap.

	// Reject results that do not fit in a 32-bit rotate mask.
	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}
	return encodePPC64RotateMask(int64(r_3), int64(mask_3), 32)
}
1681
1682
1683
// mergePPC64ClrlsldiSrd encodes the merged (CLRLSLDI [sld] (SRDconst [srd] x))
// as a RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64ClrlsldiSrd(sld, srd int64) int64 {
	mask_1 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(srd)
	// For CLRLSLDI, it is more convenient to think of it as masking the
	// left bits, then rotating left by sh.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))

	// Rewrite the combined mask to apply after the final left shift.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	r_1 := 64 - srd
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 63 // This can wrap.

	// Reject results that do not fit in a 32-bit rotate mask.
	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}
	// This combine only works when selecting and shifting the lower 32 bits.
	v1 := bits.RotateLeft64(0xFFFFFFFF00000000, int(r_3))
	if v1&mask_3 != 0 {
		return 0
	}
	return encodePPC64RotateMask(int64(r_3&31), int64(mask_3), 32)
}
1706
1707
1708
// mergePPC64ClrlsldiRlwinm encodes the merged (CLRLSLDI [sld] (RLWINM [rlw] x))
// as a RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64ClrlsldiRlwinm(sld int32, rlw int64) int64 {
	r_1, _, _, mask_1 := DecodePPC64RotateMask(rlw)
	// For CLRLSLDI, it is more convenient to think of it as masking the
	// left bits, then rotating left by sh.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))

	// Combine the masks and adjust for the final left shift.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(int64(sld)))
	r_2 := GetPPC64Shiftsh(int64(sld))
	r_3 := (r_1 + r_2) & 31 // This can wrap.

	// Verify the result is still a valid bitmask of <= 32 bits.
	if !isPPC64WordRotateMask(int64(mask_3)) || uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}
1725
1726
1727
// mergePPC64AndRlwinm encodes the merged (AND mask (RLWINM [rlw] x)) as a
// RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64AndRlwinm(mask uint32, rlw int64) int64 {
	r, _, _, mask_rlw := DecodePPC64RotateMask(rlw)
	mask_out := (mask_rlw & uint64(mask))

	// Verify the result is still a valid bitmask of <= 32 bits.
	if !isPPC64WordRotateMask(int64(mask_out)) {
		return 0
	}
	return encodePPC64RotateMask(r, int64(mask_out), 32)
}
1738
1739
1740
// mergePPC64MovwzregRlwinm returns the encoded RLWINM constant if
// (MOVWZreg (RLWINM [rlw] x)) is equivalent to (RLWINM [rlw] x) alone,
// i.e. the mask cannot set any upper-word bits; otherwise 0.
func mergePPC64MovwzregRlwinm(rlw int64) int64 {
	_, mb, me, _ := DecodePPC64RotateMask(rlw)
	if mb > me {
		// A wrapping mask may select bits in the upper 32 bits.
		return 0
	}
	return rlw
}
1748
1749
1750
// mergePPC64RlwinmAnd encodes the merged (AND mask (RLWINM [rlw] x)) as a
// RLWINM auxint, or returns 0 if they cannot be merged. The AND mask is
// applied after the rotate, so it must be rotated into pre-rotate space
// before being combined with the RLWINM mask.
func mergePPC64RlwinmAnd(rlw int64, mask uint32) int64 {
	r, _, _, mask_rlw := DecodePPC64RotateMask(rlw)

	// Rotate the AND mask into the same space as the RLWINM mask.
	r_mask := bits.RotateLeft32(mask, int(r))

	mask_out := (mask_rlw & uint64(r_mask))

	// Verify the result is still a valid bitmask of <= 32 bits.
	if !isPPC64WordRotateMask(int64(mask_out)) {
		return 0
	}
	return encodePPC64RotateMask(r, int64(mask_out), 32)
}
1765
1766
1767
// mergePPC64SldiRlwinm encodes the merged (SLDconst [sldi] (RLWINM [rlw] x))
// as a RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64SldiRlwinm(sldi, rlw int64) int64 {
	r_1, mb, me, mask_1 := DecodePPC64RotateMask(rlw)
	if mb > me || mb < sldi {
		// Wrapping masks cannot be merged, and shifting the mask left
		// by sldi would discard set bits above bit 31.
		return 0
	}

	// Combine the masks and adjust for the final left shift.
	mask_3 := mask_1 << sldi
	r_3 := (r_1 + sldi) & 31 // This can wrap.

	// Verify the result is still a valid bitmask of <= 32 bits.
	if uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}
1785
1786
1787
// mergePPC64SldiSrw encodes the merged (SLDconst [sld] (SRWconst [srw] x))
// as a RLWINM auxint, or returns 0 if they cannot be merged.
func mergePPC64SldiSrw(sld, srw int64) int64 {
	if sld > srw || srw >= 32 {
		return 0
	}
	mask_r := uint32(0xFFFFFFFF) >> uint(srw)
	mask_l := uint32(0xFFFFFFFF) >> uint(sld)
	mask := (mask_r & mask_l) << uint(sld)
	return encodePPC64RotateMask((32-srw+sld)&31, int64(mask), 32)
}
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823 func convertPPC64OpToOpCC(op *Value) *Value {
1824 ccOpMap := map[Op]Op{
1825 OpPPC64ADD: OpPPC64ADDCC,
1826 OpPPC64ADDconst: OpPPC64ADDCCconst,
1827 OpPPC64AND: OpPPC64ANDCC,
1828 OpPPC64ANDN: OpPPC64ANDNCC,
1829 OpPPC64ANDconst: OpPPC64ANDCCconst,
1830 OpPPC64CNTLZD: OpPPC64CNTLZDCC,
1831 OpPPC64MULHDU: OpPPC64MULHDUCC,
1832 OpPPC64NEG: OpPPC64NEGCC,
1833 OpPPC64NOR: OpPPC64NORCC,
1834 OpPPC64OR: OpPPC64ORCC,
1835 OpPPC64RLDICL: OpPPC64RLDICLCC,
1836 OpPPC64SUB: OpPPC64SUBCC,
1837 OpPPC64XOR: OpPPC64XORCC,
1838 }
1839 b := op.Block
1840 opCC := b.NewValue0I(op.Pos, ccOpMap[op.Op], types.NewTuple(op.Type, types.TypeFlags), op.AuxInt)
1841 opCC.AddArgs(op.Args...)
1842 op.reset(OpSelect0)
1843 op.AddArgs(opCC)
1844 return op
1845 }
1846
1847
// convertPPC64RldiclAndccconst returns the ANDconst immediate equivalent
// to the RLDICL encoding sauxint, or 0 if there is none: the rotate must
// be zero and the mask must fit in 16 bits.
func convertPPC64RldiclAndccconst(sauxint int64) int64 {
	r, _, _, mask := DecodePPC64RotateMask(sauxint)
	if r != 0 || mask&0xFFFF != mask {
		return 0
	}
	return int64(mask)
}
1855
1856
// rotateLeft32 rotates the low 32 bits of v left by rotate and
// zero-extends the result.
func rotateLeft32(v, rotate int64) int64 {
	r := bits.RotateLeft32(uint32(v), int(rotate))
	return int64(r)
}
1860
// rotateRight64 rotates v right by rotate bits.
func rotateRight64(v, rotate int64) int64 {
	r := bits.RotateLeft64(uint64(v), -int(rotate))
	return int64(r)
}
1864
1865
// armBFAuxInt packs the lsb and width operands of an ARM(64) bitfield op
// into an auxint: width in the low 8 bits, lsb above it. It panics if
// either constant is out of range.
func armBFAuxInt(lsb, width int64) arm64BitField {
	if lsb < 0 || lsb > 63 {
		panic("ARM(64) bit field lsb constant out of range")
	}
	if width < 1 || lsb+width > 64 {
		panic("ARM(64) bit field width constant out of range")
	}
	return arm64BitField(width | lsb<<8)
}
1875
1876
// lsb returns the lsb part of the auxint field of an arm64 bitfield op.
func (bfc arm64BitField) lsb() int64 {
	return int64(uint64(bfc) >> 8)
}
1880
1881
// width returns the width part of the auxint field of an arm64 bitfield op.
func (bfc arm64BitField) width() int64 {
	return int64(bfc) & 0xff
}
1885
1886
// isARM64BFMask reports whether mask>>rshift is a non-zero, contiguous
// run of low-order ones that still fits within 64 bits when moved to lsb.
func isARM64BFMask(lsb, mask, rshift int64) bool {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	return shiftedMask != 0 && isPowerOfTwo(shiftedMask+1) && nto(shiftedMask)+lsb < 64
}
1891
1892
// arm64BFWidth returns the width of the low-order run of ones in
// mask>>rshift; the shifted mask must be non-zero.
func arm64BFWidth(mask, rshift int64) int64 {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	if shiftedMask == 0 {
		panic("ARM64 BF mask is zero")
	}
	return nto(shiftedMask)
}
1900
1901
1902
1903
1904 func registerizable(b *Block, typ *types.Type) bool {
1905 if typ.IsPtrShaped() || typ.IsFloat() || typ.IsBoolean() {
1906 return true
1907 }
1908 if typ.IsInteger() {
1909 return typ.Size() <= b.Func.Config.RegSize
1910 }
1911 return false
1912 }
1913
1914
// needRaceCleanup reports whether this racefuncenter/racefuncexit call
// can be removed: the race detector only needs the enter/exit events
// when the function contains other calls it instruments. For a removable
// racefuncenter, the store feeding its caller-PC argument is also
// unlinked as a side effect.
func needRaceCleanup(sym *AuxCall, v *Value) bool {
	f := v.Block.Func
	if !f.Config.Race {
		return false
	}
	if !isSameCall(sym, "runtime.racefuncenter") && !isSameCall(sym, "runtime.racefuncexit") {
		return false
	}
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpStaticCall, OpStaticLECall:
				// The racefuncenter/racefuncexit calls themselves, and
				// panic helpers the race detector does not instrument,
				// do not force the enter/exit events to stay.
				s := v.Aux.(*AuxCall).Fn.String()
				switch s {
				case "runtime.racefuncenter", "runtime.racefuncexit",
					"runtime.panicdivide", "runtime.panicwrap",
					"runtime.panicshift":
					continue
				}
				// Any other static call requires the race events.
				return false
			case OpPanicBounds, OpPanicExtend:
				// These are panic generators that are ok (like the static calls above).
			case OpClosureCall, OpInterCall, OpClosureLECall, OpInterLECall:
				// We must keep the race functions if there are any other call types.
				return false
			}
		}
	}
	if isSameCall(sym, "runtime.racefuncenter") {
		// TODO REGISTER ABI this needs to be cleaned up.
		// If we're removing racefuncenter, remove its argument store as well.
		if v.Args[0].Op != OpStore {
			if v.Op == OpStaticLECall {
				// There is no store to remove yet.
				return true
			}
			return false
		}
		// Splice the store out of the memory chain.
		mem := v.Args[0].Args[2]
		v.Args[0].reset(OpCopy)
		v.Args[0].AddArg(mem)
	}
	return true
}
1963
1964
// symIsRO reports whether sym is a read-only global with no relocations,
// meaning its contents can be read directly at compile time.
func symIsRO(sym Sym) bool {
	lsym := sym.(*obj.LSym)
	return lsym.Type == objabi.SRODATA && len(lsym.R) == 0
}
1969
1970
1971 func symIsROZero(sym Sym) bool {
1972 lsym := sym.(*obj.LSym)
1973 if lsym.Type != objabi.SRODATA || len(lsym.R) != 0 {
1974 return false
1975 }
1976 for _, b := range lsym.P {
1977 if b != 0 {
1978 return false
1979 }
1980 }
1981 return true
1982 }
1983
1984
1985
// isFixed32 reports whether the 4 bytes at sym+off are a known
// compile-time constant.
func isFixed32(c *Config, sym Sym, off int64) bool {
	return isFixed(c, sym, off, 4)
}
1989
1990
1991
1992 func isFixed(c *Config, sym Sym, off, size int64) bool {
1993 lsym := sym.(*obj.LSym)
1994 if lsym.Extra == nil {
1995 return false
1996 }
1997 if _, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
1998 if off == 2*c.PtrSize && size == 4 {
1999 return true
2000 }
2001 }
2002 return false
2003 }
2004 func fixed32(c *Config, sym Sym, off int64) int32 {
2005 lsym := sym.(*obj.LSym)
2006 if ti, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
2007 if off == 2*c.PtrSize {
2008 return int32(types.TypeHash(ti.Type.(*types.Type)))
2009 }
2010 }
2011 base.Fatalf("fixed32 data not known for %s:%d", sym, off)
2012 return 0
2013 }
2014
2015
2016
2017 func isFixedSym(sym Sym, off int64) bool {
2018 lsym := sym.(*obj.LSym)
2019 switch {
2020 case lsym.Type == objabi.SRODATA:
2021
2022 default:
2023 return false
2024 }
2025 for _, r := range lsym.R {
2026 if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
2027 return true
2028 }
2029 }
2030 return false
2031 }
// fixedSym returns the symbol referenced by the relocation at sym+off;
// callers should have verified the load with isFixedSym first.
// NOTE(review): unlike isFixedSym, r.Add == 0 is not re-checked here —
// confirm callers always guard via isFixedSym.
func fixedSym(f *Func, sym Sym, off int64) Sym {
	lsym := sym.(*obj.LSym)
	for _, r := range lsym.R {
		if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off {
			if strings.HasPrefix(r.Sym.Name, "type:") {
				// In case we're loading a type out of a dictionary, we need to record
				// that the containing function might put that type in an interface.
				// That information is currently recorded in relocations in the dictionary,
				// but if we perform this load at compile time the dictionary might be
				// dead and those relocations will be missing.
				reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
			} else if strings.HasPrefix(r.Sym.Name, "go:itab") {
				// Same as above, but via an itab: record that the itab's type
				// might be stored in an interface.
				reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
			}
			return r.Sym
		}
	}
	base.Fatalf("fixedSym data not known for %s:%d", sym, off)
	return nil
}
2054
2055
2056 func read8(sym Sym, off int64) uint8 {
2057 lsym := sym.(*obj.LSym)
2058 if off >= int64(len(lsym.P)) || off < 0 {
2059
2060
2061
2062
2063 return 0
2064 }
2065 return lsym.P[off]
2066 }
2067
2068
2069 func read16(sym Sym, off int64, byteorder binary.ByteOrder) uint16 {
2070 lsym := sym.(*obj.LSym)
2071
2072
2073 var src []byte
2074 if 0 <= off && off < int64(len(lsym.P)) {
2075 src = lsym.P[off:]
2076 }
2077 buf := make([]byte, 2)
2078 copy(buf, src)
2079 return byteorder.Uint16(buf)
2080 }
2081
2082
2083 func read32(sym Sym, off int64, byteorder binary.ByteOrder) uint32 {
2084 lsym := sym.(*obj.LSym)
2085 var src []byte
2086 if 0 <= off && off < int64(len(lsym.P)) {
2087 src = lsym.P[off:]
2088 }
2089 buf := make([]byte, 4)
2090 copy(buf, src)
2091 return byteorder.Uint32(buf)
2092 }
2093
2094
2095 func read64(sym Sym, off int64, byteorder binary.ByteOrder) uint64 {
2096 lsym := sym.(*obj.LSym)
2097 var src []byte
2098 if 0 <= off && off < int64(len(lsym.P)) {
2099 src = lsym.P[off:]
2100 }
2101 buf := make([]byte, 8)
2102 copy(buf, src)
2103 return byteorder.Uint64(buf)
2104 }
2105
2106
2107 func sequentialAddresses(x, y *Value, n int64) bool {
2108 if x == y && n == 0 {
2109 return true
2110 }
2111 if x.Op == Op386ADDL && y.Op == Op386LEAL1 && y.AuxInt == n && y.Aux == nil &&
2112 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
2113 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
2114 return true
2115 }
2116 if x.Op == Op386LEAL1 && y.Op == Op386LEAL1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
2117 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
2118 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
2119 return true
2120 }
2121 if x.Op == OpAMD64ADDQ && y.Op == OpAMD64LEAQ1 && y.AuxInt == n && y.Aux == nil &&
2122 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
2123 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
2124 return true
2125 }
2126 if x.Op == OpAMD64LEAQ1 && y.Op == OpAMD64LEAQ1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
2127 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
2128 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
2129 return true
2130 }
2131 return false
2132 }
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
// flagConstant holds the result of a flag-setting operation computed at
// compile time: the four condition bits N, Z, C, V packed into a byte.
type flagConstant uint8

// N reports whether the result of an operation is negative (high bit set).
func (fc flagConstant) N() bool {
	return fc&1 != 0
}

// Z reports whether the result of an operation is zero.
func (fc flagConstant) Z() bool {
	return fc&2 != 0
}

// C reports whether an unsigned add overflowed (carry), or an unsigned
// subtract did not underflow (borrow clear) — see subFlags64/subFlags32.
func (fc flagConstant) C() bool {
	return fc&4 != 0
}

// V reports whether a signed operation overflowed or underflowed.
func (fc flagConstant) V() bool {
	return fc&8 != 0
}
2168
// eq reports whether the comparison's operands were equal.
func (fc flagConstant) eq() bool {
	return fc.Z()
}

// ne reports whether the comparison's operands were unequal.
func (fc flagConstant) ne() bool {
	return !fc.Z()
}

// lt is signed less-than.
func (fc flagConstant) lt() bool {
	return fc.N() != fc.V()
}

// le is signed less-than-or-equal.
func (fc flagConstant) le() bool {
	return fc.Z() || fc.lt()
}

// gt is signed greater-than.
func (fc flagConstant) gt() bool {
	return !fc.Z() && fc.ge()
}

// ge is signed greater-than-or-equal.
func (fc flagConstant) ge() bool {
	return fc.N() == fc.V()
}

// ult is unsigned less-than.
func (fc flagConstant) ult() bool {
	return !fc.C()
}

// ule is unsigned less-than-or-equal.
func (fc flagConstant) ule() bool {
	return fc.Z() || fc.ult()
}

// ugt is unsigned greater-than.
func (fc flagConstant) ugt() bool {
	return !fc.Z() && fc.uge()
}

// uge is unsigned greater-than-or-equal.
func (fc flagConstant) uge() bool {
	return fc.C()
}
2199
// ltNoov is signed less-than, additionally requiring that no signed
// overflow occurred (V clear).
func (fc flagConstant) ltNoov() bool {
	return fc.lt() && !fc.V()
}

// leNoov is signed less-than-or-equal with no signed overflow.
func (fc flagConstant) leNoov() bool {
	return fc.le() && !fc.V()
}

// gtNoov is signed greater-than with no signed overflow.
func (fc flagConstant) gtNoov() bool {
	return fc.gt() && !fc.V()
}

// geNoov is signed greater-than-or-equal with no signed overflow.
func (fc flagConstant) geNoov() bool {
	return fc.ge() && !fc.V()
}

// String renders the four condition bits for debugging.
func (fc flagConstant) String() string {
	return fmt.Sprintf("N=%v,Z=%v,C=%v,V=%v", fc.N(), fc.Z(), fc.C(), fc.V())
}
2216
// flagConstantBuilder accumulates the four condition bits before they
// are packed into a flagConstant by encode.
type flagConstantBuilder struct {
	N bool // negative
	Z bool // zero
	C bool // carry (or no borrow, for subtraction)
	V bool // signed overflow
}
2223
2224 func (fcs flagConstantBuilder) encode() flagConstant {
2225 var fc flagConstant
2226 if fcs.N {
2227 fc |= 1
2228 }
2229 if fcs.Z {
2230 fc |= 2
2231 }
2232 if fcs.C {
2233 fc |= 4
2234 }
2235 if fcs.V {
2236 fc |= 8
2237 }
2238 return fc
2239 }
2240
2241
2242
2243
2244
2245
// addFlags64 returns the flags that result from computing the 64-bit sum x+y.
func addFlags64(x, y int64) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x+y == 0
	fcb.N = x+y < 0
	fcb.C = uint64(x+y) < uint64(x)                                     // unsigned overflow
	fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0 // signed overflow
	return fcb.encode()
}
2254
2255
// subFlags64 returns the flags that result from computing the 64-bit
// difference x-y. C uses the arm-style "no borrow" sense: set when
// y <= x unsigned.
func subFlags64(x, y int64) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x-y == 0
	fcb.N = x-y < 0
	fcb.C = uint64(y) <= uint64(x)                                      // no borrow occurred
	fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0 // signed overflow
	return fcb.encode()
}
2264
2265
// addFlags32 returns the flags that result from computing the 32-bit sum x+y.
func addFlags32(x, y int32) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x+y == 0
	fcb.N = x+y < 0
	fcb.C = uint32(x+y) < uint32(x)                                     // unsigned overflow
	fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0 // signed overflow
	return fcb.encode()
}
2274
2275
// subFlags32 returns the flags that result from computing the 32-bit
// difference x-y. C uses the arm-style "no borrow" sense: set when
// y <= x unsigned.
func subFlags32(x, y int32) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x-y == 0
	fcb.N = x-y < 0
	fcb.C = uint32(y) <= uint32(x)                                      // no borrow occurred
	fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0 // signed overflow
	return fcb.encode()
}
2284
2285
2286
// logicFlags64 returns the flags that result from a 64-bit logical
// operation producing x; C and V are left clear.
func logicFlags64(x int64) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x == 0
	fcb.N = x < 0
	return fcb.encode()
}
2293
2294
2295
// logicFlags32 returns the flags that result from a 32-bit logical
// operation producing x; C and V are left clear.
func logicFlags32(x int32) flagConstant {
	var fcb flagConstantBuilder
	fcb.Z = x == 0
	fcb.N = x < 0
	return fcb.encode()
}
2302
// makeJumpTableSym creates the static symbol that will hold block b's
// jump table.
func makeJumpTableSym(b *Block) *obj.LSym {
	s := base.Ctxt.Lookup(fmt.Sprintf("%s.jump%d", b.Func.fe.Func().LSym.Name, b.ID))
	// The jump table symbol is only referenced from its own function.
	s.Set(obj.AttrStatic, true)
	return s
}
2309
2310
2311
// canRotate reports whether the architecture supports rotate
// instructions for integers of the given bit width.
func canRotate(c *Config, bits int64) bool {
	if bits > c.PtrSize*8 {
		// Don't rewrite to rotates for types wider than the native word.
		return false
	}
	switch c.arch {
	case "386", "amd64", "arm64", "loong64", "riscv64":
		return true
	case "arm", "s390x", "ppc64", "ppc64le", "wasm":
		// These architectures only rotate at widths of 32 bits or more.
		return bits >= 32
	default:
		return false
	}
}
2326
2327
// isARM64bitcon reports whether x can be encoded as the immediate of an
// ARM64 logical instruction: a rotation of a contiguous run of ones,
// replicated with a period of 2, 4, 8, 16, 32 or 64 bits.
func isARM64bitcon(x uint64) bool {
	if x == 1<<64-1 || x == 0 {
		return false
	}
	// determine the period and sign-extend a unit to 64 bits
	switch {
	case x != x>>32|x<<32:
		// period is 64
		// nothing to do
	case x != x>>16|x<<48:
		// period is 32
		x = uint64(int64(int32(x)))
	case x != x>>8|x<<56:
		// period is 16
		x = uint64(int64(int16(x)))
	case x != x>>4|x<<60:
		// period is 8
		x = uint64(int64(int8(x)))
	default:
		// period is 4 or 2, always true
		// 0001, 0010, 0100, 1000 -- 0001 rotate
		// 0011, 0110, 1100, 1001 -- 0011 rotate
		// 0111, 1011, 1101, 1110 -- 0111 rotate
		// 0101, 1010             -- 01   rotate, repeat
		return true
	}
	return sequenceOfOnes(x) || sequenceOfOnes(^x)
}
2356
2357
// sequenceOfOnes reports whether the set bits of x form one contiguous
// run (x == 0 also passes this test; callers exclude it beforehand).
func sequenceOfOnes(x uint64) bool {
	// Adding the lowest set bit to a contiguous run of ones clears the
	// run, leaving at most a single bit set.
	y := (x & -x) + x
	return y&(y-1) == 0
}
2363
2364
// isARM64addcon reports whether v can be encoded as the immediate of an
// ARM64 ADD/SUB instruction: a non-negative 12-bit value, optionally
// shifted left by 12 bits.
func isARM64addcon(v int64) bool {
	switch {
	case v < 0:
		return false
	case v&0xFFF == 0:
		// All low bits clear: try the shifted-by-12 encoding.
		return v>>12 <= 0xFFF
	default:
		return v <= 0xFFF
	}
}
2375
2376
2377
2378
// setPos sets v's position to pos and reports true, so it can be used
// as a condition inside rewrite rules.
func setPos(v *Value, pos src.XPos) bool {
	v.Pos = pos
	return true
}
2383
2384
2385
2386
// isNonNegative reports whether v is known to be non-negative when
// interpreted as a signed integer of its type. This is a conservative,
// purely syntactic check; the prove pass handles the general case.
func isNonNegative(v *Value) bool {
	if !v.Type.IsInteger() {
		v.Fatalf("isNonNegative bad type: %v", v.Type)
	}
	// NOTE(review): unsigned types are not automatically accepted here;
	// presumably because SSA values are not reliably typed for that —
	// confirm before changing.

	switch v.Op {
	case OpConst64:
		return v.AuxInt >= 0

	case OpConst32:
		return int32(v.AuxInt) >= 0

	case OpConst16:
		return int16(v.AuxInt) >= 0

	case OpConst8:
		return int8(v.AuxInt) >= 0

	case OpStringLen, OpSliceLen, OpSliceCap,
		OpZeroExt8to64, OpZeroExt16to64, OpZeroExt32to64,
		OpZeroExt8to32, OpZeroExt16to32, OpZeroExt8to16,
		OpCtz64, OpCtz32, OpCtz16, OpCtz8,
		OpCtz64NonZero, OpCtz32NonZero, OpCtz16NonZero, OpCtz8NonZero,
		OpBitLen64, OpBitLen32, OpBitLen16, OpBitLen8:
		// Lengths, zero extensions and bit counts cannot set the sign bit.
		return true

	case OpRsh64Ux64, OpRsh32Ux64:
		// An unsigned right shift by at least one bit clears the sign bit.
		by := v.Args[1]
		return by.Op == OpConst64 && by.AuxInt > 0

	case OpRsh64x64, OpRsh32x64, OpRsh8x64, OpRsh16x64, OpRsh32x32, OpRsh64x32,
		OpSignExt32to64, OpSignExt16to64, OpSignExt8to64, OpSignExt16to32, OpSignExt8to32:
		// Arithmetic shifts and sign extensions preserve the sign.
		return isNonNegative(v.Args[0])

	case OpAnd64, OpAnd32, OpAnd16, OpAnd8:
		// AND with any non-negative operand clears the sign bit.
		return isNonNegative(v.Args[0]) || isNonNegative(v.Args[1])

	case OpMod64, OpMod32, OpMod16, OpMod8,
		OpDiv64, OpDiv32, OpDiv16, OpDiv8,
		OpOr64, OpOr32, OpOr16, OpOr8,
		OpXor64, OpXor32, OpXor16, OpXor8:
		// These cannot produce a negative result from two non-negative
		// operands. (Add is deliberately absent: it can overflow.)
		return isNonNegative(v.Args[0]) && isNonNegative(v.Args[1])

	}
	return false
}
2438
// rewriteStructLoad rewrites a Load of a struct type into a StructMake
// whose arguments are individual Loads of each field at its offset.
func rewriteStructLoad(v *Value) *Value {
	b := v.Block
	ptr := v.Args[0]
	mem := v.Args[1]

	t := v.Type
	args := make([]*Value, t.NumFields())
	for i := range args {
		ft := t.FieldType(i)
		addr := b.NewValue1I(v.Pos, OpOffPtr, ft.PtrTo(), t.FieldOff(i), ptr)
		args[i] = b.NewValue2(v.Pos, OpLoad, ft, addr, mem)
	}

	v.reset(OpStructMake)
	v.AddArgs(args...)
	return v
}
2456
// rewriteStructStore rewrites a Store of a StructMake into a chain of
// per-field Stores, returning the resulting memory value.
func rewriteStructStore(v *Value) *Value {
	b := v.Block
	dst := v.Args[0]
	x := v.Args[1]
	if x.Op != OpStructMake {
		base.Fatalf("invalid struct store: %v", x)
	}
	mem := v.Args[2]

	t := x.Type
	for i, arg := range x.Args {
		ft := t.FieldType(i)

		addr := b.NewValue1I(v.Pos, OpOffPtr, ft.PtrTo(), t.FieldOff(i), dst)
		mem = b.NewValue3A(v.Pos, OpStore, types.TypeMem, typeToAux(ft), addr, arg, mem)
	}

	return mem
}
2476
2477
2478
2479
// isDirectType reports whether v represents a type (a *runtime._type)
// whose value is stored directly in an interface word.
func isDirectType(v *Value) bool {
	return isDirectType1(v)
}
2483
2484
// isDirectType1 handles the cases of isDirectType where v is a type value:
// either the ITab of an interface make, or the address of a type descriptor.
func isDirectType1(v *Value) bool {
	switch v.Op {
	case OpITab:
		return isDirectType2(v.Args[0])
	case OpAddr:
		lsym := v.Aux.(*obj.LSym)
		if lsym.Extra == nil {
			return false
		}
		if ti, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
			return types.IsDirectIface(ti.Type.(*types.Type))
		}
	}
	return false
}
2500
2501
// isDirectType2 handles the cases of isDirectType where v is an interface value.
func isDirectType2(v *Value) bool {
	switch v.Op {
	case OpIMake:
		return isDirectType1(v.Args[0])
	}
	return false
}
2509
2510
2511
2512
// isDirectIface reports whether v represents an itab (a *runtime.itab)
// for a type stored directly in an interface word. The recursion through
// Phis and IMakes is bounded at depth 9.
func isDirectIface(v *Value) bool {
	return isDirectIface1(v, 9)
}
2516
2517
// isDirectIface1 handles the cases of isDirectIface where v is an itab value.
func isDirectIface1(v *Value, depth int) bool {
	if depth == 0 {
		return false
	}
	switch v.Op {
	case OpITab:
		return isDirectIface2(v.Args[0], depth-1)
	case OpAddr:
		lsym := v.Aux.(*obj.LSym)
		if lsym.Extra == nil {
			return false
		}
		if ii, ok := (*lsym.Extra).(*obj.ItabInfo); ok {
			return types.IsDirectIface(ii.Type.(*types.Type))
		}
	case OpConstNil:
		// A nil itab implies a nil data word, which is trivially direct.
		return true
	}
	return false
}
2540
2541
// isDirectIface2 handles the cases of isDirectIface where v is an interface value.
func isDirectIface2(v *Value, depth int) bool {
	if depth == 0 {
		return false
	}
	switch v.Op {
	case OpIMake:
		return isDirectIface1(v.Args[0], depth-1)
	case OpPhi:
		// A merge is direct only if every input is direct.
		for _, a := range v.Args {
			if !isDirectIface2(a, depth-1) {
				return false
			}
		}
		return true
	}
	return false
}
2559
// bitsAdd64 computes the full sum x+y+carry, returning the low 64 bits
// and the carry-out as signed integers for use by SSA rewrite rules.
func bitsAdd64(x, y, carry int64) (r struct{ sum, carry int64 }) {
	lo, c := bits.Add64(uint64(x), uint64(y), uint64(carry))
	r.sum = int64(lo)
	r.carry = int64(c)
	return
}
2565
// bitsMulU64 computes the full 128-bit product of x and y interpreted as
// unsigned 64-bit values, returned as signed halves.
func bitsMulU64(x, y int64) (r struct{ hi, lo int64 }) {
	upper, lower := bits.Mul64(uint64(x), uint64(y))
	r.hi = int64(upper)
	r.lo = int64(lower)
	return
}
// bitsMulU32 computes the full 64-bit product of x and y interpreted as
// unsigned 32-bit values, returned as signed halves.
func bitsMulU32(x, y int32) (r struct{ hi, lo int32 }) {
	upper, lower := bits.Mul32(uint32(x), uint32(y))
	r.hi = int32(upper)
	r.lo = int32(lower)
	return
}
2576
2577
// flagify rewrites v, which must have a flag-generating variant, into
// (Select0 (op-flags ...)) so that the flags result becomes available.
// It reports true so it can be used as a condition in rewrite rules.
func flagify(v *Value) bool {
	var flagVersion Op
	switch v.Op {
	case OpAMD64ADDQconst:
		flagVersion = OpAMD64ADDQconstflags
	case OpAMD64ADDLconst:
		flagVersion = OpAMD64ADDLconstflags
	default:
		base.Fatalf("can't flagify op %s", v.Op)
	}
	inner := v.copyInto(v.Block)
	inner.Op = flagVersion
	inner.Type = types.NewTuple(v.Type, types.TypeFlags)
	v.reset(OpSelect0)
	v.AddArg(inner)
	return true
}
2595
2596
// PanicBoundsC is the aux type for panic-bounds calls where one operand
// is a compile-time constant.
type PanicBoundsC struct {
	C int64
}

// PanicBoundsCC is the aux type for panic-bounds calls where both
// operands are compile-time constants.
type PanicBoundsCC struct {
	Cx int64
	Cy int64
}

// CanBeAnSSAAux marks PanicBoundsC as usable as a Value's Aux field.
func (p PanicBoundsC) CanBeAnSSAAux() {
}

// CanBeAnSSAAux marks PanicBoundsCC as usable as a Value's Aux field.
func (p PanicBoundsCC) CanBeAnSSAAux() {
}
2611
// auxToPanicBoundsC extracts a PanicBoundsC from an Aux value.
func auxToPanicBoundsC(i Aux) PanicBoundsC {
	return i.(PanicBoundsC)
}

// auxToPanicBoundsCC extracts a PanicBoundsCC from an Aux value.
func auxToPanicBoundsCC(i Aux) PanicBoundsCC {
	return i.(PanicBoundsCC)
}

// panicBoundsCToAux wraps a PanicBoundsC as an Aux value.
func panicBoundsCToAux(p PanicBoundsC) Aux {
	return p
}

// panicBoundsCCToAux wraps a PanicBoundsCC as an Aux value.
func panicBoundsCCToAux(p PanicBoundsCC) Aux {
	return p
}
2624
2625
2626
2627
// imakeOfStructMake rewrites (IMake itab (StructMake ...)) so that the
// data word is the struct's first field of nonzero size.
// NOTE(review): this assumes at most one field has nonzero size (a
// pointer-shaped struct) — confirm against the rewrite rules that call it.
func imakeOfStructMake(v *Value) *Value {
	// Zero-size fields carry no data; pick the field that does.
	var arg *Value
	for _, a := range v.Args[1].Args {
		if a.Type.Size() > 0 {
			arg = a
			break
		}
	}
	return v.Block.NewValue2(v.Pos, OpIMake, v.Type, v.Args[0], arg)
}
2638