package ssa

import (
	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/types"
	"cmd/internal/src"
	"fmt"
)

func postExpandCallsDecompose(f *Func) {
	decomposeUser(f)
	decomposeBuiltIn(f)
}

func expandCalls(f *Func) {
	// expandCalls rewrites a function's "late" calls (OpStaticLECall and
	// friends), its incoming Args, the SelectN results of calls, and its
	// multi-value returns so that aggregate values are decomposed into the
	// register- and memory-resident pieces dictated by the ABI information
	// attached to each call.
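	//
	// For example (illustrative sketch only; the actual assignment of pieces
	// to registers or stack depends on the target ABI), a call
	//	t1 = StaticLECall <(int, mem)> {AuxCall: f(string)} s mem
	// whose string argument s is assigned to two integer registers becomes
	//	p  = StringPtr <*uint8> s
	//	n  = StringLen <int> s
	//	t1 = StaticCall <(int, mem)> {AuxCall: f(string)} p n mem
	// while arguments and results assigned to memory become stores to, and
	// loads from, offsets from SP.
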
	sp, _ := f.spSb()

	x := &expandState{
		f:               f,
		debug:           f.pass.debug,
		regSize:         f.Config.RegSize,
		sp:              sp,
		typs:            &f.Config.Types,
		wideSelects:     make(map[*Value]*Value),
		commonArgs:      make(map[selKey]*Value),
		commonSelectors: make(map[selKey]*Value),
		memForCall:      make(map[ID]*Value),
	}

	// When a 64-bit integer must be split into two 32-bit words (i.e. the
	// value is wider than a register), record which half is selected first
	// and with what type; the order depends on endianness.
	if f.Config.BigEndian {
		x.firstOp = OpInt64Hi
		x.secondOp = OpInt64Lo
		x.firstType = x.typs.Int32
		x.secondType = x.typs.UInt32
	} else {
		x.firstOp = OpInt64Lo
		x.secondOp = OpInt64Hi
		x.firstType = x.typs.UInt32
		x.secondType = x.typs.Int32
	}
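	// E.g. on a 32-bit little-endian target (sketch only), a uint64 value v
	// is decomposed as
	//	lo = Int64Lo <uint32> v
	//	hi = Int64Hi <uint32> v
	// and, on the consuming side, reassembled with Int64Make hi lo.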

	// Collect the values that need rewriting: result selects, the calls
	// themselves, incoming Args, and blocks that return multiple values.
	var selects []*Value
	var calls []*Value
	var args []*Value
	var exitBlocks []*Block

	var m0 *Value

	// Scan the function, accumulating the work lists and doing a few easy
	// rewrites in place (recording each call's memory result, rewriting
	// OpSelectNAddr into an offset from SP).
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpInitMem:
				m0 = v

			case OpClosureLECall, OpInterLECall, OpStaticLECall, OpTailLECall:
				calls = append(calls, v)

			case OpArg:
				args = append(args, v)

			case OpStore:
				if a := v.Args[1]; a.Op == OpSelectN && !CanSSA(a.Type) {
					if a.Uses > 1 {
						panic(fmt.Errorf("Saw double use of wide SelectN %s operand of Store %s",
							a.LongString(), v.LongString()))
					}
					x.wideSelects[a] = v
				}

			case OpSelectN:
				if v.Type == types.TypeMem {
					// This is the call's memory result; point it at the right
					// index and record it (there can be only one).
					call := v.Args[0]
					aux := call.Aux.(*AuxCall)
					mem := x.memForCall[call.ID]
					if mem == nil {
						v.AuxInt = int64(aux.abiInfo.OutRegistersUsed())
						x.memForCall[call.ID] = v
					} else {
						panic(fmt.Errorf("Saw two memories for call %v, %v and %v", call, mem, v))
					}
				} else {
					selects = append(selects, v)
				}

			case OpSelectNAddr:
				call := v.Args[0]
				which := v.AuxInt
				aux := call.Aux.(*AuxCall)
				pt := v.Type
				off := x.offsetFrom(x.f.Entry, x.sp, aux.OffsetOfResult(which), pt)
				v.copyOf(off)
			}
		}

		// Collect the blocks that return multiple values; their results are
		// rewritten after the calls and selects below.
		if isBlockMultiValueExit(b) {
			exitBlocks = append(exitBlocks, b)
		}
	}

	// Rewrite the function's incoming Args into assemblies of their ABI pieces.
	for _, v := range args {
		var rc registerCursor
		a := x.prAssignForArg(v)
		aux := x.f.OwnAux
		regs := a.Registers
		var offset int64
		if len(regs) == 0 {
			offset = a.FrameOffset(aux.abiInfo)
		}
		auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(f.Entry.Pos, f.Entry, v, v, m0, v.Type, rc)
	}

	// Rewrite each SelectN of a call's results into an assembly of its pieces
	// (or, for wide results that feed a Store, into stores or a Move).
	for _, v := range selects {
		if v.Op == OpInvalid {
			continue
		}

		call := v.Args[0]
		aux := call.Aux.(*AuxCall)
		mem := x.memForCall[call.ID]
		if mem == nil {
			mem = call.Block.NewValue1I(call.Pos, OpSelectN, types.TypeMem, int64(aux.abiInfo.OutRegistersUsed()), call)
			x.memForCall[call.ID] = mem
		}

		i := v.AuxInt
		regs := aux.RegsOfResult(i)

		// A wide (non-SSA-able) result that feeds a Store is handled
		// specially: its pieces are written directly to the Store's
		// destination rather than being assembled into a value.
		if store := x.wideSelects[v]; store != nil {
			storeAddr := store.Args[0]
			mem := store.Args[2]
			if len(regs) > 0 {
				// The result is in registers: store each piece to the destination.
				var rc registerCursor
				rc.init(regs, aux.abiInfo, nil, storeAddr, 0)
				mem = x.rewriteWideSelectToStores(call.Pos, call.Block, v, mem, v.Type, rc)
				store.copyOf(mem)
			} else {
				// The result is already in memory at its ABI offset from SP;
				// copy it to the destination with a single Move.
				offset := aux.OffsetOfResult(i)
				auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))

				move := store.Block.NewValue3A(store.Pos, OpMove, types.TypeMem, v.Type, storeAddr, auxBase, mem)
				move.AuxInt = v.Type.Size()
				store.copyOf(move)
			}
			continue
		}

		var auxBase *Value
		if len(regs) == 0 {
			offset := aux.OffsetOfResult(i)
			auxBase = x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		}
		var rc registerCursor
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(call.Pos, call.Block, v, v, mem, v.Type, rc)
	}

	rewriteCall := func(v *Value, newOp Op, argStart int) {
		// Rewrite the call's arguments (threading memory through any stores
		// that creates), then convert the call to its non-"LE" form and give
		// it the multi-result type implied by its ABI.
		x.rewriteCallArgs(v, argStart)
		v.Op = newOp
		rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
		v.Type = types.NewResults(append(rts, types.TypeMem))
	}

	// Rewrite the calls themselves.
	for _, v := range calls {
		switch v.Op {
		case OpStaticLECall:
			rewriteCall(v, OpStaticCall, 0)
		case OpTailLECall:
			rewriteCall(v, OpTailCall, 0)
		case OpClosureLECall:
			rewriteCall(v, OpClosureCall, 2)
		case OpInterLECall:
			rewriteCall(v, OpInterCall, 1)
		}
	}

	// Rewrite the results of the multi-value exit blocks.
	for _, b := range exitBlocks {
		v := b.Controls[0]
		x.rewriteFuncResults(v, b, f.OwnAux)
		b.SetControl(v)
	}

}

func (x *expandState) rewriteFuncResults(v *Value, b *Block, aux *AuxCall) {
	// This is very similar to rewriteCallArgs, except that the memory-bound
	// destinations are the function's own result slots (OpLocalAddr) instead
	// of the argument area of a call.

	m0 := v.MemoryArg()
	mem := m0

	allResults := []*Value{}
	var oldArgs []*Value
	argsWithoutMem := v.Args[:len(v.Args)-1]

	for j, a := range argsWithoutMem {
		oldArgs = append(oldArgs, a)
		i := int64(j)
		auxType := aux.TypeOfResult(i)
		auxBase := b.NewValue2A(v.Pos, OpLocalAddr, types.NewPtr(auxType), aux.NameOfResult(i), x.sp, mem)
		auxOffset := int64(0)
		aRegs := aux.RegsOfResult(int64(j))
		if a.Op == OpDereference {
			a.Op = OpLoad
		}
		var rc registerCursor
		var result *[]*Value
		if len(aRegs) > 0 {
			result = &allResults
		} else {
			if a.Op == OpLoad && a.Args[0].Op == OpLocalAddr && a.Args[0].Aux == aux.NameOfResult(i) {
				continue // self-move to the result slot; nothing to do
			}
		}
		rc.init(aRegs, aux.abiInfo, result, auxBase, auxOffset)
		mem = x.decomposeAsNecessary(v.Pos, b, a, mem, rc)
	}
	v.resetArgs()
	v.AddArgs(allResults...)
	v.AddArg(mem)
	for _, a := range oldArgs {
		if a.Uses == 0 {
			if x.debug > 1 {
				x.Printf("...marking %v unused\n", a.LongString())
			}
			x.invalidateRecursively(a)
		}
	}
	v.Type = types.NewResults(append(abi.RegisterTypes(aux.abiInfo.OutParams()), types.TypeMem))
	return
}

func (x *expandState) rewriteCallArgs(v *Value, firstArg int) {
	if x.debug > 1 {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("rewriteCallArgs(%s; %d)\n", v.LongString(), firstArg)
	}

	aux := v.Aux.(*AuxCall)
	m0 := v.MemoryArg()
	mem := m0
	allResults := []*Value{}
	oldArgs := []*Value{}
	argsWithoutMem := v.Args[firstArg : len(v.Args)-1]

	sp := x.sp
	if v.Op == OpTailLECall {
		// Tail calls store their arguments in the caller's frame, so use the
		// caller's SP as the base for argument addressing.
		sp = v.Block.NewValue1(src.NoXPos, OpGetCallerSP, x.typs.Uintptr, mem)
	}

	for i, a := range argsWithoutMem {
		oldArgs = append(oldArgs, a)
		auxI := int64(i)
		aRegs := aux.RegsOfArg(auxI)
		aType := aux.TypeOfArg(auxI)

		if a.Op == OpDereference {
			a.Op = OpLoad
		}
		var rc registerCursor
		var result *[]*Value
		var aOffset int64
		if len(aRegs) > 0 {
			result = &allResults
		} else {
			aOffset = aux.OffsetOfArg(auxI)
		}
		if v.Op == OpTailLECall && a.Op == OpArg && a.AuxInt == 0 {
			// A tail call frequently passes an incoming parameter through
			// unchanged; if the argument is that parameter and it already
			// lies at the correct stack offset, skip the copy.
			n := a.Aux.(*ir.Name)
			if n.Class == ir.PPARAM && n.FrameOffset()+x.f.Config.ctxt.Arch.FixedFrameSize == aOffset {
				continue
			}
		}
		if x.debug > 1 {
			x.Printf("...storeArg %s, %v, %d\n", a.LongString(), aType, aOffset)
		}

		rc.init(aRegs, aux.abiInfo, result, sp, aOffset)
		mem = x.decomposeAsNecessary(v.Pos, v.Block, a, mem, rc)
	}
	var preArgStore [2]*Value
	preArgs := append(preArgStore[:0], v.Args[0:firstArg]...)
	v.resetArgs()
	v.AddArgs(preArgs...)
	v.AddArgs(allResults...)
	v.AddArg(mem)
	for _, a := range oldArgs {
		if a.Uses == 0 {
			x.invalidateRecursively(a)
		}
	}

	return
}

// decomposePair rewrites a value a that splits into exactly two parts,
// selected by ops o0 and o1 with types t0 and t1, recurring into each part
// and threading the memory through.
func (x *expandState) decomposePair(pos src.XPos, b *Block, a, mem *Value, t0, t1 *types.Type, o0, o1 Op, rc *registerCursor) *Value {
	e := b.NewValue1(pos, o0, t0, a)
	pos = pos.WithNotStmt()
	mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
	e = b.NewValue1(pos, o1, t1, a)
	mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t1))
	return mem
}

// decomposeOne rewrites a single part of a value a, selected by op o0 with
// type t0, recurring into it and threading the memory through.
func (x *expandState) decomposeOne(pos src.XPos, b *Block, a, mem *Value, t0 *types.Type, o0 Op, rc *registerCursor) *Value {
	e := b.NewValue1(pos, o0, t0, a)
	pos = pos.WithNotStmt()
	mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
	return mem
}

// decomposeAsNecessary converts a value a, as needed, into the sequence of
// stores and/or register assignments described by the register cursor rc,
// recursively decomposing aggregates into their elements. m0 is the incoming
// memory; the returned value is the memory after any stores emitted here.
// Leaves are either appended to rc's register-value list or stored at their
// offset from rc.storeDest.
func (x *expandState) decomposeAsNecessary(pos src.XPos, b *Block, a, m0 *Value, rc registerCursor) *Value {
	if x.debug > 1 {
		x.indent(3)
		defer x.indent(-3)
	}
	at := a.Type
	if at.Size() == 0 {
		return m0
	}
	if a.Op == OpDereference {
		a.Op = OpLoad
	}

	if !rc.hasRegs() && !CanSSA(at) {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		if a.Op == OpLoad {
			m0 = b.NewValue3A(pos, OpMove, types.TypeMem, at, dst, a.Args[0], m0)
			m0.AuxInt = at.Size()
			return m0
		} else {
			panic(fmt.Errorf("Store of not a load"))
		}
	}

	mem := m0
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := b.NewValue1I(pos, OpArraySelect, et, i, a)
			pos = pos.WithNotStmt()
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSTRUCT:
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := b.NewValue1I(pos, OpStructSelect, et, int64(i), a)
			pos = pos.WithNotStmt()
			if x.debug > 1 {
				x.Printf("...recur decompose %s, %v\n", e.LongString(), et)
			}
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSLICE:
		mem = x.decomposeOne(pos, b, a, mem, at.Elem().PtrTo(), OpSlicePtr, &rc)
		pos = pos.WithNotStmt()
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceLen, &rc)
		return x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceCap, &rc)

	case types.TSTRING:
		return x.decomposePair(pos, b, a, mem, x.typs.BytePtr, x.typs.Int, OpStringPtr, OpStringLen, &rc)

	case types.TINTER:
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Uintptr, OpITab, &rc)
		pos = pos.WithNotStmt()

		if a.Op == OpIMake {
			data := a.Args[1]
			for data.Op == OpStructMake || data.Op == OpArrayMake1 {
				// The data word is wrapped in a struct or one-element array;
				// unwrap to the single non-zero-size element inside.
				for _, a := range data.Args {
					if a.Type.Size() > 0 {
						data = a
						break
					}
				}
			}
			return x.decomposeAsNecessary(pos, b, data, mem, rc.next(data.Type))
		}
		return x.decomposeOne(pos, b, a, mem, x.typs.BytePtr, OpIData, &rc)

	case types.TCOMPLEX64:
		return x.decomposePair(pos, b, a, mem, x.typs.Float32, x.typs.Float32, OpComplexReal, OpComplexImag, &rc)

	case types.TCOMPLEX128:
		return x.decomposePair(pos, b, a, mem, x.typs.Float64, x.typs.Float64, OpComplexReal, OpComplexImag, &rc)

	case types.TINT64:
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.firstType, x.secondType, x.firstOp, x.secondOp, &rc)
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.typs.UInt32, x.typs.UInt32, x.firstOp, x.secondOp, &rc)
		}
	}

	// The value is a leaf: either assign it to the next register or store it
	// at its offset from the store destination.
	if rc.hasRegs() {
		if x.debug > 1 {
			x.Printf("...recur addArg %s\n", a.LongString())
		}
		rc.addArg(a)
	} else {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		mem = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, mem)
	}

	return mem
}

// rewriteSelectOrArg rewrites the OpArg or OpSelectN value a, whose original
// container value is container and whose type is at, into an assembly of its
// parts: register-resident Args/SelectNs, narrower stack Args, or loads from
// the stack-resident portion of a call's results, as directed by the register
// cursor rc. m0 is the memory to use for any loads. If a is nil a new value
// is created; otherwise a is rewritten in place. Identical selections are
// deduplicated through x.commonSelectors and x.commonArgs.
func (x *expandState) rewriteSelectOrArg(pos src.XPos, b *Block, container, a, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at == types.TypeMem {
		a.copyOf(m0)
		return a
	}

	makeOf := func(a *Value, op Op, args []*Value) *Value {
		if a == nil {
			a = b.NewValue0(pos, op, at)
			a.AddArgs(args...)
		} else {
			a.resetArgs()
			a.Aux, a.AuxInt = nil, 0
			a.Pos, a.Op, a.Type = pos, op, at
			a.AddArgs(args...)
		}
		return a
	}

	if at.Size() == 0 {
		// Build the zero-sized aggregate anyway, for consistency.
		if at.IsArray() {
			return makeOf(a, OpArrayMake0, nil)
		}
		if at.IsStruct() {
			return makeOf(a, OpStructMake, nil)
		}
		return a
	}

	sk := selKey{from: container, size: 0, offsetOrIndex: rc.storeOffset, typ: at}
	dupe := x.commonSelectors[sk]
	if dupe != nil {
		if a == nil {
			return dupe
		}
		a.copyOf(dupe)
		return a
	}

	var argStore [10]*Value
	args := argStore[:0]

	addArg := func(a0 *Value) {
		if a0 == nil {
			as := "<nil>"
			if a != nil {
				as = a.LongString()
			}
			panic(fmt.Errorf("a0 should not be nil, a=%v, container=%v, at=%v", as, container.LongString(), at))
		}
		args = append(args, a0)
	}

	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			addArg(e)
		}
		a = makeOf(a, OpArrayMake1, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRUCT:
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			if e == nil {
				panic(fmt.Errorf("nil e, et=%v, et.Size()=%d, i=%d", et, et.Size(), i))
			}
			addArg(e)
			pos = pos.WithNotStmt()
		}
		if at.NumFields() > 4 {
			panic(fmt.Errorf("Too many fields (%d, %d bytes), container=%s", at.NumFields(), at.Size(), container.LongString()))
		}
		a = makeOf(a, OpStructMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSLICE:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpSliceMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRING:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpStringMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINTER:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		a = makeOf(a, OpIMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX64:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX128:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINT64:
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.firstType, rc.next(x.firstType)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.secondType, rc.next(x.secondType)))
			if !x.f.Config.BigEndian {
				// OpInt64Make takes (hi, lo); on little-endian the parts
				// above were built (lo, hi), so swap them.
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			if !x.f.Config.BigEndian {
				// Same swap as above: OpInt64Make wants (hi, lo).
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	}

	// The value is a leaf; materialize it from its container: a register-
	// resident Arg/SelectN piece, a narrower stack-resident Arg, or a load
	// from the stack-resident portion of a call's results.
	if container.Op == OpArg {
		if rc.hasRegs() {
			op, i := rc.ArgOpAndRegisterFor()
			name := container.Aux.(*ir.Name)
			a = makeOf(a, op, nil)
			a.AuxInt = i
			a.Aux = &AuxNameOffset{name, rc.storeOffset}
		} else {
			key := selKey{container, rc.storeOffset, at.Size(), at}
			w := x.commonArgs[key]
			if w != nil && w.Uses != 0 {
				if a == nil {
					a = w
				} else {
					a.copyOf(w)
				}
			} else {
				if a == nil {
					aux := container.Aux
					auxInt := container.AuxInt + rc.storeOffset
					a = container.Block.NewValue0IA(container.Pos, OpArg, at, auxInt, aux)
				} else {
					// a is the original Arg and is already correct; leave it.
				}
				x.commonArgs[key] = a
			}
		}
	} else if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if at == types.TypeMem {
			if a != m0 || a != x.memForCall[call.ID] {
				panic(fmt.Errorf("Memories %s, %s, and %s should all be equal after %s", a.LongString(), m0.LongString(), x.memForCall[call.ID], call.LongString()))
			}
		} else if rc.hasRegs() {
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a = makeOf(a, OpSelectN, []*Value{call})
			a.AuxInt = reg
		} else {
			off := x.offsetFrom(x.f.Entry, x.sp, rc.storeOffset+aux.OffsetOfResult(which), types.NewPtr(at))
			a = makeOf(a, OpLoad, []*Value{off, m0})
		}

	} else {
		panic(fmt.Errorf("Expected container OpArg or OpSelectN, saw %v instead", container.LongString()))
	}

	x.commonSelectors[sk] = a
	return a
}

// rewriteWideSelectToStores handles a SelectN result that is too large to be
// SSA-able but is (at least partly) returned in registers: it walks the type,
// selecting each register piece of the call's result and storing it at the
// corresponding offset from rc.storeDest. It returns the memory after the
// last store.
func (x *expandState) rewriteWideSelectToStores(pos src.XPos, b *Block, container, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at.Size() == 0 {
		return m0
	}

	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
		}
		return m0

	case types.TSTRUCT:
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
			pos = pos.WithNotStmt()
		}
		return m0

	case types.TSLICE:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TSTRING:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TINTER:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		return m0

	case types.TCOMPLEX64:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		return m0

	case types.TCOMPLEX128:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		return m0

	case types.TINT64:
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.firstType, rc.next(x.firstType))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.secondType, rc.next(x.secondType))
			return m0
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			return m0
		}
	}

	// At a leaf: select the register piece of the call's result and store it
	// at its offset from the store destination.
	if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if rc.hasRegs() {
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a := b.NewValue1I(pos, OpSelectN, at, reg, call)
			dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
			m0 = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, m0)
		} else {
			panic(fmt.Errorf("Expected rc to have registers"))
		}
	} else {
		panic(fmt.Errorf("Expected container OpSelectN, saw %v instead", container.LongString()))
	}
	return m0
}

// isBlockMultiValueExit reports whether b is a Ret or RetJmp block whose
// control value is an OpMakeResult.
func isBlockMultiValueExit(b *Block) bool {
	return (b.Kind == BlockRet || b.Kind == BlockRetJmp) && b.Controls[0] != nil && b.Controls[0].Op == OpMakeResult
}

// Abi1RO is an offset within a parameter's slice of register indexes.
type Abi1RO uint8

// A registerCursor tracks where the next piece of a decomposed value goes:
// either the next register in regs (collected into *regValues) or the next
// offset from storeDest in memory.
type registerCursor struct {
	storeDest   *Value         // base address for memory-resident pieces
	storeOffset int64          // current offset from storeDest
	regs        []abi.RegIndex // registers assigned to this value (a value is entirely in registers or entirely in memory)
	nextSlice   Abi1RO         // index into regs of the next register to use
	config      *abi.ABIConfig
	regValues   *[]*Value // values destined for registers accumulate here
}
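
// A cursor is typically initialized from a call's ABI information and then
// advanced once per decomposed element, e.g. (illustrative sketch only):
//
//	var rc registerCursor
//	rc.init(regs, aux.abiInfo, &allResults, sp, offset)
//	mem = x.decomposeAsNecessary(pos, b, arg, mem, rc)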

func (c *registerCursor) String() string {
	dest := "<none>"
	if c.storeDest != nil {
		dest = fmt.Sprintf("%s+%d", c.storeDest.String(), c.storeOffset)
	}
	regs := "<none>"
	if c.regValues != nil {
		regs = ""
		for i, x := range *c.regValues {
			if i > 0 {
				regs = regs + "; "
			}
			regs = regs + x.LongString()
		}
	}

	return fmt.Sprintf("RCSR{storeDest=%v, regsLen=%d, nextSlice=%d, regValues=[%s]}", dest, len(c.regs), c.nextSlice, regs)
}

// next effectively post-increments the register cursor: the receiver is
// advanced past a value of type t (in both store offset and register count)
// and the pre-advance cursor, with its store offset aligned for t, is
// returned.
func (c *registerCursor) next(t *types.Type) registerCursor {
	c.storeOffset = types.RoundUp(c.storeOffset, t.Alignment())
	rc := *c
	c.storeOffset = types.RoundUp(c.storeOffset+t.Size(), t.Alignment())
	if int(c.nextSlice) < len(c.regs) {
		w := c.config.NumParamRegs(t)
		c.nextSlice += Abi1RO(w)
	}
	return rc
}

// plus returns a register cursor offset by regWidth registers, without
// modifying the original.
func (c *registerCursor) plus(regWidth Abi1RO) registerCursor {
	rc := *c
	rc.nextSlice += regWidth
	return rc
}

func (c *registerCursor) init(regs []abi.RegIndex, info *abi.ABIParamResultInfo, result *[]*Value, storeDest *Value, storeOffset int64) {
	c.regs = regs
	c.nextSlice = 0
	c.storeOffset = storeOffset
	c.storeDest = storeDest
	c.config = info.Config()
	c.regValues = result
}

func (c *registerCursor) addArg(v *Value) {
	*c.regValues = append(*c.regValues, v)
}

func (c *registerCursor) hasRegs() bool {
	return len(c.regs) > 0
}

func (c *registerCursor) ArgOpAndRegisterFor() (Op, int64) {
	r := c.regs[c.nextSlice]
	return ArgOpAndRegisterFor(r, c.config)
}

// ArgOpAndRegisterFor converts an ABI register index into an ssa Op
// (OpArgIntReg or OpArgFloatReg) and the register's index within its class.
func ArgOpAndRegisterFor(r abi.RegIndex, abiConfig *abi.ABIConfig) (Op, int64) {
	i := abiConfig.FloatIndexFor(r)
	if i >= 0 {
		return OpArgFloatReg, i
	}
	return OpArgIntReg, int64(r)
}

type selKey struct {
	from          *Value
	offsetOrIndex int64
	size          int64
	typ           *types.Type
}

type expandState struct {
	f       *Func
	debug   int // controls debug printing; higher values are more verbose
	regSize int64
	sp      *Value
	typs    *Types

	// Ops and types used to split 64-bit integers on 32-bit targets;
	// "first" and "second" refer to selection order, which depends on
	// endianness.
	firstOp    Op
	secondOp   Op
	firstType  *types.Type
	secondType *types.Type

	wideSelects     map[*Value]*Value // non-SSA-able SelectN values, mapped to their consuming stores
	commonSelectors map[selKey]*Value // deduplicates selections
	commonArgs      map[selKey]*Value // deduplicates decomposed Args
	memForCall      map[ID]*Value     // maps a call's ID to the SelectN carrying its memory result
	indentLevel     int               // indentation for debug printing
}

// offsetFrom creates an offset from a pointer, simplifying chains of OpOffPtr
// and offsets from SP.
func (x *expandState) offsetFrom(b *Block, from *Value, offset int64, pt *types.Type) *Value {
	ft := from.Type
	if offset == 0 {
		if ft == pt {
			return from
		}
		// With zero offset, a pointer of one pointer type can stand in for another.
		if (ft.IsPtr() || ft.IsUnsafePtr()) && pt.IsPtr() {
			return from
		}
	}
	// Fold chains of OpOffPtr into a single offset from their base.
	for from.Op == OpOffPtr {
		offset += from.AuxInt
		from = from.Args[0]
	}
	if from == x.sp {
		return x.f.ConstOffPtrSP(pt, offset, x.sp)
	}
	return b.NewValue1I(from.Pos.WithNotStmt(), OpOffPtr, pt, offset, from)
}

// prAssignForArg returns the ABIParamAssignment for the OpArg v.
func (x *expandState) prAssignForArg(v *Value) *abi.ABIParamAssignment {
	if v.Op != OpArg {
		panic(fmt.Errorf("Wanted OpArg, instead saw %s", v.LongString()))
	}
	return ParamAssignmentForArgName(x.f, v.Aux.(*ir.Name))
}

// ParamAssignmentForArgName returns the ABIParamAssignment of f's input
// parameter with the matching name.
func ParamAssignmentForArgName(f *Func, name *ir.Name) *abi.ABIParamAssignment {
	abiInfo := f.OwnAux.abiInfo
	ip := abiInfo.InParams()
	for i, a := range ip {
		if a.Name == name {
			return &ip[i]
		}
	}
	panic(fmt.Errorf("Did not match param %v in prInfo %+v", name, abiInfo.InParams()))
}

// indent increments (or decrements) the indentation used by debug printing.
func (x *expandState) indent(n int) {
	x.indentLevel += n
}

// Printf does an indented fmt.Printf on the format and args.
func (x *expandState) Printf(format string, a ...interface{}) (n int, err error) {
	if x.indentLevel > 0 {
		fmt.Printf("%[1]*s", x.indentLevel, "")
	}
	return fmt.Printf(format, a...)
}

// invalidateRecursively invalidates a and, recursively, any of its arguments
// left without uses; in debug mode it reports lost statement markers.
func (x *expandState) invalidateRecursively(a *Value) {
	var s string
	if x.debug > 0 {
		plus := " "
		if a.Pos.IsStmt() == src.PosIsStmt {
			plus = " +"
		}
		s = a.String() + plus + a.Pos.LineNumber() + " " + a.LongString()
		if x.debug > 1 {
			x.Printf("...marking %v unused\n", s)
		}
	}
	lost := a.invalidateRecursively()
	if x.debug&1 != 0 && lost {
		x.Printf("Lost statement marker in %s on former %s\n", base.Ctxt.Pkgpath+"."+x.f.Name, s)
	}
}