// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"fmt"
	"go/constant"
	"go/token"
	"internal/abi"
	"strings"

	"cmd/compile/internal/base"
	"cmd/compile/internal/escape"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
)

// walkAppend rewrites append(src, x, y, z) so that any side effects in
// x, y, z (including runtime panics) are evaluated in initialization
// statements before the append. For normal code generation it stops
// there and leaves the init statements to be ordered.
//
// When instrumenting (e.g. for the race detector), append(src, a [, b]*)
// is expanded further to:
//
//	init {
//		s := src
//		const argc = len(args) - 1
//		newLen := s.len + argc
//		if uint(newLen) <= uint(s.cap) {
//			s = s[:newLen]
//		} else {
//			s = growslice(s.ptr, newLen, s.cap, argc, elemType)
//		}
//		s[newLen-argc] = a
//		s[newLen-argc+1] = b
//		...
//	}
//	s
func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
	if !ir.SameSafeExpr(dst, n.Args[0]) {
		n.Args[0] = safeExpr(n.Args[0], init)
		n.Args[0] = walkExpr(n.Args[0], init)
	}
	walkExprListSafe(n.Args[1:], init)

	nsrc := n.Args[0]

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	// Using cheapExpr also makes sure that the evaluation
	// of all function calls and other operations are done
	// before we modify the slice in a visible way.
	ls := n.Args[1:]
	for i, n := range ls {
		n = cheapExpr(n, init)
		if !types.Identical(n.Type(), nsrc.Type().Elem()) {
			n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
			n = walkExpr(n, init)
		}
		ls[i] = n
	}

	argc := len(n.Args) - 1
	if argc < 1 {
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for ssagen, except that instrumentation requires the old form.
	if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
		return n
	}

	var l []ir.Node

	// s = src
	s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type())
	l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))

	// num = number of things to append
	num := ir.NewInt(base.Pos, int64(argc))

	// newLen := s.len + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num)))

	// if uint(newLen) <= uint(s.cap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT]))
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, slice),
	}

	// else { s = growslice(s.ptr, newLen, s.cap, num, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			newLen,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			num)),
	}

	l = append(l, nif)

	ls = n.Args[1:]
	for i, n := range ls {
		// s[newLen-argc+i] = arg
		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i))))
		ix.SetBounded(true)
		l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
	}

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return s
}

// walkGrowslice wraps a call to the growslice runtime function, which has
// the shape growslice(oldPtr, newLen, oldCap, num, elemType) and returns
// the grown slice.
func walkGrowslice(slice *ir.Name, init *ir.Nodes, oldPtr, newLen, oldCap, num ir.Node) *ir.CallExpr {
	elemtype := slice.Type().Elem()
	fn := typecheck.LookupRuntime("growslice", elemtype, elemtype)
	elemtypeptr := reflectdata.TypePtrAt(base.Pos, elemtype)
	return mkcall1(fn, slice.Type(), init, oldPtr, newLen, oldCap, num, elemtypeptr)
}

// walkClear walks an OCLEAR node.
func walkClear(n *ir.UnaryExpr) ir.Node {
	typ := n.X.Type()
	switch {
	case typ.IsSlice():
		if n := arrayClear(n.X.Pos(), n.X, nil); n != nil {
			return n
		}

		return ir.NewBlockStmt(n.Pos(), nil)
	case typ.IsMap():
		return mapClear(n.X, reflectdata.TypePtrAt(n.X.Pos(), n.X.Type()))
	}
	panic("unreachable")
}

// walkClose walks an OCLOSE node.
func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	return mkcall1(chanfn("closechan", 1, n.X.Type()), nil, init, n.X)
}

// walkCopy lowers copy(a, b) to a memmove call or a runtime call:
//
//	init {
//		n := len(a)
//		if n > len(b) { n = len(b) }
//		if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
//	}
//	n;
//
// Also works if b is a string.
func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
	if n.X.Type().Elem().HasPointers() {
		ir.CurFunc.SetWBPos(n.Pos())
		fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)
		return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR)
	}

	if runtimecall {
		// rely on runtime to instrument:
		//	copy(n.Left, n.Right)
		// n.Right can be a slice or string.

		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)

		fn := typecheck.LookupRuntime("slicecopy", ptrL.Type().Elem(), ptrR.Type().Elem())

		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size()))
	}

	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)
	nl := typecheck.TempAt(base.Pos, ir.CurFunc, n.X.Type())
	nr := typecheck.TempAt(base.Pos, ir.CurFunc, n.Y.Type())
	var l []ir.Node
	l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
	l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))

	nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
	nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)

	nlen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])

	// n = len(to)
	l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))

	// if n > len(frm) { n = len(frm) }
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)

	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
	nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
	l = append(l, nif)

	// if to.ptr != frm.ptr { memmove( ... ) }
	ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
	ne.Likely = true
	l = append(l, ne)

	fn := typecheck.LookupRuntime("memmove", nl.Type().Elem(), nl.Type().Elem())
	nwid := ir.Node(typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR]))
	setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
	ne.Body.Append(setwid)
	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size()))
	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
	ne.Body.Append(call)

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return nlen
}

// walkDelete walks an ODELETE node.
func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
	init.Append(ir.TakeInit(n)...)
	map_ := n.Args[0]
	key := n.Args[1]
	map_ = walkExpr(map_, init)
	key = walkExpr(key, init)

	t := map_.Type()
	fast := mapfast(t)
	key = mapKeyArg(fast, n, key, false)
	return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key)
}

// walkLenCap walks an OLEN or OCAP node.
func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	if isRuneCount(n) {
		// Replace len([]rune(string)) with runtime.countrunes(string).
		return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
	}
	if isByteCount(n) {
		conv := n.X.(*ir.ConvExpr)
		walkStmtList(conv.Init())
		init.Append(ir.TakeInit(conv)...)
		_, len := backingArrayPtrLen(cheapExpr(conv.X, init))
		return len
	}
	if isChanLenCap(n) {
		name := "chanlen"
		if n.Op() == ir.OCAP {
			name = "chancap"
		}

		// chanlen and chancap are generic over the channel type,
		// so instantiate them with it directly.
		fn := typecheck.LookupRuntime(name, n.X.Type())
		return mkcall1(fn, n.Type(), init, n.X)
	}

	n.X = walkExpr(n.X, init)

	// Replace len(*[10]int) with 10.
	// Delayed until now to preserve side effects.
	t := n.X.Type()
	if t.IsPtr() {
		t = t.Elem()
	}
	if t.IsArray() {
		// Evaluate any side effects that remain in n.X.
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.BlankNode, n.X))

		con := ir.NewConstExpr(constant.MakeInt64(t.NumElem()), n)
		con.SetTypecheck(1)
		return con
	}
	return n
}
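
// For reference, the special cases above correspond to source-level
// rewrites such as:
//
//	len([]rune(s))   -> countrunes(s)
//	len(string(b))   -> len(b)                  // b is a []byte
//	len(c), cap(c)   -> chanlen(c), chancap(c)  // c is a channel
//	len(*p)          -> 10                      // p is a *[10]T; *p still evaluated for side effects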

// walkMakeChan walks an OMAKECHAN node.
func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	// When size fits into int, use makechan instead of
	// makechan64, which is faster and shorter on 32 bit platforms.
	size := n.Len
	fnname := "makechan64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL size is positive and fits in an int.
	// The case of size overflow when converting TUINT or TUINTPTR to TINT
	// will be handled by the negative range checks in makechan during runtime.
	if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makechan"
		argtype = types.Types[types.TINT]
	}

	return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype))
}
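
// For reference, a channel make such as
//
//	ch := make(chan T, n)
//
// becomes a call along the lines of makechan(&chanType, int(n)), or
// makechan64 when n's type may be wider than int.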

// walkMakeMap walks an OMAKEMAP node.
func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	t := n.Type()
	mapType := reflectdata.MapType()
	hint := n.Len

	// var m *Map
	var m ir.Node
	if n.Esc() == ir.EscNone {
		// Allocate the map structure on the stack:
		//
		//	var mv Map
		//	m = &mv
		m = stackTempAddr(init, mapType)

		// Also allocate one group on the stack and point m.dirPtr at it,
		// unless hint is a constant known to be larger than the number of
		// slots in a single group (abi.MapGroupSlots), in which case the
		// runtime will allocate the groups.
		if !ir.IsConst(hint, constant.Int) ||
			constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.MapGroupSlots)) {

			// If hint is not known at compile time, guard the group
			// allocation with a runtime check:
			//
			//	if hint <= abi.MapGroupSlots {
			//		var gv group
			//		g := &gv
			//		g.ctrl = abi.MapCtrlEmpty
			//		m.dirPtr = g
			//	}
			//
			// When hint is a small constant, the condition is always true
			// and the branch is trivially taken.
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, abi.MapGroupSlots)), nil, nil)
			nif.Likely = true

			groupType := reflectdata.MapGroupType(t)

			// var gv group
			// g := &gv
			g := stackTempAddr(&nif.Body, groupType)

			// All slots in the group start out empty:
			//
			//	g.ctrl = abi.MapCtrlEmpty
			empty := ir.NewBasicLit(base.Pos, types.UntypedInt, constant.MakeUint64(abi.MapCtrlEmpty))

			// g.ctrl = empty
			csym := groupType.Field(0).Sym // g.ctrl
			ca := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, g, csym), empty)
			nif.Body.Append(ca)

			// m.dirPtr = g
			dsym := mapType.Field(2).Sym // m.dirPtr
			na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, m, dsym), typecheck.ConvNop(g, types.Types[types.TUNSAFEPTR]))
			nif.Body.Append(na)
			appendWalkStmt(init, nif)
		}
	}

	if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.MapGroupSlots)) {
		// The hint is a compile-time constant no larger than a single
		// group, so the map starts out small. Handling make(map[any]any)
		// and make(map[any]any, hint) with hint <= abi.MapGroupSlots
		// specially lets us skip the general makemap call below.
		if n.Esc() == ir.EscNone {
			// The map was fully allocated on the stack above; it only
			// needs its seed initialized:
			//
			//	m.seed = uintptr(rand())
			rand := mkcall("rand", types.Types[types.TUINT64], init)
			seedSym := mapType.Field(1).Sym // m.seed
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, m, seedSym), typecheck.Conv(rand, types.Types[types.TUINTPTR])))
			return typecheck.ConvNop(m, t)
		}

		// The map escapes: let runtime.makemap_small allocate it on the heap.
		fn := typecheck.LookupRuntime("makemap_small", t.Key(), t.Elem())
		return mkcall1(fn, n.Type(), init)
	}

	if n.Esc() != ir.EscNone {
		m = typecheck.NodNil()
	}

	// Map initialization with a variable or large hint is more involved:
	// generate a call to runtime.makemap to initialize the map and
	// allocate its groups.

	// When hint fits into int, use makemap instead of
	// makemap64, which is faster and shorter on 32 bit platforms.
	fnname := "makemap64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL hint is positive and fits in an int.
	// The case of hint overflow when converting TUINT or TUINTPTR to TINT
	// will be handled by the negative range checks in makemap during runtime.
	if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makemap"
		argtype = types.Types[types.TINT]
	}

	fn := typecheck.LookupRuntime(fnname, mapType, t.Key(), t.Elem())
	return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), m)
}
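
// In source terms, the paths above roughly correspond to:
//
//	m := make(map[K]V)        // non-escaping: stack Map + one stack group + seed
//	m := make(map[K]V, 4)     // small constant hint: same, or makemap_small if it escapes
//	m := make(map[K]V, hint)  // variable or large hint: makemap / makemap64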

// walkMakeSlice walks an OMAKESLICE node.
func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	len := n.Len
	cap := n.Cap
	len = safeExpr(len, init)
	if cap != nil {
		cap = safeExpr(cap, init)
	} else {
		cap = len
	}
	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}

	tryStack := false
	if n.Esc() == ir.EscNone {
		if why := escape.HeapAllocReason(n); why != "" {
			base.Fatalf("%v has EscNone, but %v", n, why)
		}
		if ir.IsSmallIntConst(cap) {
			// Constant backing array: allocate it and slice it.
			cap := typecheck.IndexConst(cap)

			// cap is constrained to [0,2^31) or [0,2^63) depending on
			// whether we're on a 32-bit or 64-bit system, so it's safe
			// to do:
			//
			//	if uint64(len) > cap {
			//		if len < 0 { panicmakeslicelen() }
			//		panicmakeslicecap()
			//	}
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINT64]), ir.NewInt(base.Pos, cap)), nil, nil)
			niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, len, ir.NewInt(base.Pos, 0)), nil, nil)
			niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
			nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
			init.Append(typecheck.Stmt(nif))

			// var arr [cap]T
			// s := arr[:len]
			t := types.NewArray(t.Elem(), cap)
			arr := typecheck.TempAt(base.Pos, ir.CurFunc, t)
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, arr, nil))
			s := ir.NewSliceExpr(base.Pos, ir.OSLICE, arr, nil, len, nil)
			// The conv is necessary in case n.Type() is named.
			return walkExpr(typecheck.Expr(typecheck.Conv(s, n.Type())), init)
		}

		tryStack = base.Flag.N == 0 && base.VariableMakeHash.MatchPos(n.Pos(), nil)
	}

	// The final result is assigned to this slice variable.
	slice := typecheck.TempAt(base.Pos, ir.CurFunc, n.Type())

	if tryStack {
		// K := maxStackSize / sizeof(E)
		// if cap <= K {
		//	var arr [K]E
		//	slice = arr[:len:cap]
		// } else {
		//	slice = makeslice(E, len, cap)
		// }
		maxStackSize := int64(base.Debug.VariableMakeThreshold)
		K := maxStackSize / t.Elem().Size()
		if K > 0 {
			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(cap, types.Types[types.TUINT64]), ir.NewInt(base.Pos, K)), nil, nil)

			// cap is in bounds after the K check, but len might not be.
			// (The slicing below would catch the same bad cases, but we
			// want makeslice panics, not slice panics.)
			lenCap := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINT64]), typecheck.Conv(cap, types.Types[types.TUINT64])), nil, nil)
			lenZero := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, len, ir.NewInt(base.Pos, 0)), nil, nil)
			lenZero.Body.Append(mkcall("panicmakeslicelen", nil, &lenZero.Body))
			lenCap.Body.Append(lenZero)
			lenCap.Body.Append(mkcall("panicmakeslicecap", nil, &lenCap.Body))
			nif.Body.Append(lenCap)

			t := types.NewArray(t.Elem(), K)

			// Wrap the array in a struct whose first (blank) field is a
			// zero-length array of uintptrs. That field takes no space
			// but gives the temporary at least pointer alignment.
			field := typecheck.Lookup("arr")
			t = types.NewStruct([]*types.Field{
				{Sym: types.BlankSym, Type: types.NewArray(types.Types[types.TUINTPTR], 0)},
				{Sym: field, Type: t},
			})
			t.SetNoalg(true)
			store := typecheck.TempAt(base.Pos, ir.CurFunc, t)
			nif.Body.Append(ir.NewAssignStmt(base.Pos, store, nil))
			arr := ir.NewSelectorExpr(base.Pos, ir.ODOT, store, field)
			s := ir.NewSliceExpr(base.Pos, ir.OSLICE, arr, nil, len, cap)
			nif.Body.Append(ir.NewAssignStmt(base.Pos, slice, s))

			appendWalkStmt(init, typecheck.Stmt(nif))

			// Put the makeslice call below in the else branch.
			init = &nif.Else
		}
	}

	// Set up a call to makeslice.
	// When len and cap can fit into int, use makeslice instead of
	// makeslice64, which is faster and shorter on 32 bit platforms.
	fnname := "makeslice64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
	// will be handled by the negative range checks in makeslice during runtime.
	if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) &&
		(cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) {
		fnname = "makeslice"
		argtype = types.Types[types.TINT]
	}
	fn := typecheck.LookupRuntime(fnname)
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
	ptr.MarkNonNil()
	len = typecheck.Conv(len, types.Types[types.TINT])
	cap = typecheck.Conv(cap, types.Types[types.TINT])
	s := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap)
	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, slice, s))

	return slice
}
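
// In source terms, a non-escaping make with a small constant capacity such as
//
//	s := make([]T, n, 8)
//
// is rewritten above into a stack-allocated array sliced to length n (after
// the len/cap panics are checked), while other cases fall through to
// makeslice / makeslice64.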

// walkMakeSliceCopy walks an OMAKESLICECOPY node.
func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	if n.Esc() == ir.EscNone {
		base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
	}

	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}

	length := typecheck.Conv(n.Len, types.Types[types.TINT])
	copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
	copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)

	if !t.Elem().HasPointers() && n.Bounded() {
		// When len(to) == len(from) and the elements have no pointers,
		// replace make+copy with runtime.mallocgc + runtime.memmove.

		// size := len * sizeof(elem(t)), computed in uintptr arithmetic.
		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR]))

		// instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
		fn := typecheck.LookupRuntime("mallocgc")
		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false))
		ptr.MarkNonNil()
		sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)

		s := typecheck.TempAt(base.Pos, ir.CurFunc, t)
		r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
		r = walkExpr(r, init)
		init.Append(r)

		// instantiate memmove(to *any, frm *any, size uintptr)
		fn = typecheck.LookupRuntime("memmove", t.Elem(), t.Elem())
		ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
		init.Append(walkExpr(typecheck.Stmt(ncopy), init))

		return s
	}

	// Replace make+copy with runtime.makeslicecopy.
	// instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
	fn := typecheck.LookupRuntime("makeslicecopy")
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
	ptr.MarkNonNil()
	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
	return walkExpr(typecheck.Expr(sh), init)
}
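
// OMAKESLICECOPY nodes come from patterns like
//
//	s := make([]T, len(src))
//	copy(s, src)
//
// that earlier passes have fused. The code above allocates the backing store
// and performs the copy in one step: mallocgc + memmove when T has no
// pointers and the lengths are known equal, makeslicecopy otherwise.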

// walkNew walks an ONEW node.
func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	t := n.Type().Elem()
	if t.NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem())
	}
	if n.Esc() == ir.EscNone {
		if t.Size() > ir.MaxImplicitStackVarSize {
			base.Fatalf("large ONEW with EscNone: %v", n)
		}
		return stackTempAddr(init, t)
	}
	types.CalcSize(t)
	n.MarkNonNil()
	return n
}
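
// For reference: a non-escaping new(T) becomes the address of a zeroed stack
// temporary, while an escaping new(T) is left as an ONEW node (marked
// non-nil) for later lowering to a heap allocation.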

// walkMinMax walks an OMIN or OMAX node.
func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	init.Append(ir.TakeInit(n)...)
	walkExprList(n.Args, init)
	return n
}

// walkPrint walks an OPRINT or OPRINTLN node.
func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	// Hoist all the argument evaluation up before the lock.
	walkExprListCheap(nn.Args, init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op() == ir.OPRINTLN {
		s := nn.Args
		t := make([]ir.Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, ir.NewString(base.Pos, " "))
			}
			t = append(t, n)
		}
		t = append(t, ir.NewString(base.Pos, "\n"))
		nn.Args = t
	}

	// Collapse runs of constant strings.
	s := nn.Args
	t := make([]ir.Node, 0, len(s))
	for i := 0; i < len(s); {
		var strs []string
		for i < len(s) && ir.IsConst(s[i], constant.String) {
			strs = append(strs, ir.StringVal(s[i]))
			i++
		}
		if len(strs) > 0 {
			t = append(t, ir.NewString(base.Pos, strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.Args = t

	calls := []ir.Node{mkcall("printlock", nil, init)}
	for i, n := range nn.Args {
		if n.Op() == ir.OLITERAL {
			if n.Type() == types.UntypedRune {
				n = typecheck.DefaultLit(n, types.RuneType)
			}

			switch n.Val().Kind() {
			case constant.Int:
				n = typecheck.DefaultLit(n, types.Types[types.TINT64])

			case constant.Float:
				n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
			}
		}

		if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
			n = typecheck.DefaultLit(n, types.Types[types.TINT64])
		}
		n = typecheck.DefaultLit(n, nil)
		nn.Args[i] = n
		if n.Type() == nil || n.Type().Kind() == types.TFORW {
			continue
		}

		var on *ir.Name
		switch n.Type().Kind() {
		case types.TINTER:
			if n.Type().IsEmptyInterface() {
				on = typecheck.LookupRuntime("printeface", n.Type())
			} else {
				on = typecheck.LookupRuntime("printiface", n.Type())
			}
		case types.TPTR:
			if n.Type().Elem().NotInHeap() {
				on = typecheck.LookupRuntime("printuintptr")
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUNSAFEPTR])
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUINTPTR])
				break
			}
			fallthrough
		case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
			on = typecheck.LookupRuntime("printpointer", n.Type())
		case types.TSLICE:
			on = typecheck.LookupRuntime("printslice", n.Type())
		case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
			if types.RuntimeSymName(n.Type().Sym()) == "hex" {
				on = typecheck.LookupRuntime("printhex")
			} else {
				on = typecheck.LookupRuntime("printuint")
			}
		case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
			on = typecheck.LookupRuntime("printint")
		case types.TFLOAT32, types.TFLOAT64:
			on = typecheck.LookupRuntime("printfloat")
		case types.TCOMPLEX64, types.TCOMPLEX128:
			on = typecheck.LookupRuntime("printcomplex")
		case types.TBOOL:
			on = typecheck.LookupRuntime("printbool")
		case types.TSTRING:
			cs := ""
			if ir.IsConst(n, constant.String) {
				cs = ir.StringVal(n)
			}
			switch cs {
			case " ":
				on = typecheck.LookupRuntime("printsp")
			case "\n":
				on = typecheck.LookupRuntime("printnl")
			default:
				on = typecheck.LookupRuntime("printstring")
			}
		default:
			badtype(ir.OPRINT, n.Type(), nil)
			continue
		}

		r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil)
		if params := on.Type().Params(); len(params) > 0 {
			t := params[0].Type
			n = typecheck.Conv(n, t)
			r.Args.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheck.Stmts(calls)
	walkExprList(calls, init)

	r := ir.NewBlockStmt(base.Pos, nil)
	r.List = calls
	return walkStmt(typecheck.Stmt(r))
}
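
// For reference, a call like
//
//	println("x =", x)   // x is an int
//
// is lowered by walkPrint into roughly:
//
//	printlock()
//	printstring("x = ")
//	printint(int64(x))
//	printnl()
//	printunlock()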

// walkRecover walks an ORECOVER node.
func walkRecover(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	return mkcall("gorecover", nn.Type(), init)
}

// walkUnsafeData walks an OUNSAFESLICEDATA or OUNSAFESTRINGDATA node.
func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	slice := walkExpr(n.X, init)
	res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice))
	res.SetType(n.Type())
	return walkExpr(res, init)
}

func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)
	sliceType := n.Type()

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr is enabled, call runtime.unsafeslicecheckptr to check ptr
	// and len. For simplicity, unsafeslicecheckptr always uses int64.
	// Type checking guarantees that TIDEAL len is positive and fits in an int.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafeslicecheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Otherwise, open code unsafe.Slice to prevent runtime call overhead.
		// Keep this code in sync with runtime.unsafeslice{,64}.
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//	panicunsafeslicelen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafeslicelen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
		nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		if sliceType.Elem().Size() == 0 {
			// if ptr == nil && len > 0 {
			//	panicunsafeslicenilptr()
			// }
			nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
			isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
			gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
			nifPtr.Cond =
				ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
			nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
			appendWalkStmt(init, nifPtr)

			h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
				typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
				typecheck.Conv(len, types.Types[types.TINT]),
				typecheck.Conv(len, types.Types[types.TINT]))
			return walkExpr(typecheck.Expr(h), init)
		}

		// mem, overflow := math.MulUintptr(elemSize, len)
		mem := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
		overflow := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])

		decl := types.NewSignature(nil,
			[]*types.Field{
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
			},
			[]*types.Field{
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
				types.NewField(base.Pos, nil, types.Types[types.TBOOL]),
			})

		fn := ir.NewFunc(n.Pos(), n.Pos(), math_MulUintptr, decl)

		call := mkcall1(fn.Nname, fn.Type().ResultsTuple(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
		appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))

		// if overflow || mem > -uintptr(ptr) {
		//	if ptr == nil {
		//		panicunsafeslicenilptr()
		//	}
		//	panicunsafeslicelen()
		// }
		nif = ir.NewIfStmt(base.Pos, nil, nil, nil)
		memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond)
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
		nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)
	}

	h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
		typecheck.Conv(len, types.Types[types.TINT]))
	return walkExpr(typecheck.Expr(h), init)
}
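
// For reference, in the non-checkptr path above, unsafe.Slice(ptr, len) is
// open-coded roughly as:
//
//	if len < 0 { panicunsafeslicelen() }
//	mem, overflow := math.MulUintptr(elemSize, uintptr(len))
//	if overflow || mem > -uintptr(ptr) {
//		if ptr == nil { panicunsafeslicenilptr() }
//		panicunsafeslicelen()
//	}
//	result = sliceheader{ptr, len, len}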

var math_MulUintptr = &types.Sym{Pkg: types.NewPkg("internal/runtime/math", "math"), Name: "MulUintptr"}

func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr is enabled, call runtime.unsafestringcheckptr to check ptr
	// and len. For simplicity, unsafestringcheckptr always uses int64.
	// Type checking guarantees that TIDEAL len is positive and fits in an int.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafestringcheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Otherwise, open code unsafe.String to prevent runtime call overhead.
		// Keep this code in sync with runtime.unsafestring{,64}.
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//	panicunsafestringlen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafestringlen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
		nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		// if uintptr(len) > -uintptr(ptr) {
		//	if ptr == nil {
		//		panicunsafestringnilptr()
		//	}
		//	panicunsafestringlen()
		// }
		nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body))
		nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body))
		appendWalkStmt(init, nifLen)
	}
	h := ir.NewStringHeaderExpr(n.Pos(),
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
	)
	return walkExpr(typecheck.Expr(h), init)
}

func badtype(op ir.Op, tl, tr *types.Type) {
	var s string
	if tl != nil {
		s += fmt.Sprintf("\n\t%v", tl)
	}
	if tr != nil {
		s += fmt.Sprintf("\n\t%v", tr)
	}

	// Common mistake: *struct vs *interface.
	if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
		if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
			s += "\n\t(*struct vs *interface)"
		} else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
			s += "\n\t(*interface vs *struct)"
		}
	}

	base.Errorf("illegal types for operand: %v%s", op, s)
}

func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
	return typecheck.LookupRuntime(name, l, r)
}

// isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes.
func isRuneCount(n ir.Node) bool {
	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
}

// isByteCount reports whether n is of the form len(string([]byte)).
func isByteCount(n ir.Node) bool {
	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN &&
		(n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STR || n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STRTMP)
}

// isChanLenCap reports whether n is of the form len(c) or cap(c) for a
// channel c. Unlike isRuneCount and isByteCount, it is not gated on -N or
// instrumentation: channel len/cap must always be lowered to runtime calls.
func isChanLenCap(n ir.Node) bool {
	return (n.Op() == ir.OLEN || n.Op() == ir.OCAP) && n.(*ir.UnaryExpr).X.Type().IsChan()
}