1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "os"
16 "path/filepath"
17 "slices"
18 "strings"
19
20 "cmd/compile/internal/abi"
21 "cmd/compile/internal/base"
22 "cmd/compile/internal/ir"
23 "cmd/compile/internal/liveness"
24 "cmd/compile/internal/objw"
25 "cmd/compile/internal/reflectdata"
26 "cmd/compile/internal/rttype"
27 "cmd/compile/internal/ssa"
28 "cmd/compile/internal/staticdata"
29 "cmd/compile/internal/typecheck"
30 "cmd/compile/internal/types"
31 "cmd/internal/obj"
32 "cmd/internal/objabi"
33 "cmd/internal/src"
34 "cmd/internal/sys"
35
36 rtabi "internal/abi"
37 )
38
var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the func name to dump output for
var ssaDir string      // optional destination for ssa dump file
var ssaDumpStdout bool // whether to dump to stdout
var ssaDumpCFG string  // generate CFGs for these phases
const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds all inlined functions when ssa dump output is requested,
// so their ASTs/sources can be shown alongside the function being compiled.
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation is the maximum total size of scalar locals
// whose heap allocations may be merged into a single allocation
// (see newHeapaddr / flushPendingHeapAllocations).
const maxAggregatedHeapAllocation = 16
55
56 func DumpInline(fn *ir.Func) {
57 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
58 ssaDumpInlined = append(ssaDumpInlined, fn)
59 }
60 }
61
62 func InitEnv() {
63 ssaDump = os.Getenv("GOSSAFUNC")
64 ssaDir = os.Getenv("GOSSADIR")
65 if ssaDump != "" {
66 if strings.HasSuffix(ssaDump, "+") {
67 ssaDump = ssaDump[:len(ssaDump)-1]
68 ssaDumpStdout = true
69 }
70 spl := strings.Split(ssaDump, ":")
71 if len(spl) > 1 {
72 ssaDump = spl[0]
73 ssaDumpCFG = spl[1]
74 }
75 }
76 }
77
// InitConfig builds the shared SSA configuration: the common types, the
// per-worker value caches, and the runtime function/variable symbols that
// the SSA builder references while compiling function bodies.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that are uncommon in the frontend but common in the backend.
	// Caching is disabled in the backend, so generating these here avoids allocations.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // *internal map type
	_ = types.NewPtr(deferstruct())                                         // *runtime._defer
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up some runtime functions and variables.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Wasm uses the Go-implemented bounds-check panic functions
	// rather than the assembly panic shims used elsewhere.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	}

	// Wasm helper symbols (asm funcs with special ABIs).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
195
// InitTables initializes the intrinsics table (see intrinsics
// elsewhere in this package); nil selects the default configuration.
func InitTables() {
	initIntrinsics(nil)
}
199
200
201
202
203
204
205
206
// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's
// stack map. This is not necessarily the ABI used to call it; the stack
// map here is always computed with ABI0. (fn is currently unused.)
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy() // No idea what races will result, be safe
}
210
211
212
213 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
214 if buildcfg.Experiment.RegabiArgs {
215
216 if fn == nil {
217 return abi1
218 }
219 switch fn.ABI {
220 case obj.ABI0:
221 return abi0
222 case obj.ABIInternal:
223
224
225 return abi1
226 }
227 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
228 panic("not reachable")
229 }
230
231 a := abi0
232 if fn != nil {
233 if fn.Pragma&ir.RegisterParams != 0 {
234 a = abi1
235 }
236 }
237 return a
238 }
239
240
241
242
243
244
245
246
247
248
249
250
// emitOpenDeferInfo emits FUNCDATA information about the defers in a
// function that is using open-coded defers. This funcdata is used to
// determine the active defers in a function and execute those defers
// during panic processing.
//
// The funcdata records the stack slots used by the open-coded defers:
// the deferBits variable and the closure slots, which are required to
// be laid out contiguously by the frame layout pass.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the frame layout pass placed the closure slots
	// in consecutive, pointer-sized stack slots.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	// Create a content-addressable symbol holding the funcdata.
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Frame offsets are negative; record their magnitudes as uvarints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
271
272
273
// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// match either a simple name e.g. "(*Reader).Reset", package.name e.g. "compress/gzip.(*Reader).Reset",
	// or subpackage name "gzip.(*Reader).Reset". An ABI suffix may also be selected.
	if strings.Contains(ssaDump, name) { // in all the cases the function name is entirely contained within the GOSSAFUNC value.
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' { // ABI specification
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // if they use the linker syntax instead, e.g. "foo<1>"
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "(*Reader).Reset"
			pkgDotName == ssaDump || // "compress/gzip.(*Reader).Reset"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "gzip.(*Reader).Reset"
	}

	var astBuf *bytes.Buffer
	if printssa {
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// TODO: generate and print a mapping from nodes to values and blocks
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Don't support open-coded defers for 386 ONLY when using shared
		// libraries, because there is extra code (added by rewriteToUseGot())
		// preceding the deferreturn/ret code that we don't track correctly.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Skip doing open defers if we need to instrument function
		// returns for the race detector, since we will not generate that
		// code in the case of the extra deferreturn/ret segment.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Similarly, skip if there are any heap-allocated result
		// parameters that need to be copied back to their stack slots.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Since we are generating defer calls at every exit for
		// open-coded defers, skip doing open-coded defers if there are
		// too many returns (especially if there are multiple defers).
		// Open-coded defers are most important for improving performance
		// for smaller functions (which don't have many returns).
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and stack slot. deferBits is a
		// bitmask showing which of the open-coded defers in this function
		// have been activated.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Make sure that the deferBits stack slot is kept alive (for use
		// by panics) and stores to deferBits are not eliminated, even if
		// all checking code on deferBits in the function exit can be
		// eliminated, because the defer statements were all
		// unconditional.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// The backend's stackframe pass prunes away entries from the fn's
	// Dcl list, including PPARAMOUT nodes that correspond to output
	// params passed in registers. Walk the Dcl list and capture these
	// nodes to a side list, so that we'll have them available during
	// DWARF-gen later on.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			// Be aware that blank and unnamed input parameters will not appear here, but do appear in the type.
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // This helps with debugging information, not needed for compilation itself.
			} else { // address was taken AND/OR too large for SSA
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) { // SSA-able type, so address was taken -- receive value in OpArg, DO NOT bind to var, store immediately to memory.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else { // Too big for SSA.
						// Brute force, and early, do a bunch of stores from registers.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// Unoptimized: spill the closure pointer to a named stack slot
			// so debuggers can find it (used by range-over-func bodies to
			// locate the parent frame).
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			// Keep it from being dead-store eliminated.
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// If n is a small variable captured by value, promote
			// it to PAUTO so it can be converted to SSA.
			//
			// Note: Since we don't convert Addrtaken variables to
			// SSA anyway, there is no point in promoting those.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// Load the closure variable's address if captured by
			// reference, then record the (heap) address of the variable.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// fallthrough to exit
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack calls
	// emitted in the assembler. This is done here, using all the
	// parameters (used, partially used, and unused) because it mimics
	// the behavior of the former ABI (everything stored) and because
	// it's not 100% clear if naming conventions are respected in
	// autogenerated code.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
598
599 func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
600 typs, offs := paramAssignment.RegisterTypesAndOffsets()
601 for i, t := range typs {
602 if pointersOnly && !t.IsPtrShaped() {
603 continue
604 }
605 r := paramAssignment.Registers[i]
606 o := offs[i]
607 op, reg := ssa.ArgOpAndRegisterFor(r, abi)
608 aux := &ssa.AuxNameOffset{Name: n, Offset: o}
609 v := s.newValue0I(op, t, reg)
610 v.Aux = aux
611 p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
612 s.store(t, p, v)
613 }
614 }
615
616
617
618
619
620
621
622 func (s *state) zeroResults() {
623 for _, f := range s.curfn.Type().Results() {
624 n := f.Nname.(*ir.Name)
625 if !n.OnStack() {
626
627
628
629 continue
630 }
631
632 if typ := n.Type(); ssa.CanSSA(typ) {
633 s.assign(n, s.zeroVal(typ), false, 0)
634 } else {
635 if typ.HasPointers() || ssa.IsMergeCandidate(n) {
636 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
637 }
638 s.zero(n.Type(), s.decladdrs[n])
639 }
640 }
641 }
642
643
644
645 func (s *state) paramsToHeap() {
646 do := func(params []*types.Field) {
647 for _, f := range params {
648 if f.Nname == nil {
649 continue
650 }
651 n := f.Nname.(*ir.Name)
652 if ir.IsBlank(n) || n.OnStack() {
653 continue
654 }
655 s.newHeapaddr(n)
656 if n.Class == ir.PPARAM {
657 s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
658 }
659 }
660 }
661
662 typ := s.curfn.Type()
663 do(typ.Recvs())
664 do(typ.Params())
665 do(typ.Results())
666 }
667
668
669
670
671 func allocSizeAndAlign(t *types.Type) (int64, int64) {
672 size, align := t.Size(), t.Alignment()
673 if types.PtrSize == 4 && align == 4 && size >= 8 {
674
675 size = types.RoundUp(size, 8)
676 align = 8
677 }
678 return size, align
679 }
// allocSize returns just the size component of allocSizeAndAlign.
func allocSize(t *types.Type) int64 {
	size, _ := allocSizeAndAlign(t)
	return size
}
// allocAlign returns just the alignment component of allocSizeAndAlign.
func allocAlign(t *types.Type) int64 {
	_, align := allocSizeAndAlign(t)
	return align
}
688
689
// newHeapaddr allocates heap memory for n and sets its heap address.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Pointer-containing, large, and zero-sized objects get their own
	// allocation; only small pointer-free scalars are aggregated below.
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
		return
	}

	// Do we have room together with our pending allocations?
	// If not, flush all the current ones.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value // (SelectN [0] (call of runtime.newobject))
	if len(s.pendingHeapAllocations) == 0 {
		// Make an allocation, but the type being allocated is just
		// the first pending object. We will come back and update it
		// later if needed.
		allocCall = s.newObject(n.Type(), nil)
	} else {
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}
	// v is an offset to the shared allocation. Offsets are dummy 0s for now.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	// Add to list of pending allocations.
	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	// Finally, record for posterity.
	s.setHeapaddr(n.Pos(), n, v)
}
725
// flushPendingHeapAllocations combines all the pending small heap
// allocations (recorded by newHeapaddr) into a single allocation, patching
// the shared newobject call and the per-object OffPtr values in place.
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return // nothing to do
	}
	s.pendingHeapAllocations = nil // reset state
	ptr := pending[0].Args[0]      // The SelectN [0] op
	call := ptr.Args[0]            // The newobject call

	if len(pending) == 1 {
		// Just a single object, so the allocation call can stand as-is.
		// Turn the placeholder OffPtr [0] into a plain copy of the pointer.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort in decreasing alignment order, so the first object has the
	// largest alignment and every later object is naturally aligned at
	// its running offset.
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Compute the total size needed and assign each object its offset
	// within the combined allocation.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the newobject call into a raw mallocgc call of the
	// combined size. The objects are all pointer-free, so a nil type
	// pointer with needzero=true is sufficient.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size), // size
		s.constNil(call.Args[0].Type),                 // nil type pointer
		s.constBool(true),                             // needzero
		call.Args[1],                                  // memory
	}
	call.Aux = ssa.StaticAuxCall(ir.Syms.MallocGC, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // arg+results size, uintptr/ptr/bool/unsafe.Pointer
	call.SetArgs4(args[0], args[1], args[2], args[3])
	// mallocgc returns unsafe.Pointer instead of a typed pointer,
	// so update the result types to match.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
776
777
778
// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// non-nil) and then sets it as the .Heapaddr field of the ONAME node.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the "&name" pseudo-variable that holds the heap address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
797
798
799 func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
800 if typ.Size() == 0 {
801 return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
802 }
803 if rtype == nil {
804 rtype = s.reflectType(typ)
805 }
806 return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
807 }
808
809 func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
810 if !n.Type().IsPtr() {
811 s.Fatalf("expected pointer type: %v", n.Type())
812 }
813 elem, rtypeExpr := n.Type().Elem(), n.ElemRType
814 if count != nil {
815 if !elem.IsArray() {
816 s.Fatalf("expected array type: %v", elem)
817 }
818 elem, rtypeExpr = elem.Elem(), n.ElemElemRType
819 }
820 size := elem.Size()
821
822 if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
823 return
824 }
825 if count == nil {
826 count = s.constInt(types.Types[types.TUINTPTR], 1)
827 }
828 if count.Type.Size() != s.config.PtrSize {
829 s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
830 }
831 var rtype *ssa.Value
832 if rtypeExpr != nil {
833 rtype = s.expr(rtypeExpr)
834 } else {
835 rtype = s.reflectType(elem)
836 }
837 s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
838 }
839
840
841
// reflectType returns an SSA value representing a pointer to typ's
// reflection type descriptor (the linker symbol for the type).
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
848
849 func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
850
851 fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
852 targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
853 if err != nil {
854 writer.Logf("cannot read sources for function %v: %v", fn, err)
855 }
856
857
858 var inlFns []*ssa.FuncLines
859 for _, fi := range ssaDumpInlined {
860 elno := fi.Endlineno
861 fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
862 fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
863 if err != nil {
864 writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
865 continue
866 }
867 inlFns = append(inlFns, fnLines)
868 }
869
870 slices.SortFunc(inlFns, ssa.ByTopoCmp)
871 if targetFn != nil {
872 inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
873 }
874
875 writer.WriteSources("sources", inlFns)
876 }
877
878 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
879 f, err := os.Open(os.ExpandEnv(file))
880 if err != nil {
881 return nil, err
882 }
883 defer f.Close()
884 var lines []string
885 ln := uint(1)
886 scanner := bufio.NewScanner(f)
887 for scanner.Scan() && ln <= end {
888 if ln >= start {
889 lines = append(lines, scanner.Text())
890 }
891 ln++
892 }
893 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
894 }
895
896
897
898
// updateUnsetPredPos propagates the earliest-value position information
// for b towards all of b's predecessors that need a position, and
// recurses on that predecessor if its position is updated. b should have
// a non-temporary position.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		// Compute bestPos lazily, once, from b's own position or the
		// first positioned value in b.
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Assume values are still in roughly textual order;
					// TODO: could also seek minimum position?
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // We do not expect long chains of these, thus recursion is okay.
	}
}
927
928
// openDeferInfo describes a single open-coded defer in a function.
type openDeferInfo struct {
	// The node representing the call of the defer.
	n *ir.CallExpr
	// If defer call is closure call, the address of the argtmp where the
	// closure is stored.
	closure *ssa.Value
	// The node representing the argtmp where the closure is stored - used for
	// function, method, or interface call, to store a closure that panic
	// processing can use for this defer.
	closureNode *ir.Name
}
940
// state holds the working state used while converting a function's
// AST-based IR to SSA form.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	// TODO: keep a single varnum map, then make all of these maps slices instead?
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the current block.
	// This map exists only to track forward references, so phis can be
	// inserted for them later.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables on the stack.
	decladdrs map[*ir.Name]*ssa.Value

	// starting values. Memory, stack pointer, and globals pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value
	// value representing address of where deferBits autotmp is stored
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack. The current line number is top of stack
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number.
	// Used to deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order. Hence, at exit we should run these defers in reverse
	// order of this list.
	openDefers []*openDeferInfo

	// If doing open-coded defers, blocks of the last generated defer exit
	// code, so it can be reused for later exits with the same defers.
	lastDeferExit       *ssa.Block // Entry block of last defer exit code we generated
	lastDeferFinalBlock *ssa.Block // Final block of last defer exit code we generated
	lastDeferCount      int        // Number of defers encountered at that point

	// the most recently emitted call value (used to connect call results).
	prevCall *ssa.Value

	// pendingHeapAllocations are OffPtr values into a shared newobject
	// call, for small pointer-free locals whose allocations will be
	// merged by flushPendingHeapAllocations.
	pendingHeapAllocations []*ssa.Value

	// first argument of append calls seen so far (candidate set used
	// by append handling).
	appendTargets map[ir.Node]bool
}
1024
// funcLine identifies a (function, source position) pair; used as the key
// for deduplicating panic-call blocks (see state.panics).
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1030
// ssaLabel records the blocks associated with a Go label.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1036
1037
1038 func (s *state) label(sym *types.Sym) *ssaLabel {
1039 lab := s.labels[sym.Name]
1040 if lab == nil {
1041 lab = new(ssaLabel)
1042 s.labels[sym.Name] = lab
1043 }
1044 return lab
1045 }
1046
// Logf forwards to the function's logger.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
// Log reports whether logging is enabled for the function.
func (s *state) Log() bool { return s.f.Log() }
// Fatalf reports a compiler error at the current source position and exits.
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
// Warnl emits a compiler warning at the given position.
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
// Debug_checknil reports whether nil-check debugging is enabled.
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
1054
// ssaMarker returns an *ir.Name used only as a unique key for
// pseudo-variables (memory, pointer, length, etc.) in the vars maps.
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1058
var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1072
1073
// startBlock sets the current block we're generating code in to b.
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	clear(s.fwdVars)
}
1082
1083
1084
1085
// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}

	// Any aggregated heap allocations must be materialized before the
	// block's variable state is frozen.
	s.flushPendingHeapAllocations()

	// Record the variables defined at the end of this block, indexed by
	// block ID, for phi insertion later.
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// The block has no position of its own; leave it unset so a
		// position can be inherited later (see updateUnsetPredPos and
		// code generation).
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
1110
1111
// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// the frontend may emit node with line number missing,
		// use the parent line number in this case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}
1126
1127
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}
1131
1132
// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1136
1137
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}
1141
1142
// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}
1146
1147
// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}
1151
1152
// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}
1156
1157
// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}
1161
1162
1163
1164
// newValue1Apos adds a new value with one argument and an aux value to
// the current block. isStmt determines whether the created values may be
// a statement or not (i.e., false means never, yes means maybe).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}
1171
1172
// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}
1176
1177
// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}
1181
1182
// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}
1186
1187
1188
1189
// newValue2Apos adds a new value with two arguments and an aux value to
// the current block. isStmt determines whether the created values may be
// a statement or not (i.e., false means never, yes means maybe).
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}
1196
1197
1198 func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
1199 return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
1200 }
1201
1202
1203 func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
1204 return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
1205 }
1206
1207
1208 func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
1209 return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
1210 }
1211
1212
1213 func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
1214 return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
1215 }
1216
1217
1218
1219
1220 func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
1221 if isStmt {
1222 return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
1223 }
1224 return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
1225 }
1226
1227
1228 func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
1229 return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
1230 }
1231
1232
1233 func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
1234 return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
1235 }
1236
1237 func (s *state) entryBlock() *ssa.Block {
1238 b := s.f.Entry
1239 if base.Flag.N > 0 && s.curBlock != nil {
1240
1241
1242
1243
1244 b = s.curBlock
1245 }
1246 return b
1247 }
1248
1249
1250 func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
1251 return s.entryBlock().NewValue0(src.NoXPos, op, t)
1252 }
1253
1254
1255 func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
1256 return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
1257 }
1258
1259
1260 func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1261 return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
1262 }
1263
1264
1265 func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
1266 return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
1267 }
1268
1269
1270 func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
1271 return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
1272 }
1273
1274
1275 func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1276 return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
1277 }
1278
1279
1280 func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
1281 return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
1282 }
1283
1284
1285 func (s *state) constSlice(t *types.Type) *ssa.Value {
1286 return s.f.ConstSlice(t)
1287 }
1288 func (s *state) constInterface(t *types.Type) *ssa.Value {
1289 return s.f.ConstInterface(t)
1290 }
1291 func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
1292 func (s *state) constEmptyString(t *types.Type) *ssa.Value {
1293 return s.f.ConstEmptyString(t)
1294 }
1295 func (s *state) constBool(c bool) *ssa.Value {
1296 return s.f.ConstBool(types.Types[types.TBOOL], c)
1297 }
1298 func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
1299 return s.f.ConstInt8(t, c)
1300 }
1301 func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
1302 return s.f.ConstInt16(t, c)
1303 }
1304 func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
1305 return s.f.ConstInt32(t, c)
1306 }
1307 func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
1308 return s.f.ConstInt64(t, c)
1309 }
1310 func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
1311 return s.f.ConstFloat32(t, c)
1312 }
1313 func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
1314 return s.f.ConstFloat64(t, c)
1315 }
1316 func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
1317 if s.config.PtrSize == 8 {
1318 return s.constInt64(t, c)
1319 }
1320 if int64(int32(c)) != c {
1321 s.Fatalf("integer constant too big %d", c)
1322 }
1323 return s.constInt32(t, int32(c))
1324 }
1325
1326
1327
1328 func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
1329 if s.softFloat {
1330 if c, ok := s.sfcall(op, arg); ok {
1331 return c
1332 }
1333 }
1334 return s.newValue1(op, t, arg)
1335 }
1336 func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
1337 if s.softFloat {
1338 if c, ok := s.sfcall(op, arg0, arg1); ok {
1339 return c
1340 }
1341 }
1342 return s.newValue2(op, t, arg0, arg1)
1343 }
1344
// instrumentKind categorizes the sanitizer instrumentation applied to a
// memory operation: a read, a write, or a move (read+write, used by msan).
type instrumentKind uint8

const (
	// Typed as instrumentKind (rather than untyped iota) so that passing
	// one of these where a plain integer is expected fails to compile.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1352
// instrument emits sanitizer instrumentation (race/msan/asan) for a single
// access of kind to the w=t.Size() bytes at addr. It is the one-address
// form of instrument2.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1356
1357
1358
1359
1360 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1361 if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
1362 s.instrument(t, addr, kind)
1363 return
1364 }
1365 for _, f := range t.Fields() {
1366 if f.Sym.IsBlank() {
1367 continue
1368 }
1369 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1370 s.instrumentFields(f.Type, offptr, kind)
1371 }
1372 }
1373
1374 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1375 if base.Flag.MSan {
1376 s.instrument2(t, dst, src, instrumentMove)
1377 } else {
1378 s.instrument(t, src, instrumentRead)
1379 s.instrument(t, dst, instrumentWrite)
1380 }
1381 }
1382
// instrument2 emits a sanitizer runtime call for an access of kind to the
// t.Size() bytes at addr (and, for instrumentMove under msan, the second
// address addr2). The choice of runtime hook depends on which sanitizer is
// enabled; the if/else chain below is priority-ordered (msan, then race,
// then asan) and must not be reordered.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	// Nothing to do when instrumentation is disabled for this function.
	if !s.instrumentMemory {
		return
	}

	// Zero-sized accesses touch no memory; skip them.
	w := t.Size()
	if w == 0 {
		return
	}

	// Addresses the sanitizers treat as always-safe need no checks.
	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	// A second address only makes sense for a move.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		// msan: dedicated read/write/move hooks, all width-taking.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// race, multi-component object: use the range variants, which
		// take an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// race, single-component object: the plain hooks infer the width
		// from the access, so none is passed.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		// asan: width-taking read/write hooks.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		// instrumentMemory was set with no sanitizer flag enabled.
		panic("unreachable")
	}

	// Assemble the argument list: address, optional second address,
	// optional width, then emit the runtime call.
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1463
// load returns the value of type t loaded from src, emitting sanitizer
// read instrumentation first (the instrumentation calls must precede the
// load in the memory chain).
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}
1468
1469 func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
1470 return s.newValue2(ssa.OpLoad, t, src, s.mem())
1471 }
1472
1473 func (s *state) store(t *types.Type, dst, val *ssa.Value) {
1474 s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
1475 }
1476
1477 func (s *state) zero(t *types.Type, dst *ssa.Value) {
1478 s.instrument(t, dst, instrumentWrite)
1479 store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
1480 store.Aux = t
1481 s.vars[memVar] = store
1482 }
1483
// move emits code to copy t.Size() bytes from src to dst, assuming the two
// regions do not overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
// moveWhichMayOverlap emits code to copy t.Size() bytes from src to dst.
// If mayOverlap is true the regions may partially overlap, and for large
// multi-element arrays the copy is routed through the runtime's
// memmove-style helpers (which handle overlap) instead of an inline
// OpMove, which does not.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	// Only arrays with more than one element need the overlap-safe path:
	// single values are copied atomically enough by OpMove, and small
	// copies that IsInlinableMemmove accepts are safe to inline.
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		if t.HasPointers() {
			// Pointer-bearing memory must go through typedmemmove so the
			// write barrier machinery sees the stores.
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// Record the position for write-barrier diagnostics.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			// No pointers: plain memmove with an explicit byte count.
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	// Fast path: inline move, annotated with the element type.
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1530
1531
1532 func (s *state) stmtList(l ir.Nodes) {
1533 for _, n := range l {
1534 s.stmt(n)
1535 }
1536 }
1537
1538
// stmt converts the statement n to SSA and appends it to the function
// being built, dispatching on the statement's op. Control-flow statements
// end the current block and start new ones as needed.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// No current block means this statement is unreachable; skip it.
	// Labels are still processed so they can serve as jump targets.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL:
		// Fallthrough: nothing to emit; handled when the switch was lowered.

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			// These runtime functions never return; terminate the block
			// with an exit so no fallthrough edge is created.
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// -d=defer: report which defer strategy was chosen.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			// Non-escaping defers without an explicit DeferAt can keep
			// their record on the stack.
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		// v, ok := x.(T) — two-result type assertion.
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSAable result: dottype produced a load; strip it and
			// assign by dereference, verifying the memory is still live.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// a, b := f() where f is an intrinsic returning a tuple.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		// Heap-escaping declarations need their backing allocation.
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Blank labels cannot be jump targets; nothing to do.
			break
		}
		lab := s.label(sym)
		// Create the target block if no goto has made it yet.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}
		// Fall through from the preceding code into the labeled block.
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label
		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}
		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // goto is itself a statement
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a variable; no effect.
			return
		}

		// A *p = *q assignment may copy between overlapping regions,
		// unless the source is known to be string data (strings are
		// immutable, so they cannot alias a destination being written).
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				mayOverlap = false
			}
		}

		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Composite literals reaching SSA must be all-zero;
				// treat them as an assignment of the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// x = append(x, ...): when x is not SSA-able we can
				// update the slice in place (len-only update).
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate the rhs for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		// For non-SSAable types we assign by address (deref); otherwise
		// by value. A nil rhs means the zero value.
		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		// x = x[i:j:k]: components that provably do not change can be
		// skipped, avoiding unnecessary stores (and write barriers).
		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// A constant zero low bound is the same as no low bound.
				i = nil
			}
			// With no low bound the pointer is unchanged; with no high
			// (resp. max) bound the len (resp. cap) is unchanged too.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty branches jump straight to the join block.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // jump to the callee rather than returning
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// Unlabeled: target the innermost enclosing loop/switch.
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// Labeled: target the named construct's recorded blocks.
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the branch is itself a statement
		b.AddEdgeTo(to)

	case ir.OFOR:
		// Build the classic four-block loop shape:
		// cond -> body -> incr -> cond, with end as the exit.
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // per-iteration vars were lowered earlier
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The body block carries the loop's position.
		bBody.Pos = n.Pos()

		// Enter the loop at the condition check.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// Generate the condition (an absent condition means "true").
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set break/continue targets for the body, saving the outer ones.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// Labeled loop: record targets for labeled break/continue.
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// Generate the body.
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Restore the outer break/continue targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// The body falls through to the increment (if still reachable).
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// Generate the post statement and loop back to the condition.
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// Borrow the condition's position if the increment has none,
			// so the back edge has a usable position.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// Both arrive pre-lowered: Compiled holds the statement list;
		// only break handling remains to be wired up here.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// Labeled switch/select: record the labeled break target.
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// Generate the lowered body.
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// If control can still fall out of the body, the lowered code
		// guarantees it never actually does; terminate with an exit.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// jt dispatches on the index; bEnd catches out-of-range values.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Evaluate the index expression.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Widen the index to uintptr for the table computation.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Cases are sorted, so the first and last give the value range.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		// Range check: (idx-min) <= (max-min), using unsigned wraparound
		// so a single comparison covers both bounds.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)
		b.AddEdgeTo(bEnd)
		b.Likely = ssa.BranchLikely

		// Build the jump table block itself.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			// Mask the index to defeat speculative out-of-range access.
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill the table: unmentioned slots fall through to bEnd.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Table entries are offset by min.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// On supported targets, probe a per-descriptor cache before
		// falling back to the runtime's InterfaceSwitch call.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// The cache probe needs an atomic pointer load intrinsic.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-width arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Atomically load the cache pointer from the descriptor
			// (the runtime may swap in a new cache concurrently).
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Zero-extend the hash to uintptr width; it doubles as the
			// probe cursor, advanced by one per iteration below.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// The cache's first word is the entry-index mask.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// Open-addressing probe loop: compute the entry address
			// from the (masked) cursor; each entry is 3 pointers wide.
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// Advance the cursor for the next probe.
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Entry word 0 is the cached type; a match is a hit.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil type marks an empty slot: the probe failed, go to
			// the runtime; otherwise keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Hit: entry words 1 and 2 hold the case index and itab.
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Miss path continues below with the runtime call.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Join the cache-hit and runtime-call paths.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// Emit the nil check for its side effect only.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2233
2234
2235
// shareDeferExits, if true, lets exit() reuse the previous open-coded defer
// exit block when the set of open defers is unchanged, instead of emitting
// a fresh exit sequence per return. Disabled — presumably because sharing
// exits loses per-return position information; confirm before enabling.
const shareDeferExits = false
2237
2238
2239
2240
2241 func (s *state) exit() *ssa.Block {
2242 if s.hasdefer {
2243 if s.hasOpenDefers {
2244 if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
2245 if s.curBlock.Kind != ssa.BlockPlain {
2246 panic("Block for an exit should be BlockPlain")
2247 }
2248 s.curBlock.AddEdgeTo(s.lastDeferExit)
2249 s.endBlock()
2250 return s.lastDeferFinalBlock
2251 }
2252 s.openDeferExit()
2253 } else {
2254
2255
2256
2257
2258
2259
2260
2261
2262 s.pushLine(s.curfn.Endlineno)
2263 s.rtcall(ir.Syms.Deferreturn, true, nil)
2264 s.popLine()
2265 }
2266 }
2267
2268
2269
2270 resultFields := s.curfn.Type().Results()
2271 results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
2272
2273 for i, f := range resultFields {
2274 n := f.Nname.(*ir.Name)
2275 if s.canSSA(n) {
2276 if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
2277
2278 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2279 }
2280 results[i] = s.variable(n, n.Type())
2281 } else if !n.OnStack() {
2282
2283 if n.Type().HasPointers() {
2284 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2285 }
2286 ha := s.expr(n.Heapaddr)
2287 s.instrumentFields(n.Type(), ha, instrumentRead)
2288 results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
2289 } else {
2290
2291
2292
2293 results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
2294 }
2295 }
2296
2297
2298
2299
2300 if s.instrumentEnterExit {
2301 s.rtcall(ir.Syms.Racefuncexit, true, nil)
2302 }
2303
2304 results[len(results)-1] = s.mem()
2305 m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
2306 m.AddArgs(results...)
2307
2308 b := s.endBlock()
2309 b.Kind = ssa.BlockRet
2310 b.SetControl(m)
2311 if s.hasdefer && s.hasOpenDefers {
2312 s.lastDeferFinalBlock = b
2313 }
2314 return b
2315 }
2316
// opAndType is the key type for opToSSA: a source-level operator paired
// with the kind of its operand type, which together select one generic
// SSA opcode.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2321
// opToSSA maps a (source operator, operand kind) pair to the generic SSA
// opcode implementing it. Signed and unsigned integers of the same width
// share an op where the bit pattern of the result is identical (add, sub,
// mul, and, or, xor, neg, com, eq, ne) and diverge where signedness
// matters (div, mod, ordered comparisons).
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}: ssa.OpAdd8,
	{ir.OADD, types.TUINT8}: ssa.OpAdd8,
	{ir.OADD, types.TINT16}: ssa.OpAdd16,
	{ir.OADD, types.TUINT16}: ssa.OpAdd16,
	{ir.OADD, types.TINT32}: ssa.OpAdd32,
	{ir.OADD, types.TUINT32}: ssa.OpAdd32,
	{ir.OADD, types.TINT64}: ssa.OpAdd64,
	{ir.OADD, types.TUINT64}: ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}: ssa.OpSub8,
	{ir.OSUB, types.TUINT8}: ssa.OpSub8,
	{ir.OSUB, types.TINT16}: ssa.OpSub16,
	{ir.OSUB, types.TUINT16}: ssa.OpSub16,
	{ir.OSUB, types.TINT32}: ssa.OpSub32,
	{ir.OSUB, types.TUINT32}: ssa.OpSub32,
	{ir.OSUB, types.TINT64}: ssa.OpSub64,
	{ir.OSUB, types.TUINT64}: ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
	{ir.ONEG, types.TINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
	{ir.ONEG, types.TINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
	{ir.ONEG, types.TINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}: ssa.OpMul8,
	{ir.OMUL, types.TUINT8}: ssa.OpMul8,
	{ir.OMUL, types.TINT16}: ssa.OpMul16,
	{ir.OMUL, types.TUINT16}: ssa.OpMul16,
	{ir.OMUL, types.TINT32}: ssa.OpMul32,
	{ir.OMUL, types.TUINT32}: ssa.OpMul32,
	{ir.OMUL, types.TINT64}: ssa.OpMul64,
	{ir.OMUL, types.TUINT64}: ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division and modulus need signed/unsigned variants.
	{ir.ODIV, types.TINT8}: ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}: ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}: ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}: ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}: ssa.OpMod8,
	{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
	{ir.OMOD, types.TINT16}: ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}: ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}: ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}: ssa.OpAnd8,
	{ir.OAND, types.TUINT8}: ssa.OpAnd8,
	{ir.OAND, types.TINT16}: ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}: ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}: ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}: ssa.OpOr8,
	{ir.OOR, types.TUINT8}: ssa.OpOr8,
	{ir.OOR, types.TINT16}: ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}: ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}: ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}: ssa.OpXor8,
	{ir.OXOR, types.TUINT8}: ssa.OpXor8,
	{ir.OXOR, types.TINT16}: ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}: ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}: ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality: reference-like kinds (func, map, chan, pointers) compare
	// as pointers; interfaces and slices have dedicated ops.
	{ir.OEQ, types.TBOOL}: ssa.OpEqB,
	{ir.OEQ, types.TINT8}: ssa.OpEq8,
	{ir.OEQ, types.TUINT8}: ssa.OpEq8,
	{ir.OEQ, types.TINT16}: ssa.OpEq16,
	{ir.OEQ, types.TUINT16}: ssa.OpEq16,
	{ir.OEQ, types.TINT32}: ssa.OpEq32,
	{ir.OEQ, types.TUINT32}: ssa.OpEq32,
	{ir.OEQ, types.TINT64}: ssa.OpEq64,
	{ir.OEQ, types.TUINT64}: ssa.OpEq64,
	{ir.OEQ, types.TINTER}: ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,

	{ir.ONE, types.TBOOL}: ssa.OpNeqB,
	{ir.ONE, types.TINT8}: ssa.OpNeq8,
	{ir.ONE, types.TUINT8}: ssa.OpNeq8,
	{ir.ONE, types.TINT16}: ssa.OpNeq16,
	{ir.ONE, types.TUINT16}: ssa.OpNeq16,
	{ir.ONE, types.TINT32}: ssa.OpNeq32,
	{ir.ONE, types.TUINT32}: ssa.OpNeq32,
	{ir.ONE, types.TINT64}: ssa.OpNeq64,
	{ir.ONE, types.TUINT64}: ssa.OpNeq64,
	{ir.ONE, types.TINTER}: ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,

	// Ordered comparisons distinguish signed from unsigned operands.
	{ir.OLT, types.TINT8}: ssa.OpLess8,
	{ir.OLT, types.TUINT8}: ssa.OpLess8U,
	{ir.OLT, types.TINT16}: ssa.OpLess16,
	{ir.OLT, types.TUINT16}: ssa.OpLess16U,
	{ir.OLT, types.TINT32}: ssa.OpLess32,
	{ir.OLT, types.TUINT32}: ssa.OpLess32U,
	{ir.OLT, types.TINT64}: ssa.OpLess64,
	{ir.OLT, types.TUINT64}: ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}: ssa.OpLeq8,
	{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
	{ir.OLE, types.TINT16}: ssa.OpLeq16,
	{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
	{ir.OLE, types.TINT32}: ssa.OpLeq32,
	{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
	{ir.OLE, types.TINT64}: ssa.OpLeq64,
	{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2493
2494 func (s *state) concreteEtype(t *types.Type) types.Kind {
2495 e := t.Kind()
2496 switch e {
2497 default:
2498 return e
2499 case types.TINT:
2500 if s.config.PtrSize == 8 {
2501 return types.TINT64
2502 }
2503 return types.TINT32
2504 case types.TUINT:
2505 if s.config.PtrSize == 8 {
2506 return types.TUINT64
2507 }
2508 return types.TUINT32
2509 case types.TUINTPTR:
2510 if s.config.PtrSize == 8 {
2511 return types.TUINT64
2512 }
2513 return types.TUINT32
2514 }
2515 }
2516
2517 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2518 etype := s.concreteEtype(t)
2519 x, ok := opToSSA[opAndType{op, etype}]
2520 if !ok {
2521 s.Fatalf("unhandled binary op %v %s", op, etype)
2522 }
2523 return x
2524 }
2525
// opAndTwoTypes is the key type of shiftOpToSSA: an IR shift op
// together with the kind of the shifted value (etype1) and the kind
// of the shift count (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2531
// twoTypes is the key type of the floating-point conversion tables:
// the source kind (etype1) and destination kind (etype2) of a conversion.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2536
// twoOpsAndType describes how to lower a conversion as two SSA ops
// applied in sequence (op1 then op2), with intermediateType as the
// type of the value between them.
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2542
// fpConvOpToSSA maps the (source, destination) kinds of a conversion
// involving floating point to the two SSA ops implementing it.
// Entries containing ssa.OpInvalid mark 64-bit-unsigned cases that
// have no two-op lowering here; conv handles them with call-style
// helpers (or an override table on capable targets).
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32: widen (if needed) to a signed register type, then convert.
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},
	// signed int -> float64.
	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},
	// float32 -> signed int: convert, then truncate if needed.
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},
	// float64 -> signed int.
	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},
	// unsigned int -> float32: zero-extend into a wider signed type so the
	// signed conversion ops give the right result; uint32 goes through int64,
	// and uint64 is marked OpInvalid (handled specially in conv).
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
	// unsigned int -> float64.
	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
	// float32 -> unsigned int: convert via a wider signed type, then truncate;
	// uint64 is marked OpInvalid (handled specially in conv).
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},
	// float64 -> unsigned int.
	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float conversions and same-size rounding.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2591
2592
2593
// fpConvOpToSSA32 overrides fpConvOpToSSA entries on 32-bit register
// targets (except MIPS and softfloat; see conv) that have direct
// uint32 <-> float conversion instructions.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2600
2601
// uint64fpConvOpToSSA overrides fpConvOpToSSA entries on targets with
// direct uint64 <-> float conversion instructions (ARM64, Wasm, S390X)
// and under softfloat; see conv.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2608
// shiftOpToSSA maps an IR shift op plus the kind of the shifted value
// (etype1) and the kind of the unsigned shift count (etype2) to the SSA
// opcode implementing it. Right shifts distinguish signed (arithmetic,
// RshNxM) from unsigned (logical, RshNUxM) value types.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	// left shift of 8-bit value
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,
	// left shift of 16-bit value
	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,
	// left shift of 32-bit value
	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,
	// left shift of 64-bit value
	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,
	// right shift of 8-bit value (signed: arithmetic, unsigned: logical)
	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,
	// right shift of 16-bit value
	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,
	// right shift of 32-bit value
	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,
	// right shift of 64-bit value
	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2682
2683 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2684 etype1 := s.concreteEtype(t)
2685 etype2 := s.concreteEtype(u)
2686 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2687 if !ok {
2688 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2689 }
2690 return x
2691 }
2692
2693 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2694 if s.config.PtrSize == 4 {
2695 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2696 }
2697 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2698 }
2699
// conv converts the value v, of type ft, to type tt, emitting any SSA
// values needed, and returns the converted value. n supplies position
// and diagnostic context.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// bool -> uint8 has a dedicated conversion op.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: copy, truncate, or extend by size,
		// selecting the op via 10*fromSize + toSize.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// truncation
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// sign extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// zero extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert the real and imaginary parts
		// independently (sizes here are the complex sizes, 8 or 16 bytes).
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() { // ft is not complex here
		// Convert to the element float type, then build a complex
		// value with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		// Look up the two-op lowering, then let target-specific
		// tables override it where better instructions exist.
		conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			// Hard-float MIPS: unsigned 32-bit conversions are emitted
			// through helper routines instead of the tables.
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, skipping copies.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}
		// An OpInvalid entry: uint64 <-> float on a target without a
		// direct instruction; emit the helper-based expansion.
		if ft.IsInteger() {
			// uint64 -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}
		// float -> uint64
		if ft.Size() == 4 {
			return s.float32ToUint64(n, v, ft, tt)
		}
		if ft.Size() == 8 {
			return s.float64ToUint64(n, v, ft, tt)
		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2875
2876
2877 func (s *state) expr(n ir.Node) *ssa.Value {
2878 return s.exprCheckPtr(n, true)
2879 }
2880
2881 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2882 if ir.HasUniquePos(n) {
2883
2884
2885 s.pushLine(n.Pos())
2886 defer s.popLine()
2887 }
2888
2889 s.stmtList(n.Init())
2890 switch n.Op() {
2891 case ir.OBYTES2STRTMP:
2892 n := n.(*ir.ConvExpr)
2893 slice := s.expr(n.X)
2894 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2895 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2896 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2897 case ir.OSTR2BYTESTMP:
2898 n := n.(*ir.ConvExpr)
2899 str := s.expr(n.X)
2900 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2901 if !n.NonNil() {
2902
2903
2904
2905 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2906 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2907 ptr = s.ternary(cond, ptr, zerobase)
2908 }
2909 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2910 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2911 case ir.OCFUNC:
2912 n := n.(*ir.UnaryExpr)
2913 aux := n.X.(*ir.Name).Linksym()
2914
2915
2916 if aux.ABI() != obj.ABIInternal {
2917 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2918 }
2919 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2920 case ir.ONAME:
2921 n := n.(*ir.Name)
2922 if n.Class == ir.PFUNC {
2923
2924 sym := staticdata.FuncLinksym(n)
2925 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2926 }
2927 if s.canSSA(n) {
2928 return s.variable(n, n.Type())
2929 }
2930 return s.load(n.Type(), s.addr(n))
2931 case ir.OLINKSYMOFFSET:
2932 n := n.(*ir.LinksymOffsetExpr)
2933 return s.load(n.Type(), s.addr(n))
2934 case ir.ONIL:
2935 n := n.(*ir.NilExpr)
2936 t := n.Type()
2937 switch {
2938 case t.IsSlice():
2939 return s.constSlice(t)
2940 case t.IsInterface():
2941 return s.constInterface(t)
2942 default:
2943 return s.constNil(t)
2944 }
2945 case ir.OLITERAL:
2946 switch u := n.Val(); u.Kind() {
2947 case constant.Int:
2948 i := ir.IntVal(n.Type(), u)
2949 switch n.Type().Size() {
2950 case 1:
2951 return s.constInt8(n.Type(), int8(i))
2952 case 2:
2953 return s.constInt16(n.Type(), int16(i))
2954 case 4:
2955 return s.constInt32(n.Type(), int32(i))
2956 case 8:
2957 return s.constInt64(n.Type(), i)
2958 default:
2959 s.Fatalf("bad integer size %d", n.Type().Size())
2960 return nil
2961 }
2962 case constant.String:
2963 i := constant.StringVal(u)
2964 if i == "" {
2965 return s.constEmptyString(n.Type())
2966 }
2967 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
2968 case constant.Bool:
2969 return s.constBool(constant.BoolVal(u))
2970 case constant.Float:
2971 f, _ := constant.Float64Val(u)
2972 switch n.Type().Size() {
2973 case 4:
2974 return s.constFloat32(n.Type(), f)
2975 case 8:
2976 return s.constFloat64(n.Type(), f)
2977 default:
2978 s.Fatalf("bad float size %d", n.Type().Size())
2979 return nil
2980 }
2981 case constant.Complex:
2982 re, _ := constant.Float64Val(constant.Real(u))
2983 im, _ := constant.Float64Val(constant.Imag(u))
2984 switch n.Type().Size() {
2985 case 8:
2986 pt := types.Types[types.TFLOAT32]
2987 return s.newValue2(ssa.OpComplexMake, n.Type(),
2988 s.constFloat32(pt, re),
2989 s.constFloat32(pt, im))
2990 case 16:
2991 pt := types.Types[types.TFLOAT64]
2992 return s.newValue2(ssa.OpComplexMake, n.Type(),
2993 s.constFloat64(pt, re),
2994 s.constFloat64(pt, im))
2995 default:
2996 s.Fatalf("bad complex size %d", n.Type().Size())
2997 return nil
2998 }
2999 default:
3000 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3001 return nil
3002 }
3003 case ir.OCONVNOP:
3004 n := n.(*ir.ConvExpr)
3005 to := n.Type()
3006 from := n.X.Type()
3007
3008
3009
3010 x := s.expr(n.X)
3011 if to == from {
3012 return x
3013 }
3014
3015
3016
3017
3018
3019 if to.IsPtrShaped() != from.IsPtrShaped() {
3020 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3021 }
3022
3023 v := s.newValue1(ssa.OpCopy, to, x)
3024
3025
3026 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3027 return v
3028 }
3029
3030
3031 if from.Kind() == to.Kind() {
3032 return v
3033 }
3034
3035
3036 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3037 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3038 s.checkPtrAlignment(n, v, nil)
3039 }
3040 return v
3041 }
3042
3043
3044 mt := types.NewPtr(reflectdata.MapType())
3045 if to.Kind() == types.TMAP && from == mt {
3046 return v
3047 }
3048
3049 types.CalcSize(from)
3050 types.CalcSize(to)
3051 if from.Size() != to.Size() {
3052 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3053 return nil
3054 }
3055 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3056 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3057 return nil
3058 }
3059
3060 if base.Flag.Cfg.Instrumenting {
3061
3062
3063
3064 return v
3065 }
3066
3067 if etypesign(from.Kind()) == 0 {
3068 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3069 return nil
3070 }
3071
3072
3073 return v
3074
3075 case ir.OCONV:
3076 n := n.(*ir.ConvExpr)
3077 x := s.expr(n.X)
3078 return s.conv(n, x, n.X.Type(), n.Type())
3079
3080 case ir.ODOTTYPE:
3081 n := n.(*ir.TypeAssertExpr)
3082 res, _ := s.dottype(n, false)
3083 return res
3084
3085 case ir.ODYNAMICDOTTYPE:
3086 n := n.(*ir.DynamicTypeAssertExpr)
3087 res, _ := s.dynamicDottype(n, false)
3088 return res
3089
3090
3091 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3092 n := n.(*ir.BinaryExpr)
3093 a := s.expr(n.X)
3094 b := s.expr(n.Y)
3095 if n.X.Type().IsComplex() {
3096 pt := types.FloatForComplex(n.X.Type())
3097 op := s.ssaOp(ir.OEQ, pt)
3098 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3099 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3100 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3101 switch n.Op() {
3102 case ir.OEQ:
3103 return c
3104 case ir.ONE:
3105 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3106 default:
3107 s.Fatalf("ordered complex compare %v", n.Op())
3108 }
3109 }
3110
3111
3112 op := n.Op()
3113 switch op {
3114 case ir.OGE:
3115 op, a, b = ir.OLE, b, a
3116 case ir.OGT:
3117 op, a, b = ir.OLT, b, a
3118 }
3119 if n.X.Type().IsFloat() {
3120
3121 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3122 }
3123
3124 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3125 case ir.OMUL:
3126 n := n.(*ir.BinaryExpr)
3127 a := s.expr(n.X)
3128 b := s.expr(n.Y)
3129 if n.Type().IsComplex() {
3130 mulop := ssa.OpMul64F
3131 addop := ssa.OpAdd64F
3132 subop := ssa.OpSub64F
3133 pt := types.FloatForComplex(n.Type())
3134 wt := types.Types[types.TFLOAT64]
3135
3136 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3137 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3138 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3139 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3140
3141 if pt != wt {
3142 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3143 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3144 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3145 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3146 }
3147
3148 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3149 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3150
3151 if pt != wt {
3152 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3153 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3154 }
3155
3156 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3157 }
3158
3159 if n.Type().IsFloat() {
3160 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3161 }
3162
3163 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3164
3165 case ir.ODIV:
3166 n := n.(*ir.BinaryExpr)
3167 a := s.expr(n.X)
3168 b := s.expr(n.Y)
3169 if n.Type().IsComplex() {
3170
3171
3172
3173 mulop := ssa.OpMul64F
3174 addop := ssa.OpAdd64F
3175 subop := ssa.OpSub64F
3176 divop := ssa.OpDiv64F
3177 pt := types.FloatForComplex(n.Type())
3178 wt := types.Types[types.TFLOAT64]
3179
3180 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3181 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3182 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3183 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3184
3185 if pt != wt {
3186 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3187 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3188 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3189 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3190 }
3191
3192 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3193 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3194 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3195
3196
3197
3198
3199
3200 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3201 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3202
3203 if pt != wt {
3204 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3205 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3206 }
3207 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3208 }
3209 if n.Type().IsFloat() {
3210 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3211 }
3212 return s.intDivide(n, a, b)
3213 case ir.OMOD:
3214 n := n.(*ir.BinaryExpr)
3215 a := s.expr(n.X)
3216 b := s.expr(n.Y)
3217 return s.intDivide(n, a, b)
3218 case ir.OADD, ir.OSUB:
3219 n := n.(*ir.BinaryExpr)
3220 a := s.expr(n.X)
3221 b := s.expr(n.Y)
3222 if n.Type().IsComplex() {
3223 pt := types.FloatForComplex(n.Type())
3224 op := s.ssaOp(n.Op(), pt)
3225 return s.newValue2(ssa.OpComplexMake, n.Type(),
3226 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3227 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3228 }
3229 if n.Type().IsFloat() {
3230 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3231 }
3232 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3233 case ir.OAND, ir.OOR, ir.OXOR:
3234 n := n.(*ir.BinaryExpr)
3235 a := s.expr(n.X)
3236 b := s.expr(n.Y)
3237 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3238 case ir.OANDNOT:
3239 n := n.(*ir.BinaryExpr)
3240 a := s.expr(n.X)
3241 b := s.expr(n.Y)
3242 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3243 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3244 case ir.OLSH, ir.ORSH:
3245 n := n.(*ir.BinaryExpr)
3246 a := s.expr(n.X)
3247 b := s.expr(n.Y)
3248 bt := b.Type
3249 if bt.IsSigned() {
3250 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3251 s.check(cmp, ir.Syms.Panicshift)
3252 bt = bt.ToUnsigned()
3253 }
3254 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3255 case ir.OANDAND, ir.OOROR:
3256
3257
3258
3259
3260
3261
3262
3263
3264
3265
3266
3267
3268
3269 n := n.(*ir.LogicalExpr)
3270 el := s.expr(n.X)
3271 s.vars[n] = el
3272
3273 b := s.endBlock()
3274 b.Kind = ssa.BlockIf
3275 b.SetControl(el)
3276
3277
3278
3279
3280
3281 bRight := s.f.NewBlock(ssa.BlockPlain)
3282 bResult := s.f.NewBlock(ssa.BlockPlain)
3283 if n.Op() == ir.OANDAND {
3284 b.AddEdgeTo(bRight)
3285 b.AddEdgeTo(bResult)
3286 } else if n.Op() == ir.OOROR {
3287 b.AddEdgeTo(bResult)
3288 b.AddEdgeTo(bRight)
3289 }
3290
3291 s.startBlock(bRight)
3292 er := s.expr(n.Y)
3293 s.vars[n] = er
3294
3295 b = s.endBlock()
3296 b.AddEdgeTo(bResult)
3297
3298 s.startBlock(bResult)
3299 return s.variable(n, types.Types[types.TBOOL])
3300 case ir.OCOMPLEX:
3301 n := n.(*ir.BinaryExpr)
3302 r := s.expr(n.X)
3303 i := s.expr(n.Y)
3304 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3305
3306
3307 case ir.ONEG:
3308 n := n.(*ir.UnaryExpr)
3309 a := s.expr(n.X)
3310 if n.Type().IsComplex() {
3311 tp := types.FloatForComplex(n.Type())
3312 negop := s.ssaOp(n.Op(), tp)
3313 return s.newValue2(ssa.OpComplexMake, n.Type(),
3314 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3315 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3316 }
3317 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3318 case ir.ONOT, ir.OBITNOT:
3319 n := n.(*ir.UnaryExpr)
3320 a := s.expr(n.X)
3321 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3322 case ir.OIMAG, ir.OREAL:
3323 n := n.(*ir.UnaryExpr)
3324 a := s.expr(n.X)
3325 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3326 case ir.OPLUS:
3327 n := n.(*ir.UnaryExpr)
3328 return s.expr(n.X)
3329
3330 case ir.OADDR:
3331 n := n.(*ir.AddrExpr)
3332 return s.addr(n.X)
3333
3334 case ir.ORESULT:
3335 n := n.(*ir.ResultExpr)
3336 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3337 panic("Expected to see a previous call")
3338 }
3339 which := n.Index
3340 if which == -1 {
3341 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3342 }
3343 return s.resultOfCall(s.prevCall, which, n.Type())
3344
3345 case ir.ODEREF:
3346 n := n.(*ir.StarExpr)
3347 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3348 return s.load(n.Type(), p)
3349
3350 case ir.ODOT:
3351 n := n.(*ir.SelectorExpr)
3352 if n.X.Op() == ir.OSTRUCTLIT {
3353
3354
3355
3356 if !ir.IsZero(n.X) {
3357 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3358 }
3359 return s.zeroVal(n.Type())
3360 }
3361
3362
3363
3364
3365 if ir.IsAddressable(n) && !s.canSSA(n) {
3366 p := s.addr(n)
3367 return s.load(n.Type(), p)
3368 }
3369 v := s.expr(n.X)
3370 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3371
3372 case ir.ODOTPTR:
3373 n := n.(*ir.SelectorExpr)
3374 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3375 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3376 return s.load(n.Type(), p)
3377
3378 case ir.OINDEX:
3379 n := n.(*ir.IndexExpr)
3380 switch {
3381 case n.X.Type().IsString():
3382 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3383
3384
3385
3386 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3387 }
3388 a := s.expr(n.X)
3389 i := s.expr(n.Index)
3390 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3391 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3392 ptrtyp := s.f.Config.Types.BytePtr
3393 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3394 if ir.IsConst(n.Index, constant.Int) {
3395 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3396 } else {
3397 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3398 }
3399 return s.load(types.Types[types.TUINT8], ptr)
3400 case n.X.Type().IsSlice():
3401 p := s.addr(n)
3402 return s.load(n.X.Type().Elem(), p)
3403 case n.X.Type().IsArray():
3404 if ssa.CanSSA(n.X.Type()) {
3405
3406 bound := n.X.Type().NumElem()
3407 a := s.expr(n.X)
3408 i := s.expr(n.Index)
3409 if bound == 0 {
3410
3411
3412 z := s.constInt(types.Types[types.TINT], 0)
3413 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3414
3415
3416 return s.zeroVal(n.Type())
3417 }
3418 len := s.constInt(types.Types[types.TINT], bound)
3419 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3420 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3421 }
3422 p := s.addr(n)
3423 return s.load(n.X.Type().Elem(), p)
3424 default:
3425 s.Fatalf("bad type for index %v", n.X.Type())
3426 return nil
3427 }
3428
3429 case ir.OLEN, ir.OCAP:
3430 n := n.(*ir.UnaryExpr)
3431
3432
3433 a := s.expr(n.X)
3434 t := n.X.Type()
3435 switch {
3436 case t.IsSlice():
3437 op := ssa.OpSliceLen
3438 if n.Op() == ir.OCAP {
3439 op = ssa.OpSliceCap
3440 }
3441 return s.newValue1(op, types.Types[types.TINT], a)
3442 case t.IsString():
3443 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3444 case t.IsMap(), t.IsChan():
3445 return s.referenceTypeBuiltin(n, a)
3446 case t.IsArray():
3447 return s.constInt(types.Types[types.TINT], t.NumElem())
3448 case t.IsPtr() && t.Elem().IsArray():
3449 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3450 default:
3451 s.Fatalf("bad type in len/cap: %v", t)
3452 return nil
3453 }
3454
3455 case ir.OSPTR:
3456 n := n.(*ir.UnaryExpr)
3457 a := s.expr(n.X)
3458 if n.X.Type().IsSlice() {
3459 if n.Bounded() {
3460 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3461 }
3462 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3463 } else {
3464 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3465 }
3466
3467 case ir.OITAB:
3468 n := n.(*ir.UnaryExpr)
3469 a := s.expr(n.X)
3470 return s.newValue1(ssa.OpITab, n.Type(), a)
3471
3472 case ir.OIDATA:
3473 n := n.(*ir.UnaryExpr)
3474 a := s.expr(n.X)
3475 return s.newValue1(ssa.OpIData, n.Type(), a)
3476
3477 case ir.OMAKEFACE:
3478 n := n.(*ir.BinaryExpr)
3479 tab := s.expr(n.X)
3480 data := s.expr(n.Y)
3481 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3482
3483 case ir.OSLICEHEADER:
3484 n := n.(*ir.SliceHeaderExpr)
3485 p := s.expr(n.Ptr)
3486 l := s.expr(n.Len)
3487 c := s.expr(n.Cap)
3488 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3489
3490 case ir.OSTRINGHEADER:
3491 n := n.(*ir.StringHeaderExpr)
3492 p := s.expr(n.Ptr)
3493 l := s.expr(n.Len)
3494 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3495
3496 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3497 n := n.(*ir.SliceExpr)
3498 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3499 v := s.exprCheckPtr(n.X, !check)
3500 var i, j, k *ssa.Value
3501 if n.Low != nil {
3502 i = s.expr(n.Low)
3503 }
3504 if n.High != nil {
3505 j = s.expr(n.High)
3506 }
3507 if n.Max != nil {
3508 k = s.expr(n.Max)
3509 }
3510 p, l, c := s.slice(v, i, j, k, n.Bounded())
3511 if check {
3512
3513 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3514 }
3515 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3516
3517 case ir.OSLICESTR:
3518 n := n.(*ir.SliceExpr)
3519 v := s.expr(n.X)
3520 var i, j *ssa.Value
3521 if n.Low != nil {
3522 i = s.expr(n.Low)
3523 }
3524 if n.High != nil {
3525 j = s.expr(n.High)
3526 }
3527 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3528 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3529
3530 case ir.OSLICE2ARRPTR:
3531
3532
3533
3534
3535 n := n.(*ir.ConvExpr)
3536 v := s.expr(n.X)
3537 nelem := n.Type().Elem().NumElem()
3538 arrlen := s.constInt(types.Types[types.TINT], nelem)
3539 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3540 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3541 op := ssa.OpSlicePtr
3542 if nelem == 0 {
3543 op = ssa.OpSlicePtrUnchecked
3544 }
3545 return s.newValue1(op, n.Type(), v)
3546
3547 case ir.OCALLFUNC:
3548 n := n.(*ir.CallExpr)
3549 if ir.IsIntrinsicCall(n) {
3550 return s.intrinsicCall(n)
3551 }
3552 fallthrough
3553
3554 case ir.OCALLINTER:
3555 n := n.(*ir.CallExpr)
3556 return s.callResult(n, callNormal)
3557
3558 case ir.OGETG:
3559 n := n.(*ir.CallExpr)
3560 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3561
3562 case ir.OGETCALLERSP:
3563 n := n.(*ir.CallExpr)
3564 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3565
3566 case ir.OAPPEND:
3567 return s.append(n.(*ir.CallExpr), false)
3568
3569 case ir.OMIN, ir.OMAX:
3570 return s.minMax(n.(*ir.CallExpr))
3571
3572 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3573
3574
3575
3576 n := n.(*ir.CompLitExpr)
3577 if !ir.IsZero(n) {
3578 s.Fatalf("literal with nonzero value in SSA: %v", n)
3579 }
3580 return s.zeroVal(n.Type())
3581
3582 case ir.ONEW:
3583 n := n.(*ir.UnaryExpr)
3584 var rtype *ssa.Value
3585 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3586 rtype = s.expr(x.RType)
3587 }
3588 return s.newObject(n.Type().Elem(), rtype)
3589
3590 case ir.OUNSAFEADD:
3591 n := n.(*ir.BinaryExpr)
3592 ptr := s.expr(n.X)
3593 len := s.expr(n.Y)
3594
3595
3596
3597 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3598
3599 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3600
3601 default:
3602 s.Fatalf("unhandled expr %v", n.Op())
3603 return nil
3604 }
3605 }
3606
3607 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3608 aux := c.Aux.(*ssa.AuxCall)
3609 pa := aux.ParamAssignmentForResult(which)
3610
3611
3612 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3613 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3614 return s.rawLoad(t, addr)
3615 }
3616 return s.newValue1I(ssa.OpSelectN, t, which, c)
3617 }
3618
3619 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3620 aux := c.Aux.(*ssa.AuxCall)
3621 pa := aux.ParamAssignmentForResult(which)
3622 if len(pa.Registers) == 0 {
3623 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3624 }
3625 _, addr := s.temp(c.Pos, t)
3626 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3627 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3628 return addr
3629 }
3630
3631
3632
3633
3634
3635
3636
3637
3638
// append converts an OAPPEND node n into SSA.
//
// If inplace is false, it evaluates append(s, e1, ...) to a new slice
// value and returns it.
//
// If inplace is true, it instead performs s = append(s, e1, ...): the new
// length (and, if the slice grew, the new pointer and capacity) is stored
// back through the address of n.Args[0], and nil is returned.
//
// The generated code is roughly:
//
//	p, l, c := parts of the input slice
//	newlen := l + number of appended args
//	if uint(c) < uint(newlen) {            // unlikely
//	    p, l, c = growslice(...)           // possibly via a stack buffer, see below
//	}
//	write the args to p[newlen-nargs : newlen]
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()   // element type of the slice
	pt := types.NewPtr(et)  // type of the slice's data pointer

	// Evaluate the slice being appended to.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		// In-place append needs the slice's address so the updated
		// header can be stored back.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow runs when the capacity is insufficient; assign writes the
	// appended elements.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the input slice into pointer / length / capacity.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newlen = len + number of appended arguments.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Grow if uint(cap) < uint(newlen).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record the slice parts in fake variables so they merge correctly
	// at the assign join point.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the rare case
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Optimization: for a small non-escaping append, serve the first
	// growth from a fixed-size stack-allocated backing array instead of
	// calling growslice. This is attempted at most once per appended-to
	// expression (tracked in s.appendTargets), only when optimizing
	// (base.Flag.N == 0), and only when K = threshold/elemsize elements
	// fit in base.Debug.VariableMakeThreshold bytes.
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if !inplace && n.Esc() == ir.EscNone && et.Size() > 0 && et.Size() <= maxStackSize && base.Flag.N == 0 && base.VariableMakeHash.MatchPos(n.Pos(), nil) && !s.appendTargets[sn] {
		// The stack buffer is used only when, at run time:
		//   1. the new length fits in the K-element array,
		//   2. the buffer has not been handed out already, and
		//   3. the old slice was empty (so nothing must be copied).
		// Otherwise control falls through to the growslice path below.
		if s.appendTargets == nil {
			s.appendTargets = map[ir.Node]bool{}
		}
		s.appendTargets[sn] = true

		// K = number of elements that fit in maxStackSize bytes.
		K := maxStackSize / et.Size()
		KT := types.NewArray(et, K)
		KT.SetNoalg(true)
		types.CalcArraySize(KT)
		// Wrap the array in a struct whose first field is a zero-length
		// uintptr array; presumably this forces uintptr alignment of
		// the buffer — NOTE(review): confirm the alignment intent.
		align := types.NewArray(types.Types[types.TUINTPTR], 0)
		types.CalcArraySize(align)
		storeTyp := types.NewStruct([]*types.Field{
			{Sym: types.BlankSym, Type: align},
			{Sym: types.BlankSym, Type: KT},
		})
		storeTyp.SetNoalg(true)
		types.CalcStructSize(storeTyp)

		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)

		// used records whether the buffer has been handed out; it
		// starts false at function entry.
		tBool := types.Types[types.TBOOL]
		used := typecheck.TempAt(n.Pos(), s.curfn, tBool)
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)

		// The stack buffer itself.
		tInt := types.Types[types.TINT]
		backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
		backingStore.SetAddrtaken(true)

		// Check 1: does the new length fit in K elements?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Check 2: is the buffer still unused?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Check 3: was the old slice empty?
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// All checks passed: zero the buffer and use it.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, backingStore, s.mem())
		}
		addr := s.addr(backingStore)
		s.zero(storeTyp, addr)

		// Point the merged slice parts at the buffer, with capacity K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, K)

		// Mark the buffer as handed out.
		s.assign(used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The remaining grow path falls back to growslice.
		grow = growSlice
	}

	// Call growslice and decompose its result.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	// The grown slice's parts.
	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem(), )
			}
		}
		// Store the new cap and pointer back into the slice header now;
		// the length is stored at the join point below.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Join point: read back the (possibly updated) slice parts.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Store the new length back into the slice header; this runs
		// on both the grow and no-grow paths.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments to be appended.
	type argRec struct {
		// SSA-able args are held as values (store=true); non-SSA-able
		// args are held by address and copied with move (store=false).
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the args at p[newlen-nargs], p[newlen-nargs+1], ...
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Drop the fake merge variables; they are dead past this point.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// In-place append already stored its result; otherwise build the
	// resulting slice value.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
3922
3923
// minMax lowers a call n to the min or max builtin (n.Op() is ir.OMIN or
// ir.OMAX) into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// All the lowerings below are binary; calls with more than two
	// arguments are handled by left-folding op over the argument list.
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats use a dedicated min/max SSA op on architectures that
		// have one, and a runtime helper otherwise; strings always use
		// a runtime helper.
		if typ.IsFloat() {
			// Architectures with native float min/max support.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64, sys.S390X:
				hasIntrinsic = true
			case sys.PPC64:
				// Only available on POWER9 and later.
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				// Select the op by element size and min vs. max.
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Runtime fallback: fmin32/fmax32, fmin64/fmax64, strmin/strmax.
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// riscv64 profiles >= 22 provide native 64-bit integer
		// min/max ops, in signed and unsigned variants.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic case: a compare plus a ternary.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4037
4038
4039 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
4040
4041
4042 ternaryVar := ssaMarker("ternary")
4043
4044 bThen := s.f.NewBlock(ssa.BlockPlain)
4045 bElse := s.f.NewBlock(ssa.BlockPlain)
4046 bEnd := s.f.NewBlock(ssa.BlockPlain)
4047
4048 b := s.endBlock()
4049 b.Kind = ssa.BlockIf
4050 b.SetControl(cond)
4051 b.AddEdgeTo(bThen)
4052 b.AddEdgeTo(bElse)
4053
4054 s.startBlock(bThen)
4055 s.vars[ternaryVar] = x
4056 s.endBlock().AddEdgeTo(bEnd)
4057
4058 s.startBlock(bElse)
4059 s.vars[ternaryVar] = y
4060 s.endBlock().AddEdgeTo(bEnd)
4061
4062 s.startBlock(bEnd)
4063 r := s.variable(ternaryVar, x.Type)
4064 delete(s.vars, ternaryVar)
4065 return r
4066 }
4067
4068
4069
4070
4071
// condBranch evaluates the boolean expression cond and branches to yes
// if cond is true and to no if cond is false.
// &&, ||, !, and no-op conversions are expanded into branch structure
// directly instead of being materialized as boolean values.
// likely is a branch-prediction hint: positive if the branch to yes is
// likely, negative if unlikely, zero for no hint.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// X && Y: test X first; only if true, test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// The hint describes the whole &&; for the first test an
		// "unlikely" hint is dropped (max(likely, 0)) since X alone
		// may still be likely true.
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.OOROR:
		// X || Y: test X first; only if false, test Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// Symmetrically, a "likely" hint is dropped for the first
		// test (min(likely, 0)).
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.ONOT:
		// !X: swap the targets and negate the hint.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate cond to a value and branch on it.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4118
// skipMask is a bitmask of parts of an assignment that may be skipped;
// it is passed through assign to storeType. Going by the bit names, the
// parts are the pointer, length, and capacity words of a slice header.
// A mask of 0 means nothing is skipped.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota
	skipLen
	skipCap
)
4126
4127
4128
4129
4130
4131
4132
// assign does left = right, where right has already been evaluated to an
// SSA value and left has not. If deref is true, it does left = *right
// instead, or zeroes left when right is nil. skip marks parts of the
// assignment that can be omitted. It is assignWhichMayOverlap with
// mayOverlap set to false.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap is the implementation of assign. mayOverlap says
// that the memory left occupies may partially overlap the memory right
// points to (meaningful only when deref is true); the copy is then done
// with an overlap-tolerant move.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return // assignments to _ are discarded
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct: rebuild the
			// whole struct value with just that field replaced, then
			// assign the new struct to left.X (recursively, since
			// left.X may itself be a field selection).
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the whole struct.
			old := s.expr(left.X)

			// New struct with field idx replaced by right.
			new := s.newValue0(ssa.OpStructMake, t)

			// Copy the other fields from the old struct.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			// Assigning to an element of an SSA-able array; such arrays
			// have at most one element (the n != 1 case is fatal below).
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index)
			if n == 0 {
				// Indexing a zero-length array: the bounds check
				// always fails, so just emit it and stop.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// Bounds-check i against length 1, then rebuild the
			// one-element array and assign it recursively.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain SSA-able variable: record the new value.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire stack-resident variable,
	// emit an OpVarDef so the old contents are treated as dead.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not SSA-able: compute its address and store/move there.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Assigning to the Data field of a reflect slice/string header:
		// store the value with unsafe.Pointer type rather than uintptr.
		// NOTE(review): rationale inferred from the helper's name —
		// presumably so the store is treated as a pointer write;
		// confirm against ir.IsReflectHeaderDataField.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Memory-to-memory: zero if right is nil, else move.
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// Value-to-memory store.
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4244
4245
// zeroVal returns an SSA value that is the zero value for type t.
// Composite zeros (complex, struct, array) are built in the entry block
// so they can be shared; anything without a representable zero is fatal.
func (s *state) zeroVal(t *types.Type) *ssa.Value {
	switch {
	case t.IsInteger():
		// Pick the constant op by size.
		switch t.Size() {
		case 1:
			return s.constInt8(t, 0)
		case 2:
			return s.constInt16(t, 0)
		case 4:
			return s.constInt32(t, 0)
		case 8:
			return s.constInt64(t, 0)
		default:
			s.Fatalf("bad sized integer type %v", t)
		}
	case t.IsFloat():
		switch t.Size() {
		case 4:
			return s.constFloat32(t, 0)
		case 8:
			return s.constFloat64(t, 0)
		default:
			s.Fatalf("bad sized float type %v", t)
		}
	case t.IsComplex():
		// A complex zero is a pair of float zeros of half the size.
		switch t.Size() {
		case 8:
			z := s.constFloat32(types.Types[types.TFLOAT32], 0)
			return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
		case 16:
			z := s.constFloat64(types.Types[types.TFLOAT64], 0)
			return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
		default:
			s.Fatalf("bad sized complex type %v", t)
		}

	case t.IsString():
		return s.constEmptyString(t)
	case t.IsPtrShaped():
		return s.constNil(t)
	case t.IsBoolean():
		return s.constBool(false)
	case t.IsInterface():
		return s.constInterface(t)
	case t.IsSlice():
		return s.constSlice(t)
	case t.IsStruct():
		// A struct zero is built field by field.
		n := t.NumFields()
		v := s.entryNewValue0(ssa.OpStructMake, t)
		for i := 0; i < n; i++ {
			v.AddArg(s.zeroVal(t.FieldType(i)))
		}
		return v
	case t.IsArray():
		// Only arrays of length 0 or 1 are handled here; longer
		// arrays fall through to the Fatalf below.
		switch t.NumElem() {
		case 0:
			return s.entryNewValue0(ssa.OpArrayMake0, t)
		case 1:
			return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
		}
	}
	s.Fatalf("zero for type %v not implemented", t)
	return nil
}
4310
// callKind describes how a call should be lowered.
type callKind int8

const (
	callNormal     callKind = iota // ordinary call
	callDefer                      // deferred call
	callDeferStack                 // deferred call using a stack-allocated defer record (see deferstruct / deferprocStack)
	callGo                         // go statement
	callTail                       // tail call
)
4320
// sfRtCallDef describes the runtime fallback for one floating-point SSA
// op in soft-float mode: the runtime function that implements it and the
// Go kind giving the op's operand/result type (used by sfcall both for
// the result type and for typing the compensating negation).
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime implementation
	rtype types.Kind // operand/result kind
}

// softFloatOps maps soft-float-lowered SSA ops to their runtime
// implementations. It is populated by softfloatInit, which InitConfig
// calls only when Arch.SoftFloat is set; otherwise it stays nil.
var softFloatOps map[ssa.Op]sfRtCallDef
4327
// softfloatInit builds the softFloatOps table used by sfcall.
// Several entries deliberately reuse another operation's runtime routine;
// sfcall compensates when making the call:
//   - Sub maps to fadd32/fadd64: sfcall negates the second operand.
//   - Neq maps to feq32/feq64: sfcall inverts the result.
//   - Less/Leq map to fgt/fge: sfcall swaps the two operands.
func softfloatInit() {
	// Arithmetic.
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		// Comparisons.
		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		// Conversions.
		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4365
4366
4367
// sfcall rewrites a floating-point operation into a call to its
// soft-float runtime implementation, if one is registered in
// softFloatOps. It returns the result value and true on success, or
// (nil, false) if op has no soft-float implementation.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// The runtime routines traffic in integer bit patterns, not floats;
	// f2i gives the same-size unsigned integer type for a float type.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		// Compensate for the table's op reuse (see softfloatInit).
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// Table maps Less/Leq to fgt/fge: swap the operands.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// Table maps Sub to fadd: negate the second operand.
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as same-size integers for the
		// runtime call.
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			// Reinterpret the integer result back as a float.
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Table maps Neq to feq: invert the result.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4411
4412
4413 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4414 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4415 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4416 return p0, p1
4417 }
4418
4419
4420 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4421 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4422 if ssa.IntrinsicsDebug > 0 {
4423 x := v
4424 if x == nil {
4425 x = s.mem()
4426 }
4427 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4428 x = x.Args[0]
4429 }
4430 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4431 }
4432 return v
4433 }
4434
4435
4436 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4437 args := make([]*ssa.Value, len(n.Args))
4438 for i, n := range n.Args {
4439 args[i] = s.expr(n)
4440 }
4441 return args
4442 }
4443
4444
4445
4446
4447
4448
4449
// openDeferRecord records a deferred call n as an open-coded defer, so
// the call can be emitted inline at function exit (see openDeferExit).
// Open-coded defers support only argument-less, result-less OCALLFUNC
// calls; anything else is a fatal error here.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun
	// Save the closure value to a dedicated stack slot so it is still
	// available when the defer runs at exit.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	// For a direct call to a statically known function the closure
	// value itself is not needed at exit; record it only otherwise
	// (openDeferExit uses closure==nil to pick a static call).
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set this defer's bit in deferBits — both the SSA variable and the
	// stack copy at s.deferBitsAddr — marking the defer as pending.
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4478
4479
4480
4481
4482
4483
// openDeferSave stores the SSA-able, pointer-containing value val of
// type t into a named temporary stack slot for use by an open-coded
// defer, and returns the slot's address. The slot's VarDef/VarLive
// markers and address computation are placed in the function's entry
// block, so the slot is defined and live from function entry onward.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// Record the defer index in the temp's frame offset field.
	// NOTE(review): inferred from the stored value (len(s.openDefers));
	// confirm how open-defer slots consume FrameOffset.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value
	// Emit the markers and address in the entry block.
	if s.curBlock.ID != s.f.Entry.ID {
		// We are past the entry block: splice the VarDef/VarLive and
		// LocalAddr onto the entry block's memory chain directly.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Still in the entry block: emit through the normal current-
		// block helpers.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// Request zeroing of the slot at function entry — presumably so the
	// slot holds no junk if it is examined before the store below has
	// executed (e.g. during panic processing). NOTE(review): confirm.
	temp.SetNeedzero(true)

	// Store the closure/value into the slot at the current point.
	s.store(t, addrTemp, val)
	return addrTemp
}
4528
4529
4530
4531
4532
// openDeferExit generates SSA for the calls of all open-coded defers at
// a function exit point. Defers run in reverse registration order; each
// call is guarded by a test of its bit in deferBits, and the bit is
// cleared before the call so the defer is not run again if the call
// itself panics.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Record this exit block and how many defers it handles.
	// NOTE(review): presumably so later exits with the same defer count
	// can branch here instead of regenerating the calls; confirm against
	// the users of lastDeferExit/lastDeferCount.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Emit the calls in reverse order of registration.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Skip the call when deferBits&(1<<i) == 0 (defer not pending).
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit — in both the stack copy and the SSA
		// variable — before making the call, so a panic during the call
		// does not cause the defer to run twice.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		// Keep the SSA view of deferBits in sync with memory.
		s.vars[deferBitsVar] = maskedval

		// Generate the call. Open-coded defers take no arguments and
		// return no results (enforced in openDeferRecord), so the call
		// threads only memory.
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Indirect call through the saved closure slot.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Direct call to a statically known function.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the closure slot alive up to and including the call.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4602
// callResult lowers the call n with kind k and returns its result value.
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4606
// callAddr lowers the call n with kind k and returns the address of its
// result.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4610
4611
4612
// call lowers a function/method/interface call n to SSA. k selects the call
// flavor (normal, tail, go, defer, or stack-allocated defer). If
// returnResultAddr is true, call returns the address of the first result
// instead of its value. deferExtra, when non-nil, is an extra argument
// evaluated and passed along to deferprocat.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target function (if static)
	var closure *ssa.Value   // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value   // ptr to target code (if dynamic)
	var dextra *ssa.Value    // extra argument for deferprocat
	var rcvr *ssa.Value      // receiver to set (interface calls)
	fn := n.Fun
	var ACArgs []*types.Type    // AuxCall args
	var ACResults []*types.Type // AuxCall results
	var callArgs []*ssa.Value   // For late-expansion, the args themselves (not stored, args to the call instead).

	callABI := s.f.ABIDefault

	// go/defer statements carry no visible arguments or results;
	// anything else here is a front-end bug.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct static call: resolve the target symbol and its ABI.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be a direct call to a
				// non-ABIInternal function. fn.Func may be nil for some
				// compiler-generated functions; in that case keep the
				// default ABI.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// TODO(register args): remove when the register ABI is always on.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// A deferred nil function must panic when the deferred call
			// runs, not at the defer statement itself, so skip the check.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set (register) nNames from caller side -- can cause races */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a defer struct on the stack and store the closure into it.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes one pointer argument
	} else {
		// Store arguments to stack, including defer/go arguments and
		// receiver for method calls. These are written in SP-offset order.
		argStart := base.Ctxt.Arch.FixedFrameSize
		// Defer/go args.
		if k != callNormal && k != callTail {
			// Write closure (arg to newproc/deferproc).
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra argument (an interface value) for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block if this call is in it and open-coded
		// defers are in use, so that defer bookkeeping and this call's
		// argument stores do not share the entry block.
		// NOTE(review): the precise failure mode being avoided isn't
		// visible here — confirm against the original commit.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Build the call operation itself.
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				// Extra token of type any for the deferred call.
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a
			// closure is always safe, but IsSanitizerSafeAddr
			// can't always figure that out currently, and it's
			// critical that we not clobber any arguments already
			// stored onto the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Note that the "receiver" parameter is handled by the abi object in this case.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // Tail call does not use stack. We reuse caller's frame.
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // Call operations carry the argsize of the callee along with them
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)

	// Insert VarLive opcodes for variables the call keeps alive.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Finish block for defers: a BlockDefer has two successors, the
	// normal next block and the function's shared deferreturn/exit block.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		r := s.f.DeferReturn
		if r == nil {
			// Build the shared deferreturn/exit block lazily, on first use.
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // edge to the deferreturn/exit path
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// call has no return value. Continue with the next statement.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
4846
4847
4848
4849 func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
4850 if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
4851
4852
4853 s.nilCheck(closure)
4854 }
4855 }
4856
4857
4858
// getClosureAndRcvr returns the address of the method's code pointer slot
// in the itab (the "closure") and the receiver data word for an interface
// method call fn (an ODOTINTER selector).
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	s.nilCheck(itab) // calling a method on a nil interface panics here
	// Offset of this method's entry within the itab's Fun array.
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
4868
4869
4870
4871 func etypesign(e types.Kind) int8 {
4872 switch e {
4873 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4874 return -1
4875 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4876 return +1
4877 }
4878 return 0
4879 }
4880
4881
4882
// addr converts the address of the expression n to SSA, adds it to s and
// returns the SSA result. n must be addressable (not representable as an
// SSA value); calling addr on a canSSA expression is a compiler bug.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset builds the address lsym+offset as an entry-block value.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: Make OpAddr use AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		// The assertion result must still be a fresh load from memory;
		// return the address it was loaded from.
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
4994
4995
4996
4997 func (s *state) canSSA(n ir.Node) bool {
4998 if base.Flag.N != 0 {
4999 return false
5000 }
5001 for {
5002 nn := n
5003 if nn.Op() == ir.ODOT {
5004 nn := nn.(*ir.SelectorExpr)
5005 n = nn.X
5006 continue
5007 }
5008 if nn.Op() == ir.OINDEX {
5009 nn := nn.(*ir.IndexExpr)
5010 if nn.X.Type().IsArray() {
5011 n = nn.X
5012 continue
5013 }
5014 }
5015 break
5016 }
5017 if n.Op() != ir.ONAME {
5018 return false
5019 }
5020 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5021 }
5022
5023 func (s *state) canSSAName(name *ir.Name) bool {
5024 if name.Addrtaken() || !name.OnStack() {
5025 return false
5026 }
5027 switch name.Class {
5028 case ir.PPARAMOUT:
5029 if s.hasdefer {
5030
5031
5032
5033
5034
5035 return false
5036 }
5037 if s.cgoUnsafeArgs {
5038
5039
5040 return false
5041 }
5042 }
5043 return true
5044
5045 }
5046
5047
5048 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5049 p := s.expr(n)
5050 if bounded || n.NonNil() {
5051 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5052 s.f.Warnl(lineno, "removed nil check")
5053 }
5054 return p
5055 }
5056 p = s.nilCheck(p)
5057 return p
5058 }
5059
5060
5061
5062
5063
5064
// nilCheck generates nil pointer checking code for ptr and returns a
// value equivalent to ptr that is known to be non-nil (so later uses are
// ordered after the check). If nil checks are disabled (debug flag or
// per-function pragma), ptr is returned unchanged.
func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
	if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
		return ptr
	}
	return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
}
5071
5072
5073
5074
5075
5076
5077
// boundsCheck generates bounds checking code: it checks 0 <= idx <[=] len
// and branches to a panic block if not, starting a new block on return.
// len must already be converted to full int width and be nonnegative.
// Returns idx converted to full int width. If bounded is true the caller
// guarantees the index is in bounds (but idx is still width-extended).
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is flag-disabled, then no check necessary,
		// just return the extended index.
		//
		// Here, bounded == true if the compiler generated the index itself,
		// such as in the expansion of a slice initializer. These indexes are
		// compiler-generated, not Go program variables, so they cannot be
		// attacker-controlled, so we can omit Spectre masking as well.
		//
		// Note that we do not want to omit Spectre masking in code like:
		//
		//	if 0 <= i && i < len(x) {
		//		use(x[i])
		//	}
		//
		// Lucky for us, bounded==false for that code.
		// In that case (handled below), we emit a bound check and Spectre
		// mask, and the prove pass will remove the bounds check later.
		// In theory prove could also remove certain Spectre masks, but
		// it is delicate, so we conservatively leave them all in.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Use the unsigned variant of the bounds failure kind so the
		// runtime prints the index as unsigned.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Index checks use idx < len; slice checks allow idx == len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// TODO(khr): figure out how to do "register" based calling convention for bounds checks.
		// Should be similar to gcWriteBarrier, but I can't make it work.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an appropriate mask to avoid
	// speculative out-of-bounds accesses branching past the bounds check.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5164
5165
// check generates code that checks cmp is true; if it is not, control
// branches to a panic block that calls fn. Panic blocks are deduplicated
// per (fn, file, line) so repeated checks at the same position share one
// block. A new (non-panic) block is current on return.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call takes/returns memory to ensure that the right
		// memory state is observed if the panic happens.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5188
5189 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5190 needcheck := true
5191 switch b.Op {
5192 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5193 if b.AuxInt != 0 {
5194 needcheck = false
5195 }
5196 }
5197 if needcheck {
5198
5199 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5200 s.check(cmp, ir.Syms.Panicdivide)
5201 }
5202 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5203 }
5204
5205
5206
5207
5208
// rtcall issues a call to the given runtime function fn with the listed
// args, returning one SSA value per entry of results. The call is added
// to the end of the current block. If returns is false, the block is
// marked as an exit block (the runtime function does not return) and
// results must be empty.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Write args to the stack offsets (accumulated in off for AuxInt).
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue call
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block: the callee panics/never returns.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5260
5261
// storeType stores value right of type t to address left, decomposing the
// store so that write barriers are emitted only for the pointer parts.
// skip selects composite components that need not be stored; leftIsStmt
// marks the store as a statement boundary for debug info.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not have write barrier. Store the whole type.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write barrier stores for
	// pointer fields can be grouped together, and scalar values
	// don't need to be live across the write barrier call.
	// TODO: if the writebarrier pass knows how to reorder stores,
	// we can do a single store here as long as skip==0.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5281
5282
// storeTypeScalars does the scalar parts of a store of value right of type t to left.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap objects need no write barrier,
			// so they are treated as scalars.
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store the length word (second word of the string header).
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		if skip&skipLen == 0 {
			// Store the length word of the slice header.
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			// Store the capacity word of the slice header.
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		// Recurse into each field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5330
5331
// storeTypePtrs does the pointer parts of a store of value right of type t to left.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			break // see issue: not-in-heap pointers were already stored as scalars
		}
		s.store(t, left, right)
	case t.IsString():
		// Store the data pointer (first word of the string header).
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Store the data pointer (first word of the slice header).
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// itab field is treated as a scalar.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		// Recurse into pointer-bearing fields only.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5370
5371
5372 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5373 var a *ssa.Value
5374 if !ssa.CanSSA(t) {
5375 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5376 } else {
5377 a = s.expr(n)
5378 }
5379 return a
5380 }
5381
5382
5383
5384
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Make sure we check the largest index first, so that if only one
	// bounds check is needed the failure reports the right index.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice:
	//
	//	rlen = j - i
	//	rcap = k - i (strings keep rcap == rlen)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice.
	//
	// Generate the following code assuming that indexes are in bounds.
	// The masking is to make sure that we don't generate a slice
	// that points to the next object in memory. We cannot just set
	// the pointer to nil because then we would create a nil slice or
	// string.
	//
	//	rcap = k - i
	//	rlen = j - i
	//	rptr = ptr + (mask(rcap) & (i * stride))
	//
	// Where mask(x) is 0 if x==0 and -1 if x>0 and stride is the width
	// of the element type.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// The delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// If we're slicing to the point where the capacity is zero,
	// zero out the delta instead of pointing past the end of the object.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5497
// u642fcvtTab lists the SSA ops needed to convert an unsigned 64-bit
// integer to a float of a given width; one describes the float32 target
// and one the float64 target.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one                           func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}
5522
// uint64Tofloat64 converts uint64 value x (from expression n) to float64.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts uint64 value x (from expression n) to float32.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5530
// uint64Tofloat converts uint64 value x of expression n to a float using
// the ops in cvttab. ft is the (integer) from-type, tt the float to-type.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// if x >= 0 {
	//	result = (floatY) x
	// } else {
	//	y = uintX(x) ; y = x & 1
	//	z = uintX(x) ; z = z >> 1
	//	z = z | y
	//	result = floatY(z)
	//	result = result + result
	// }
	//
	// What's going on: a large uint64 looks negative to the hardware's
	// signed integer-to-float conversion. Since the float mantissa can't
	// hold all 64 bits anyway, we shift right by one (unsigned), convert,
	// and double. Before the shift, the dropped low bit is OR'ed back in
	// so the final rounding comes out the same as converting the full
	// value would.
	// NOTE(review): rounding argument reconstructed from the code —
	// confirm against the original comment.
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in the signed range, convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: halve (keeping the low bit), convert, then double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5588
// u322fcvtTab lists the SSA ops used to convert an unsigned 32-bit
// integer to a float: a signed 32->float64 convert, then an optional
// float64->target narrowing.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 -> float64 (no narrowing needed).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 -> float32 (narrow after the float64 add).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}
5602
// uint32Tofloat64 converts uint32 value x (from expression n) to float64.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts uint32 value x (from expression n) to float32.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5610
// uint32Tofloat converts uint32 value x of expression n to a float using
// the ops in cvttab.
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// if x >= 0 {
	//	result = floatY(x)
	// } else {
	//	result = floatY(float64(x) + (1<<32))
	// }
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in the signed 32-bit range.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: signed convert produced x-2^32; add 2^32 back in
	// float64 (exact), then narrow to the target type.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5648
5649
// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must use runtime.chanlen now
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must use runtime.chancap now
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // cap(map) is not supported
	}
	// if n == nil {
	//   return 0
	// } else {
	//   // len (maps only; the field type may differ from lenType)
	//   return *((*loadType)n)
	//   // cap
	//   return *(((*int)n)+1)
	// }
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if n.X.Type().IsMap() {
			// length is stored in the first word, whose type is
			// taken from the runtime map header's first field.
			loadType := reflectdata.MapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// length is stored in the first word for chan
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// capacity is stored in the second word for chan
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
5716
// f2uCvtTab lists the SSA ops and constants needed to convert a float to
// an unsigned integer. cutoff is 2^(width-1), the first value that does
// not fit in the corresponding signed integer type.
type f2uCvtTab struct {
	ltf, cvt2U, subf, or ssa.Op
	floatValue           func(*state, *types.Type, float64) *ssa.Value
	intValue             func(*state, *types.Type, int64) *ssa.Value
	cutoff               uint64
}

// f32_u64 converts float32 -> uint64.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 converts float64 -> uint64.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 converts float32 -> uint32.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 converts float64 -> uint32.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
5763
// float32ToUint64 converts float32 value x (from expression n) to uint64.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts float64 value x (from expression n) to uint64.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts float32 value x (from expression n) to uint32.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts float64 value x (from expression n) to uint32.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5778
// floatToUint converts float value x of expression n to an unsigned
// integer using the ops in cvttab.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// cutoff := 1 << (intY_Size-1)
	// if x < floatX(cutoff) {
	//	result = uintY(x)
	// } else {
	//	y = x - floatX(cutoff)
	//	z = uintY(y)
	//	result = z | -(cutoff)
	// }
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in the signed range, convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: subtract the cutoff, convert, then set the sign bit back.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5819
5820
5821
5822
// dottype generates SSA for a type assertion node n. commaok indicates
// whether to panic or return a boolean ok result on failure.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)            // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
5832
// dynamicDottype generates SSA for a dynamic type assertion node
// (an assertion whose target type is known only at run time, e.g. inside
// generic code). commaok indicates whether to panic or return a bool.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// The target type is derived from the itab: load the Type field
		// out of the itab rather than evaluating n.RType.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
5850
5851
5852
5853
5854
5855
5856
5857
5858
5859 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
5860 typs := s.f.Config.Types
5861 byteptr := typs.BytePtr
5862 if dst.IsInterface() {
5863 if dst.IsEmptyInterface() {
5864
5865
5866 if base.Debug.TypeAssert > 0 {
5867 base.WarnfAt(pos, "type assertion inlined")
5868 }
5869
5870
5871 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5872
5873 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5874
5875 if src.IsEmptyInterface() && commaok {
5876
5877 return iface, cond
5878 }
5879
5880
5881 b := s.endBlock()
5882 b.Kind = ssa.BlockIf
5883 b.SetControl(cond)
5884 b.Likely = ssa.BranchLikely
5885 bOk := s.f.NewBlock(ssa.BlockPlain)
5886 bFail := s.f.NewBlock(ssa.BlockPlain)
5887 b.AddEdgeTo(bOk)
5888 b.AddEdgeTo(bFail)
5889
5890 if !commaok {
5891
5892 s.startBlock(bFail)
5893 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5894
5895
5896 s.startBlock(bOk)
5897 if src.IsEmptyInterface() {
5898 res = iface
5899 return
5900 }
5901
5902 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5903 typ := s.load(byteptr, off)
5904 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5905 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
5906 return
5907 }
5908
5909 s.startBlock(bOk)
5910
5911
5912 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5913 s.vars[typVar] = s.load(byteptr, off)
5914 s.endBlock()
5915
5916
5917 s.startBlock(bFail)
5918 s.vars[typVar] = itab
5919 s.endBlock()
5920
5921
5922 bEnd := s.f.NewBlock(ssa.BlockPlain)
5923 bOk.AddEdgeTo(bEnd)
5924 bFail.AddEdgeTo(bEnd)
5925 s.startBlock(bEnd)
5926 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5927 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
5928 resok = cond
5929 delete(s.vars, typVar)
5930 return
5931 }
5932
5933 if base.Debug.TypeAssert > 0 {
5934 base.WarnfAt(pos, "type assertion not inlined")
5935 }
5936
5937 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5938 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
5939
5940
5941 bNil := s.f.NewBlock(ssa.BlockPlain)
5942 bNonNil := s.f.NewBlock(ssa.BlockPlain)
5943 bMerge := s.f.NewBlock(ssa.BlockPlain)
5944 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5945 b := s.endBlock()
5946 b.Kind = ssa.BlockIf
5947 b.SetControl(cond)
5948 b.Likely = ssa.BranchLikely
5949 b.AddEdgeTo(bNonNil)
5950 b.AddEdgeTo(bNil)
5951
5952 s.startBlock(bNil)
5953 if commaok {
5954 s.vars[typVar] = itab
5955 b := s.endBlock()
5956 b.AddEdgeTo(bMerge)
5957 } else {
5958
5959 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5960 }
5961
5962
5963 s.startBlock(bNonNil)
5964 typ := itab
5965 if !src.IsEmptyInterface() {
5966 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
5967 }
5968
5969
5970 var d *ssa.Value
5971 if descriptor != nil {
5972 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
5973 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
5974
5975
5976 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
5977 s.Fatalf("atomic load not available")
5978 }
5979
5980 var mul, and, add, zext ssa.Op
5981 if s.config.PtrSize == 4 {
5982 mul = ssa.OpMul32
5983 and = ssa.OpAnd32
5984 add = ssa.OpAdd32
5985 zext = ssa.OpCopy
5986 } else {
5987 mul = ssa.OpMul64
5988 and = ssa.OpAnd64
5989 add = ssa.OpAdd64
5990 zext = ssa.OpZeroExt32to64
5991 }
5992
5993 loopHead := s.f.NewBlock(ssa.BlockPlain)
5994 loopBody := s.f.NewBlock(ssa.BlockPlain)
5995 cacheHit := s.f.NewBlock(ssa.BlockPlain)
5996 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
5997
5998
5999
6000 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6001 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6002 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6003
6004
6005 var hash *ssa.Value
6006 if src.IsEmptyInterface() {
6007 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6008 } else {
6009 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6010 }
6011 hash = s.newValue1(zext, typs.Uintptr, hash)
6012 s.vars[hashVar] = hash
6013
6014 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6015
6016 b := s.endBlock()
6017 b.AddEdgeTo(loopHead)
6018
6019
6020
6021 s.startBlock(loopHead)
6022 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6023 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6024 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6025 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6026
6027 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6028
6029
6030
6031 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6032 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6033 b = s.endBlock()
6034 b.Kind = ssa.BlockIf
6035 b.SetControl(cmp1)
6036 b.AddEdgeTo(cacheHit)
6037 b.AddEdgeTo(loopBody)
6038
6039
6040
6041 s.startBlock(loopBody)
6042 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6043 b = s.endBlock()
6044 b.Kind = ssa.BlockIf
6045 b.SetControl(cmp2)
6046 b.AddEdgeTo(cacheMiss)
6047 b.AddEdgeTo(loopHead)
6048
6049
6050
6051 s.startBlock(cacheHit)
6052 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6053 s.vars[typVar] = eItab
6054 b = s.endBlock()
6055 b.AddEdgeTo(bMerge)
6056
6057
6058 s.startBlock(cacheMiss)
6059 }
6060 }
6061
6062
6063 if descriptor != nil {
6064 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6065 } else {
6066 var fn *obj.LSym
6067 if commaok {
6068 fn = ir.Syms.AssertE2I2
6069 } else {
6070 fn = ir.Syms.AssertE2I
6071 }
6072 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6073 }
6074 s.vars[typVar] = itab
6075 b = s.endBlock()
6076 b.AddEdgeTo(bMerge)
6077
6078
6079 s.startBlock(bMerge)
6080 itab = s.variable(typVar, byteptr)
6081 var ok *ssa.Value
6082 if commaok {
6083 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6084 }
6085 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6086 }
6087
6088 if base.Debug.TypeAssert > 0 {
6089 base.WarnfAt(pos, "type assertion inlined")
6090 }
6091
6092
6093 direct := types.IsDirectIface(dst)
6094 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6095 if base.Debug.TypeAssert > 0 {
6096 base.WarnfAt(pos, "type assertion inlined")
6097 }
6098 var wantedFirstWord *ssa.Value
6099 if src.IsEmptyInterface() {
6100
6101 wantedFirstWord = target
6102 } else {
6103
6104 wantedFirstWord = targetItab
6105 }
6106
6107 var tmp ir.Node
6108 var addr *ssa.Value
6109 if commaok && !ssa.CanSSA(dst) {
6110
6111
6112 tmp, addr = s.temp(pos, dst)
6113 }
6114
6115 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6116 b := s.endBlock()
6117 b.Kind = ssa.BlockIf
6118 b.SetControl(cond)
6119 b.Likely = ssa.BranchLikely
6120
6121 bOk := s.f.NewBlock(ssa.BlockPlain)
6122 bFail := s.f.NewBlock(ssa.BlockPlain)
6123 b.AddEdgeTo(bOk)
6124 b.AddEdgeTo(bFail)
6125
6126 if !commaok {
6127
6128 s.startBlock(bFail)
6129 taddr := source
6130 if taddr == nil {
6131 taddr = s.reflectType(src)
6132 }
6133 if src.IsEmptyInterface() {
6134 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6135 } else {
6136 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6137 }
6138
6139
6140 s.startBlock(bOk)
6141 if direct {
6142 return s.newValue1(ssa.OpIData, dst, iface), nil
6143 }
6144 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6145 return s.load(dst, p), nil
6146 }
6147
6148
6149
6150 bEnd := s.f.NewBlock(ssa.BlockPlain)
6151
6152
6153 valVar := ssaMarker("val")
6154
6155
6156 s.startBlock(bOk)
6157 if tmp == nil {
6158 if direct {
6159 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6160 } else {
6161 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6162 s.vars[valVar] = s.load(dst, p)
6163 }
6164 } else {
6165 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6166 s.move(dst, addr, p)
6167 }
6168 s.vars[okVar] = s.constBool(true)
6169 s.endBlock()
6170 bOk.AddEdgeTo(bEnd)
6171
6172
6173 s.startBlock(bFail)
6174 if tmp == nil {
6175 s.vars[valVar] = s.zeroVal(dst)
6176 } else {
6177 s.zero(dst, addr)
6178 }
6179 s.vars[okVar] = s.constBool(false)
6180 s.endBlock()
6181 bFail.AddEdgeTo(bEnd)
6182
6183
6184 s.startBlock(bEnd)
6185 if tmp == nil {
6186 res = s.variable(valVar, dst)
6187 delete(s.vars, valVar)
6188 } else {
6189 res = s.load(dst, addr)
6190 }
6191 resok = s.variable(okVar, types.Types[types.TBOOL])
6192 delete(s.vars, okVar)
6193 return res, resok
6194 }
6195
6196
// temp allocates a compiler temporary of type t at position pos and returns
// the temp together with an SSA value holding its address.
func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
	tmp := typecheck.TempAt(pos, s.curfn, t)
	// Mark the temp's lifetime start for liveness when it has pointers,
	// or when it is a stack-slot merge candidate (but not the defer record).
	if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
		s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
	}
	addr := s.addr(tmp)
	return tmp, addr
}
6205
6206
// variable returns the value of the variable n at the current location.
func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
	v := s.vars[n]
	if v != nil {
		return v
	}
	v = s.fwdVars[n]
	if v != nil {
		return v
	}

	if s.curBlock == s.f.Entry {
		// No variable should be live at entry.
		s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
	}
	// Make a FwdRef, which records a value that's live on block input.
	// It will be replaced with the real definition when phis are inserted.
	v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
	s.fwdVars[n] = v
	if n.Op() == ir.ONAME {
		s.addNamedValue(n.(*ir.Name), v)
	}
	return v
}
6230
// mem returns the current memory state, as an SSA value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6234
// addNamedValue records v as a value of the named variable n, for use by
// debug info generation (f.NamedValues). Marker nodes, temporaries, and
// output parameters are deliberately not tracked.
func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
	if n.Class == ir.Pxxx {
		// Don't track our marker nodes (memVar etc.).
		return
	}
	if ir.IsAutoTmp(n) {
		// Don't track temporary variables.
		return
	}
	if n.Class == ir.PPARAMOUT {
		// Don't track named output values. This prevents return values
		// from being assigned too early.
		return
	}
	loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
	values, ok := s.f.NamedValues[loc]
	if !ok {
		s.f.Names = append(s.f.Names, &loc)
		s.f.CanonicalLocalSlots[loc] = &loc
	}
	s.f.NamedValues[loc] = append(values, v)
}
6257
6258
// Branch is an unresolved branch: an emitted branch instruction whose
// target block's Prog is not yet known.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6263
6264
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump tables we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number.
	// Used to move statement marks to the beginning of such runs.
	lineRunStart *obj.Prog

	// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
	OnWasmStackSkipped int
}
6298
// FuncInfo returns the FuncInfo of the current function symbol.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6302
6303
// Prog appends a new Prog with opcode as.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement mark to the beginning of any run of instructions
	// on the same line; lineRunStart is reset at block boundaries.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6319
6320
// Pc returns the current Prog (the next instruction slot to be filled).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6324
6325
// SetPos sets the current source position.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6329
6330
6331
6332
// Br emits a single branch instruction with opcode op targeting block
// target, records it for later target resolution, and returns the
// instruction (some callers need it to set additional fields).
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
6339
6340
6341
6342
6343
6344
// DebugFriendlySetPosFrom adjusts the current position from v's position,
// applying heuristics on the IsStmt flag to reduce "jumpy" line-number
// churn when debugging. Register-allocator spills/fills/copies and phis
// are never statements; duplicate statement marks on the same line are
// suppressed.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If v's position lacks an explicit statement mark but the
			// current position is already a statement on the same file
			// and line, keep the current position: it carries the mark
			// we want and overwriting it would lose the statement
			// boundary. Otherwise demote v's position to NotStmt so
			// only explicit statement boundaries appear in the
			// generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// Current position already marks this line as a
					// statement; leave it alone.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the statement mark from State.
			}
			s.SetPos(p)
		} else {
			// v has no position: inherit the current one, minus any
			// statement mark.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6383
6384
// emitArgInfo emits the funcdata symbol for printing a function's
// arguments in a traceback. It does nothing for functions with no
// receiver and no parameters.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA instruction pointing at the arg info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6402
6403
// EmitArgInfo generates the argument-info metadata symbol for f, encoding
// the offsets and sizes of f's input parameters as a byte stream consumed
// by the runtime traceback printer (see internal/abi TraceArgs* constants).
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
	// NOTE(review): the ContentAddressable attribute is set by the caller
	// (emitArgInfo), not here — presumably because this symbol may also be
	// referenced by name; confirm before changing.

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is encoded as a multi-component
	// aggregate (struct, array, complex, interface, string, slice).
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into symbol x
	n := 0    // number of components emitted so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element as (offset, size), or the
	// "offset too large" marker when it doesn't fit in the encoding.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType recursively writes the encoding of t at baseOffset.
	// It returns whether to continue visiting further components
	// (false once the component limit is reached).
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Three pointer-sized words (ptr, len, cap).
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Two floats of half the complex size.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty array still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty struct still counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of an instantiated generic function
		// - it is implicit and the user doesn't need to see it.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6507
6508
// emitWrappedFuncInfo emits, for a wrapper function, funcdata referencing
// the function it wraps.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Relative reference (SymPtrOff) to a symbol in another shared
		// object doesn't work, so skip in -linkshared mode.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA instruction pointing at the wrap info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6535
6536
// genssa appends entries to pp for each instruction in f, translating the
// function's SSA form into machine instructions (Progs), and emits the
// associated metadata: liveness PCDATA/FUNCDATA, inline marks, debug
// location lists, branch/jump-table fixups, and optional dumps.
func genssa(f *ssa.Func, pp *objw.Progs) {
	var s State
	s.ABI = f.OwnAux.Fn.ABI()

	e := f.Frontend().(*ssafn)

	gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]

	var lv *liveness.Liveness
	s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
	emitArgInfo(e, f, pp)
	argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)

	openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
	if openDeferInfo != nil {
		// This function uses open-coded defers, so emit a FUNCDATA
		// symbol pointing to the open-coded defer information.
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = openDeferInfo
	}

	emitWrappedFuncInfo(e, pp)

	// Remember where each block starts.
	s.bstart = make([]*obj.Prog, f.NumBlocks())
	s.pp = pp
	var progToValue map[*obj.Prog]*ssa.Value
	var progToBlock map[*obj.Prog]*ssa.Block
	var valueToProgAfter []*obj.Prog // The first Prog following the computation of value v; indexed by v.ID.
	if gatherPrintInfo {
		progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
		progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
		f.Logf("genssa %s\n", f.Name)
		progToBlock[s.pp.Next] = f.Blocks[0]
	}

	if base.Ctxt.Flag_locationlists {
		if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
			f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
		}
		valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
		clear(valueToProgAfter)
	}

	// Find the first statement-marked value in the entry block (skipping
	// arg/spill ops) and demote its mark to DefaultStmt: its position is
	// re-applied (as firstPos) to the first real instruction below.
	firstPos := src.NoXPos
	for _, v := range f.Entry.Values {
		if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
			firstPos = v.Pos
			v.Pos = firstPos.WithDefaultStmt()
			break
		}
	}

	// inlMarks has an entry for each Prog that implements an inline mark.
	// It maps from that Prog to the global inlining id of the inlined body
	// which should unwind to this Prog's location.
	var inlMarks map[*obj.Prog]int32
	var inlMarkList []*obj.Prog

	// inlMarksByPos maps from a (column 1) source position to the set of
	// Progs that are in the set above and have that source position.
	var inlMarksByPos map[src.XPos][]*obj.Prog

	var argLiveIdx int = -1 // argument liveness info index

	// These control cache line alignment of hot blocks; if the required
	// portion of a cache line is not available, pad to obtain it.
	// Only enabled on some architectures (see switch below).
	var hotAlign, hotRequire int64

	if base.Debug.AlignHot > 0 {
		switch base.Ctxt.Arch.Name {
		// Enable this on a case-by-case basis, with benchmarking.
		case "amd64", "386":
			// Align to 64 if 31 or fewer bytes remain in the cache line.
			hotAlign = 64
			hotRequire = 31
		}
	}

	// Emit basic blocks.
	for i, b := range f.Blocks {

		s.lineRunStart = nil
		s.SetPos(s.pp.Pos.WithNotStmt()) // It needs a non-empty Pos, but cannot be a statement boundary (yet).

		if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
			// Pad so this PGO-hot block starts within the required
			// remainder of a cache line.
			p := s.pp.Prog(obj.APCALIGNMAX)
			p.From.SetConst(hotAlign)
			p.To.SetConst(hotRequire)
		}

		s.bstart[b.ID] = s.pp.Next

		// Emit an argument-liveness PCDATA when the index changes at a
		// block boundary.
		if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
			argLiveIdx = idx
			p := s.pp.Prog(obj.APCDATA)
			p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
			p.To.SetConst(int64(idx))
		}

		// Emit values in block.
		Arch.SSAMarkMoves(&s, b)
		for _, v := range b.Values {
			x := s.pp.Next
			s.DebugFriendlySetPosFrom(v)

			if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
				v.Fatalf("input[0] and output not in same register %s", v.LongString())
			}

			switch v.Op {
			case ssa.OpInitMem:
				// memory arg needs no code
			case ssa.OpArg:
				// input args need no code
			case ssa.OpSP, ssa.OpSB:
				// nothing to do
			case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
				// nothing to do
			case ssa.OpGetG:
				// nothing to do
			case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
				// nothing to do; already used by liveness
			case ssa.OpPhi:
				CheckLoweredPhi(v)
			case ssa.OpConvert:
				// nothing to do; no-op conversion for liveness
				if v.Args[0].Reg() != v.Reg() {
					v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
				}
			case ssa.OpInlMark:
				p := Arch.Ginsnop(s.pp)
				if inlMarks == nil {
					inlMarks = map[*obj.Prog]int32{}
					inlMarksByPos = map[src.XPos][]*obj.Prog{}
				}
				inlMarks[p] = v.AuxInt32()
				inlMarkList = append(inlMarkList, p)
				pos := v.Pos.AtColumn1()
				inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
				firstPos = src.NoXPos // no longer a candidate for first statement in function

			default:
				// Special case for the first line in function; move it
				// to the start (which cannot be a register-valued instruction).
				if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
					s.SetPos(firstPos)
					firstPos = src.NoXPos
				}
				// Attach this safe point's liveness/unsafe info to the
				// next instruction.
				s.pp.NextLive = s.livenessMap.Get(v)
				s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)

				// let the backend handle it
				Arch.SSAGenValue(&s, v)
			}

			if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
				argLiveIdx = idx
				p := s.pp.Prog(obj.APCDATA)
				p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
				p.To.SetConst(int64(idx))
			}

			if base.Ctxt.Flag_locationlists {
				valueToProgAfter[v.ID] = s.pp.Next
			}

			if gatherPrintInfo {
				for ; x != s.pp.Next; x = x.Link {
					progToValue[x] = v
				}
			}
		}
		// If this is an empty self-loop (infinite loop), stick a hardware
		// NOP in there so that debuggers are less confused.
		if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
			p := Arch.Ginsnop(s.pp)
			p.Pos = p.Pos.WithIsStmt()
			if b.Pos == src.NoXPos {
				b.Pos = p.Pos // It needs a file; a no-file non-zero line causes confusion.
				if b.Pos == src.NoXPos {
					b.Pos = s.pp.Text.Pos // Sometimes p.Pos is empty too.
				}
			}
			b.Pos = b.Pos.WithBogusLine() // Debuggers are not good about infinite loops; force a change in line number.
		}

		// Set unsafe mark for any end-of-block generated instructions
		// (normally, conditional or unconditional branches).
		// This is particularly important for empty blocks, as there
		// are no values to inherit the unsafe mark from.
		s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)

		// Emit control flow instructions for block.
		var next *ssa.Block
		if i < len(f.Blocks)-1 && base.Flag.N == 0 {
			// If -N, leave next==nil so every block with successors
			// ends in a JMP. Helps keep line numbers for otherwise
			// empty blocks.
			next = f.Blocks[i+1]
		}
		x := s.pp.Next
		s.SetPos(b.Pos)
		Arch.SSAGenBlock(&s, b, next)
		if gatherPrintInfo {
			for ; x != s.pp.Next; x = x.Link {
				progToBlock[x] = b
			}
		}
	}
	if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
		// We need the return address of a panic call to still be inside
		// the function in question. So if it ends in a call which doesn't
		// return, add a nop (which will never execute) after the call.
		Arch.Ginsnop(s.pp)
	}
	if openDeferInfo != nil {
		// When doing open-coded defers, generate a disconnected call to
		// deferreturn and a return. This will be used during panic
		// recovery to unwind the stack and return back to the runtime.

		// The deferreturn call is assumed to have no arguments and no results.
		s.pp.NextLive = s.livenessMap.DeferReturn
		p := s.pp.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Deferreturn

		// Load results into registers, so that when a deferred function
		// recovers a panic, it returns to the caller with the right results
		// (the results are already in memory).
		for _, o := range f.OwnAux.ABIInfo().OutParams() {
			n := o.Name
			rts, offs := o.RegisterTypesAndOffsets()
			for i := range o.Registers {
				Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
			}
		}

		s.pp.Prog(obj.ARET)
	}

	if inlMarks != nil {
		hasCall := false

		// We have some inline marks. Try to find other instructions we're
		// going to emit anyway, and use those instructions instead of the
		// inline marks.
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
				p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
				// Don't use 0-sized instructions as inline marks, because
				// we need to identify inline mark instructions by pc offset.
				// (Some of these instructions are sometimes zero-sized,
				// sometimes not; avoid anything that even might be.)
				continue
			}
			if _, ok := inlMarks[p]; ok {
				// Don't use inline marks themselves. We don't know
				// whether they will be zero-sized or not yet.
				continue
			}
			if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
				hasCall = true
			}
			pos := p.Pos.AtColumn1()
			marks := inlMarksByPos[pos]
			if len(marks) == 0 {
				continue
			}
			for _, m := range marks {
				// We found an instruction with the same source position
				// as some of the inline marks. Use this instruction instead.
				p.Pos = p.Pos.WithIsStmt() // promote position to a statement
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
				// Make the inline mark a real nop, so it doesn't generate any code.
				m.As = obj.ANOP
				m.Pos = src.NoXPos
				m.From = obj.Addr{}
				m.To = obj.Addr{}
			}
			delete(inlMarksByPos, pos)
		}
		// Any unmatched inline marks now need to be added to the inlining
		// tree (and will generate a nop instruction).
		for _, p := range inlMarkList {
			if p.As != obj.ANOP { // skip the nop-ified marks handled above
				s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
			}
		}

		if e.stksize == 0 && !hasCall {
			// Frameless leaf function. It doesn't need any preamble,
			// so make sure its first instruction isn't from an inlined
			// callee; if it is, add a nop at the start of the function
			// carrying the function's own position.
			for p := s.pp.Text; p != nil; p = p.Link {
				if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
					continue
				}
				if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
					// Make a real (not 0-sized) nop.
					nop := Arch.Ginsnop(s.pp)
					nop.Pos = e.curfn.Pos().WithIsStmt()

					// Ginsnop appended the nop at the end of the list;
					// unlink it from its current position...
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == nop {
							x.Link = nop.Link
							break
						}
					}
					// ...and splice it back in just before p.
					for x := s.pp.Text; x != nil; x = x.Link {
						if x.Link == p {
							nop.Link = p
							x.Link = nop
							break
						}
					}
				}
				break
			}
		}
	}

	if base.Ctxt.Flag_locationlists {
		var debugInfo *ssa.FuncDebug
		debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
		// Save off entry ID in case we need it later for DWARF generation.
		debugInfo.EntryID = f.Entry.ID
		if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
			ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
		} else {
			ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
		}
		bstart := s.bstart
		idToIdx := make([]int, f.NumBlocks())
		for i, b := range f.Blocks {
			idToIdx[b.ID] = i
		}
		// Register a callback that will be used later to fill in PCs into
		// location lists. At the moment, Prog.Pc is a sequence number; it's
		// not a real PC until after assembly.
		debugInfo.GetPC = func(b, v ssa.ID) int64 {
			switch v {
			case ssa.BlockStart.ID:
				if b == f.Entry.ID {
					return 0 // Start at the very beginning, at the assembler-generated prologue.
					// This should only happen for function args (ssa.OpArg).
				}
				return bstart[b].Pc
			case ssa.BlockEnd.ID:
				blk := f.Blocks[idToIdx[b]]
				nv := len(blk.Values)
				return valueToProgAfter[blk.Values[nv-1].ID].Pc
			case ssa.FuncEnd.ID:
				return e.curfn.LSym.Size
			default:
				return valueToProgAfter[v].Pc
			}
		}
	}

	// Resolve branches: point each recorded branch at the Prog starting
	// its target block, and relax redundant statement marks to NotStmt.
	for _, br := range s.Branches {
		br.P.To.SetTarget(s.bstart[br.B.ID])
		if br.P.Pos.IsStmt() != src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		} else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		}

	}

	// Resolve jump table destinations.
	for _, jt := range s.JumpTables {
		// Convert from *Block targets to *Prog targets.
		targets := make([]*obj.Prog, len(jt.Succs))
		for i, e := range jt.Succs {
			targets[i] = s.bstart[e.Block().ID]
		}
		// Add to the list of jump tables to be resolved at assembly time;
		// the assembler converts from *Prog entries to absolute addresses
		// once it knows instruction byte offsets.
		fi := s.pp.CurFunc.LSym.Func()
		fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
	}

	if e.log { // spew to stdout
		filename := ""
		for p := s.pp.Text; p != nil; p = p.Link {
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				f.Logf("# %s\n", filename)
			}

			var s string
			if v, ok := progToValue[p]; ok {
				s = v.String()
			} else if b, ok := progToBlock[p]; ok {
				s = b.String()
			} else {
				s = "   "
			}
			f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
		}
	}
	if f.HTMLWriter != nil { // spew to ssa.html
		var buf strings.Builder
		buf.WriteString("<code>")
		buf.WriteString("<dl class=\"ssa-gen\">")
		filename := ""

		liveness := lv.Format(nil)
		if liveness != "" {
			buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
			buf.WriteString(html.EscapeString("# " + liveness))
			buf.WriteString("</dd>")
		}

		for p := s.pp.Text; p != nil; p = p.Link {
			// Don't spam every line with the file name, which is often huge.
			// Only print changes, and "unknown" is not a change.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
				buf.WriteString(html.EscapeString("# " + filename))
				buf.WriteString("</dd>")
			}

			buf.WriteString("<dt class=\"ssa-prog-src\">")
			if v, ok := progToValue[p]; ok {
				// Emit the value's liveness info (if any) on its own row
				// before the value itself.
				if p.As != obj.APCDATA {
					if liveness := lv.Format(v); liveness != "" {
						// Steal this line, and restart a line.
						buf.WriteString("</dt><dd class=\"ssa-prog\">")
						buf.WriteString(html.EscapeString("# " + liveness))
						buf.WriteString("</dd>")
						// Restarting a line.
						buf.WriteString("<dt class=\"ssa-prog-src\">")
					}
				}

				buf.WriteString(v.HTML())
			} else if b, ok := progToBlock[p]; ok {
				buf.WriteString("<b>" + b.HTML() + "</b>")
			}
			buf.WriteString("</dt>")
			buf.WriteString("<dd class=\"ssa-prog\">")
			fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
			buf.WriteString("</dd>")
		}
		buf.WriteString("</dl>")
		buf.WriteString("</code>")
		f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
	}
	if ssa.GenssaDump[f.Name] {
		fi := f.DumpFileForPhase("genssa")
		if fi != nil {

			// inliningDiffers reports whether the inlining stacks differ:
			// any filename change, or any line change except in the
			// innermost (last) frame.
			inliningDiffers := func(a, b []src.Pos) bool {
				if len(a) != len(b) {
					return true
				}
				for i := range a {
					if a[i].Filename() != b[i].Filename() {
						return true
					}
					if i != len(a)-1 && a[i].Line() != b[i].Line() {
						return true
					}
				}
				return false
			}

			var allPosOld []src.Pos
			var allPos []src.Pos

			for p := s.pp.Text; p != nil; p = p.Link {
				if p.Pos.IsKnown() {
					allPos = allPos[:0]
					p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
					if inliningDiffers(allPos, allPosOld) {
						for _, pos := range allPos {
							fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
						}
						// Swap (not copy) so the two slices don't share a backing store.
						allPos, allPosOld = allPosOld, allPos
					}
				}

				var s string
				if v, ok := progToValue[p]; ok {
					s = v.String()
				} else if b, ok := progToBlock[p]; ok {
					s = b.String()
				} else {
					s = "   "
				}
				fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
			}
			fi.Close()
		}
	}

	defframe(&s, e, f)

	f.HTMLWriter.Close()
	f.HTMLWriter = nil
}
7082
// defframe fills in the TEXT instruction's frame/argument sizes and emits
// prologue code: spills of partially-live register arguments and zeroing of
// ambiguously live stack variables.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live. That is, the named slot is considered live by liveness
	// analysis (because a part of it is live), but we may not spill all parts
	// into the slot. This can only happen with aggregate-typed arguments that
	// are SSA-able and not address-taken (for non-SSA-able or address-taken
	// arguments we always spill upfront).
	// Note: spilling is unnecessary in the -N/no-optimize case, since all
	// values will be considered non-SSAable and spilled up front.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, see if it is already spilled before it may be live. Look for
		// a spill in the entry block up to the first call (safepoint).
		type nameOff struct {
			n *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert code to spill pointer-carrying registers if not already spilled.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the garbage
	// collector only sees initialized values when it looks for pointers.
	// lo/hi delimit the current range of frame offsets to zero.
	var lo, hi int64

	// Opaque state for the backend to use; current backends use it to keep
	// track of which helper registers have been zeroed.
	var state uint32

	// Iterate through declarations, merging adjacent needzero ranges
	// and flushing each completed range with ZeroRange.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with range we already have.
			lo = n.FrameOffset()
			continue
		}

		// Zero old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7189
7190
// IndexJump describes one conditional branch used by CombJump: the
// branch opcode to emit and the index (into b.Succs) of the successor
// block it targets.
type IndexJump struct {
	Jump obj.As // conditional branch opcode
	Index int // successor index the branch jumps to
}
7195
// oneJump emits a single conditional branch, described by jump, out of
// block b, and stamps the resulting instruction with b's source
// position.
func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
	p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
	p.Pos = b.Pos
}
7200
7201
7202
7203 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7204 switch next {
7205 case b.Succs[0].Block():
7206 s.oneJump(b, &jumps[0][0])
7207 s.oneJump(b, &jumps[0][1])
7208 case b.Succs[1].Block():
7209 s.oneJump(b, &jumps[1][0])
7210 s.oneJump(b, &jumps[1][1])
7211 default:
7212 var q *obj.Prog
7213 if b.Likely != ssa.BranchUnlikely {
7214 s.oneJump(b, &jumps[1][0])
7215 s.oneJump(b, &jumps[1][1])
7216 q = s.Br(obj.AJMP, b.Succs[1].Block())
7217 } else {
7218 s.oneJump(b, &jumps[0][0])
7219 s.oneJump(b, &jumps[0][1])
7220 q = s.Br(obj.AJMP, b.Succs[0].Block())
7221 }
7222 q.Pos = b.Pos
7223 }
7224 }
7225
7226
// AddAux adds the offset (v.AuxInt) and symbolic part (v.Aux) of v to
// the address a; see AddAux2.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
7230 func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
7231 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
7232 v.Fatalf("bad AddAux addr %v", a)
7233 }
7234
7235 a.Offset += offset
7236
7237
7238 if v.Aux == nil {
7239 return
7240 }
7241
7242 switch n := v.Aux.(type) {
7243 case *ssa.AuxCall:
7244 a.Name = obj.NAME_EXTERN
7245 a.Sym = n.Fn
7246 case *obj.LSym:
7247 a.Name = obj.NAME_EXTERN
7248 a.Sym = n
7249 case *ir.Name:
7250 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7251 a.Name = obj.NAME_PARAM
7252 } else {
7253 a.Name = obj.NAME_AUTO
7254 }
7255 a.Sym = n.Linksym()
7256 a.Offset += n.FrameOffset()
7257 default:
7258 v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
7259 }
7260 }
7261
7262
7263
// extendIndex extends index idx to pointer width so it can be used as
// an array/slice index. len is the length operand passed to the panic
// path; kind selects which bounds-failure to report; bounded indicates
// the caller has already proven the index in range (so no check is
// needed).
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		// Already pointer width; use as is.
		return idx
	}
	if size > s.config.PtrSize {
		// A 64-bit index on a 32-bit target: the usable index is the
		// low 32 bits, but only if the high 32 bits are zero.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Check proven unnecessary, or bounds checking disabled
			// with -B.
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// For unsigned indexes, report the unsigned variant of the
			// corresponding bounds failure.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		// Branch: high word zero -> bNext (continue), else -> bPanic.
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely // in-range is the common case
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		// PanicExtend carries both 32-bit halves of the index so the
		// runtime can print the full value.
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Index is narrower than a pointer: widen it, preserving sign.
	// The switch key encodes (source size, pointer size) as
	// 10*size + PtrSize, e.g. 18 = 1-byte index, 8-byte pointer.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7355
7356
7357
7358 func CheckLoweredPhi(v *ssa.Value) {
7359 if v.Op != ssa.OpPhi {
7360 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7361 }
7362 if v.Type.IsMemory() {
7363 return
7364 }
7365 f := v.Block.Func
7366 loc := f.RegAlloc[v.ID]
7367 for _, a := range v.Args {
7368 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7369 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7370 }
7371 }
7372 }
7373
7374
7375
7376
7377
7378 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7379 entry := v.Block.Func.Entry
7380 if entry != v.Block {
7381 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7382 }
7383 for _, w := range entry.Values {
7384 if w == v {
7385 break
7386 }
7387 switch w.Op {
7388 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7389
7390 default:
7391 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7392 }
7393 }
7394 }
7395
7396
// CheckArgReg verifies that v, an argument-register value, lives in the
// function's entry block, the only block where incoming register
// arguments are valid.
func CheckArgReg(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
	}
}
7403
7404 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7405 n, off := ssa.AutoVar(v)
7406 a.Type = obj.TYPE_MEM
7407 a.Sym = n.Linksym()
7408 a.Reg = int16(Arch.REGSP)
7409 a.Offset = n.FrameOffset() + off
7410 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7411 a.Name = obj.NAME_PARAM
7412 } else {
7413 a.Name = obj.NAME_AUTO
7414 }
7415 }
7416
7417
7418
// Call emits a CALL instruction for SSA value v (after running
// PrepareCall bookkeeping) and returns the resulting Prog.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	// Record whether the pending position is marked as a statement
	// before PrepareCall runs. NOTE(review): presumably PrepareCall can
	// disturb position state; confirm before simplifying.
	pPosIsStmt := s.pp.Pos.IsStmt()
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	// Carry the statement mark over onto the call instruction.
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call: target address is in the register of Args[0].
		// The operand type for an indirect call is architecture-specific.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7447
7448
7449
// TailCall emits a tail-call for SSA value v: identical to Call except
// the emitted instruction's opcode is RET instead of CALL.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7455
7456
7457
7458
7459 func (s *State) PrepareCall(v *ssa.Value) {
7460 idx := s.livenessMap.Get(v)
7461 if !idx.StackMapValid() {
7462
7463 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7464 base.Fatalf("missing stack map index for %v", v.LongString())
7465 }
7466 }
7467
7468 call, ok := v.Aux.(*ssa.AuxCall)
7469
7470 if ok {
7471
7472
7473 if nowritebarrierrecCheck != nil {
7474 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7475 }
7476 }
7477
7478 if s.maxarg < v.AuxInt {
7479 s.maxarg = v.AuxInt
7480 }
7481 }
7482
7483
7484
7485 func (s *State) UseArgs(n int64) {
7486 if s.maxarg < n {
7487 s.maxarg = n
7488 }
7489 }
7490
7491
7492 func fieldIdx(n *ir.SelectorExpr) int {
7493 t := n.X.Type()
7494 if !t.IsStruct() {
7495 panic("ODOT's LHS is not a struct")
7496 }
7497
7498 for i, f := range t.Fields() {
7499 if f.Sym == n.Sel {
7500 if f.Offset != n.Offset() {
7501 panic("field offset doesn't match")
7502 }
7503 return i
7504 }
7505 }
7506 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7507
7508
7509
7510 }
7511
7512
7513
// ssafn holds per-function frontend information consumed by the SSA
// backend (it is the concrete implementation behind the e *ssafn
// methods below).
type ssafn struct {
	curfn *ir.Func // function being compiled
	strings map[string]*obj.LSym // cache: constant string -> data symbol (see StringData)
	stksize int64 // size of the locals area; added to maxarg in defframe
	stkptrsize int64 // NOTE(review): appears to be the pointer-containing prefix of the frame — not used in this chunk, confirm elsewhere

	// stkalign is the required alignment of the outgoing-argument
	// area; defframe rounds maxarg up to it.
	stkalign int64

	log bool // enables Logf output (see Log/Logf)
}
7528
7529
7530
7531 func (e *ssafn) StringData(s string) *obj.LSym {
7532 if aux, ok := e.strings[s]; ok {
7533 return aux
7534 }
7535 if e.strings == nil {
7536 e.strings = make(map[string]*obj.LSym)
7537 }
7538 data := staticdata.StringSym(e.curfn.Pos(), s)
7539 e.strings[s] = data
7540 return data
7541 }
7542
7543
// SplitSlot returns a slot representing the piece of *parent of type t
// starting at the given offset. For splittable autos it creates a new,
// smaller local variable (named parent+suffix); otherwise it returns a
// sub-slot that still refers to the parent variable.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	// Address-taken variables and non-autos cannot truly be split;
	// keep addressing them through the parent at an offset.
	if node.Class != ir.PAUTO || node.Addrtaken() {
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	// Create a fresh auto to hold the split-off component.
	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever) // derived from an auto; never escapes
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
7559
7560
7561 func (e *ssafn) Logf(msg string, args ...interface{}) {
7562 if e.log {
7563 fmt.Printf(msg, args...)
7564 }
7565 }
7566
// Log reports whether debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7570
7571
7572 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
7573 base.Pos = pos
7574 nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
7575 base.Fatalf("'%s': "+msg, nargs...)
7576 }
7577
7578
7579
// Warnl emits a formatted compiler warning at the given source
// position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7583
// Debug_checknil reports whether nil-check debugging output is enabled
// (the compiler's Nil debug flag is nonzero).
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7587
// UseWriteBarrier reports whether write-barrier generation is enabled
// (the compiler's WB flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7591
7592 func (e *ssafn) Syslook(name string) *obj.LSym {
7593 switch name {
7594 case "goschedguarded":
7595 return ir.Syms.Goschedguarded
7596 case "writeBarrier":
7597 return ir.Syms.WriteBarrier
7598 case "wbZero":
7599 return ir.Syms.WBZero
7600 case "wbMove":
7601 return ir.Syms.WBMove
7602 case "cgoCheckMemmove":
7603 return ir.Syms.CgoCheckMemmove
7604 case "cgoCheckPtrWrite":
7605 return ir.Syms.CgoCheckPtrWrite
7606 }
7607 e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
7608 return nil
7609 }
7610
// Func returns the function currently being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7614
7615 func clobberBase(n ir.Node) ir.Node {
7616 if n.Op() == ir.ODOT {
7617 n := n.(*ir.SelectorExpr)
7618 if n.X.Type().NumFields() == 1 {
7619 return clobberBase(n.X)
7620 }
7621 }
7622 if n.Op() == ir.OINDEX {
7623 n := n.(*ir.IndexExpr)
7624 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7625 return clobberBase(n.X)
7626 }
7627 }
7628 return n
7629 }
7630
7631
7632 func callTargetLSym(callee *ir.Name) *obj.LSym {
7633 if callee.Func == nil {
7634
7635
7636
7637 return callee.Linksym()
7638 }
7639
7640 return callee.LinksymABI(callee.Func.ABI)
7641 }
7642
7643
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies it at construction time.
const deferStructFnField = 4

// deferType caches the type built by deferstruct.
var deferType *types.Type
7647
7648
7649
// deferstruct returns (and caches in deferType) a struct type named
// runtime._defer used for defer records.
// NOTE(review): the field list presumably must mirror the runtime's
// _defer layout — confirm against runtime before changing it.
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	// makefield builds an unexported, package-less field of type t.
	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// The pointer-like fields below are declared uintptr.
		// NOTE(review): presumably intentional so the GC does not scan
		// them through this type — confirm.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Keep deferStructFnField in sync with the field list above.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	// Declare the named type runtime._defer wrapping the struct.
	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Attach the struct as the underlying type and lay it out.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7688
7689
7690
7691
7692
7693 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
7694 return obj.Addr{
7695 Name: obj.NAME_NONE,
7696 Type: obj.TYPE_MEM,
7697 Reg: baseReg,
7698 Offset: spill.Offset + extraOffset,
7699 }
7700 }
7701
// BoundsCheckFunc maps each ssa.BoundsKind to the symbol of the routine
// that reports that bounds failure. NOTE(review): populated outside
// this chunk (presumably per-architecture setup) — confirm.
var BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
7703
View as plain text