1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmd/compile/internal/abi"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "os"
16 "path/filepath"
17 "sort"
18 "strings"
19
20 "cmd/compile/internal/base"
21 "cmd/compile/internal/ir"
22 "cmd/compile/internal/liveness"
23 "cmd/compile/internal/objw"
24 "cmd/compile/internal/reflectdata"
25 "cmd/compile/internal/ssa"
26 "cmd/compile/internal/staticdata"
27 "cmd/compile/internal/typecheck"
28 "cmd/compile/internal/types"
29 "cmd/internal/obj"
30 "cmd/internal/obj/x86"
31 "cmd/internal/objabi"
32 "cmd/internal/src"
33 "cmd/internal/sys"
34 )
35
// Package-level state for the SSA backend.
var ssaConfig *ssa.Config // shared, read-only SSA configuration built by InitConfig
var ssaCaches []ssa.Cache // one scratch cache per concurrent backend worker (-c)

var ssaDump string     // copy of $GOSSAFUNC: name of the function to dump SSA for
var ssaDir string      // copy of $GOSSADIR: optional directory for the HTML dump file
var ssaDumpStdout bool // whether to echo the dump to stdout ("+" suffix on GOSSAFUNC)
var ssaDumpCFG string  // CFG rendering style for the dump (":<style>" suffix on GOSSAFUNC)
const ssaDumpFile = "ssa.html" // default dump file name when GOSSADIR is unset


// ssaDumpInlined collects functions inlined into the dumped function so
// their sources can be shown alongside it in the HTML output.
var ssaDumpInlined []*ir.Func
47
48 func DumpInline(fn *ir.Func) {
49 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
50 ssaDumpInlined = append(ssaDumpInlined, fn)
51 }
52 }
53
54 func InitEnv() {
55 ssaDump = os.Getenv("GOSSAFUNC")
56 ssaDir = os.Getenv("GOSSADIR")
57 if ssaDump != "" {
58 if strings.HasSuffix(ssaDump, "+") {
59 ssaDump = ssaDump[:len(ssaDump)-1]
60 ssaDumpStdout = true
61 }
62 spl := strings.Split(ssaDump, ":")
63 if len(spl) > 1 {
64 ssaDump = spl[0]
65 ssaDumpCFG = spl[1]
66 }
67 }
68 }
69
// InitConfig builds the shared SSA configuration (ssaConfig, ssaCaches)
// and resolves every runtime symbol the SSA backend may reference.
// It must run once, after the type system and Arch have been set up.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Pre-generate pointer types that are rare in the frontend but common
	// in the backend; pointer-type caching is disabled below, so creating
	// them now avoids repeated allocations later.
	_ = types.NewPtr(types.Types[types.TINTER])
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))
	_ = types.NewPtr(types.NewPtr(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.ByteType))
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8])))
	_ = types.NewPtr(types.Types[types.TINT16])
	_ = types.NewPtr(types.Types[types.TINT64])
	_ = types.NewPtr(types.ErrorType)
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Resolve runtime functions and variables used by generated code.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.AssertI2I = typecheck.LookupRuntimeFunc("assertI2I")
	ir.Syms.AssertI2I2 = typecheck.LookupRuntimeFunc("assertI2I2")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier = typecheck.LookupRuntimeFunc("gcWriteBarrier")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemclr = typecheck.LookupRuntimeFunc("typedmemclr")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	// amd64 has register-specialized write-barrier entry points.
	if base.Ctxt.Arch.Name == "amd64" {
		GCWriteBarrierReg = map[int16]*obj.LSym{
			x86.REG_AX: typecheck.LookupRuntimeFunc("gcWriteBarrier"),
			x86.REG_CX: typecheck.LookupRuntimeFunc("gcWriteBarrierCX"),
			x86.REG_DX: typecheck.LookupRuntimeFunc("gcWriteBarrierDX"),
			x86.REG_BX: typecheck.LookupRuntimeFunc("gcWriteBarrierBX"),
			x86.REG_BP: typecheck.LookupRuntimeFunc("gcWriteBarrierBP"),
			x86.REG_SI: typecheck.LookupRuntimeFunc("gcWriteBarrierSI"),
			x86.REG_R8: typecheck.LookupRuntimeFunc("gcWriteBarrierR8"),
			x86.REG_R9: typecheck.LookupRuntimeFunc("gcWriteBarrierR9"),
		}
	}

	// Wasm uses the Go-ABI ("goPanic...") bounds-check helpers; all other
	// architectures use the assembly ("panic...") variants.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// 32-bit platforms additionally need the sign-extending bounds helpers.
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm helper symbols (resolved unconditionally).
	ir.Syms.WasmMove = typecheck.LookupRuntimeVar("wasmMove")
	ir.Syms.WasmZero = typecheck.LookupRuntimeVar("wasmZero")
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
215
216
217
218
219
220
221
222
// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's
// stack map. A fresh copy of ABI0 is returned so callers may mutate it
// without affecting the shared configuration.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy() // No idea what races will result, be safe
}
226
227
228
// Magic markers used to force the register ABI on selected functions for
// testing. They are currently disabled (the "*disabled*" prefix makes them
// never match a real identifier).
const magicNameDotSuffix = ".*disabled*MagicMethodNameForTestingRegisterABI"
const magicLastTypeName = "*disabled*MagicLastTypeNameForTestingRegisterABI"
231
232
233
// abiForFunc implements the ABI selection policy for a function; it returns
// one of abi0/abi1 rather than a copy. Passing fn == nil yields the default
// ABI for the current experiment configuration.
func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
	if buildcfg.Experiment.RegabiArgs {
		// With the register-args experiment on, the function's declared
		// ABI decides directly.
		if fn == nil {
			return abi1
		}
		switch fn.ABI {
		case obj.ABI0:
			return abi0
		case obj.ABIInternal:
			// TODO(register args) undefined ABIs are resolved elsewhere;
			// by this point the ABI should be concrete.
			return abi1
		}
		base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
		panic("not reachable")
	}

	// Experiment off: default to ABI0 and opt in to ABI1 only via the
	// //go:registerparams pragma or the testing magic names.
	a := abi0
	if fn != nil {
		name := ir.FuncName(fn)
		magicName := strings.HasSuffix(name, magicNameDotSuffix)
		if fn.Pragma&ir.RegisterParams != 0 { // TODO(register args) remove this
			// Methods (dotted names) can't be called via the pragma
			// alone; warn unless they carry the magic suffix.
			if strings.Contains(name, ".") {
				if !magicName {
					base.ErrorfAt(fn.Pos(), "Calls to //go:registerparams method %s won't work, remove the pragma from the declaration.", name)
				}
			}
			a = abi1
		} else if magicName {
			// Autogenerated wrappers with the magic name get ABI1
			// implicitly; hand-written ones must also use the pragma.
			if base.FmtPos(fn.Pos()) == "<autogenerated>:1" {
				a = abi1
			} else {
				base.ErrorfAt(fn.Pos(), "Methods with magic name %s (method %s) must also specify //go:registerparams", magicNameDotSuffix[1:], name)
			}
		}
		// A magic last-parameter type name also forces ABI1 (test hook).
		if regAbiForFuncType(fn.Type().FuncType()) {
			a = abi1
		}
	}
	return a
}
278
279 func regAbiForFuncType(ft *types.Func) bool {
280 np := ft.Params.NumFields()
281 return np > 0 && strings.Contains(ft.Params.FieldType(np-1).String(), magicLastTypeName)
282 }
283
284
285 func dvarint(x *obj.LSym, off int, v int64) int {
286 if v < 0 || v > 1e9 {
287 panic(fmt.Sprintf("dvarint: bad offset for funcdata - %v", v))
288 }
289 if v < 1<<7 {
290 return objw.Uint8(x, off, uint8(v))
291 }
292 off = objw.Uint8(x, off, uint8((v&127)|128))
293 if v < 1<<14 {
294 return objw.Uint8(x, off, uint8(v>>7))
295 }
296 off = objw.Uint8(x, off, uint8(((v>>7)&127)|128))
297 if v < 1<<21 {
298 return objw.Uint8(x, off, uint8(v>>14))
299 }
300 off = objw.Uint8(x, off, uint8(((v>>14)&127)|128))
301 if v < 1<<28 {
302 return objw.Uint8(x, off, uint8(v>>21))
303 }
304 off = objw.Uint8(x, off, uint8(((v>>21)&127)|128))
305 return objw.Uint8(x, off, uint8(v>>28))
306 }
307
308
309
310
311
312
313
314
315
316
317
318
319
320
// emitOpenDeferInfo emits the FUNCDATA symbol describing this function's
// open-coded defers. The layout (all varint-encoded) is:
//   - frame offset of the deferBits temp (negated)
//   - number of open-coded defers
//   - for each defer, in reverse source order: frame offset of its
//     closure slot (negated)
func (s *state) emitOpenDeferInfo() {
	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x
	off := 0
	off = dvarint(x, off, -s.deferBitsTemp.FrameOffset())
	off = dvarint(x, off, int64(len(s.openDefers)))

	// Write in reverse-order, for ease of running in that order at runtime.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		off = dvarint(x, off, -r.closureNode.FrameOffset())
	}
}
335
336 func okOffset(offset int64) int64 {
337 if offset == types.BOGUS_FUNARG_OFFSET {
338 panic(fmt.Errorf("Bogus offset %d", offset))
339 }
340 return offset
341 }
342
343
344
// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing.
func buildssa(fn *ir.Func, worker int) *ssa.Func {
	name := ir.FuncName(fn)
	printssa := false
	if ssaDump != "" {
		// Match a simple name, a fully package-qualified name, or a
		// name qualified by a trailing path element of the package.
		pkgDotName := base.Ctxt.Pkgpath + "." + name
		printssa = name == ssaDump ||
			strings.HasSuffix(pkgDotName, ssaDump) && (pkgDotName == ssaDump || strings.HasSuffix(pkgDotName, "/"+ssaDump))
	}
	var astBuf *bytes.Buffer
	if printssa {
		// Capture the IR dump so it can be embedded in the HTML output.
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-enter", fn.Enter)
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		ir.FDumpList(astBuf, "buildssa-exit", fn.Exit)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	s.f = ssa.NewFunc(&fe)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Config = ssaConfig
	s.f.Cache = &ssaCaches[worker]
	s.f.Cache.Reset()
	s.f.Name = name
	s.f.DebugTest = s.f.DebugHashMatch("GOSSAHASH")
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	// Per-function copies of the ABI configs, so that concurrent workers
	// do not share mutable state.
	s.f.ABI0 = ssaConfig.ABI0.Copy()
	s.f.ABI1 = ssaConfig.ABI1.Copy()
	s.f.ABIDefault = abiForFunc(nil, s.f.ABI0, s.f.ABI1)
	s.f.ABISelf = abiForFunc(fn, s.f.ABI0, s.f.ABI1)

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+name+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// TODO: generate and print a mapping from nodes to values and blocks
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether this function's defers can be open-coded (inlined at
	// exit) rather than going through the runtime defer machinery.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Disabled on 386 with shared libraries/dynamic linking —
		// presumably the extra PIC prologue code interferes with the
		// exit-sequence tracking; see upstream history for details.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && len(s.curfn.Exit) > 0 {
		// Extra exit code (e.g. instrumentation) would not be emitted in
		// the open-coded defer exit path, so fall back to the runtime
		// mechanism.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Open-coded defers require all result parameters to live on the
		// stack (heap-escaped results disqualify the function).
		for _, f := range s.curfn.Type().Results().FieldSlice() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// The defer-exit sequence is duplicated at every return, so cap
		// the product of returns and defers to bound code growth;
		// open-coding matters most for small functions anyway.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temp: a bitmask recording which of the
		// open-coded defers have been activated. It is address-taken so
		// the runtime can read it during panics.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// OpConst8 with default zero AuxInt: deferBits starts at 0.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Mark the slot live so stores to it are never eliminated, even
		// when all in-function reads of deferBits are optimized away
		// (the runtime still reads it during a panic).
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Preserve PPARAMOUT nodes that are passed in registers on a side
	// list before later passes prune them from fn.Dcl, so they remain
	// available for debug-info generation.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v) // debugging aid; not needed for compilation itself
			} else {
				// Not SSA-able (address taken and/or too large).
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if TypeOK(n.Type()) {
						// SSA-able type whose address was taken: receive the
						// value as an OpArg and store it to memory immediately.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big for SSA: spill its registers to the stack
						// slot up front.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables from the closure context.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		offset := int64(types.PtrSize) // skip the closure's function pointer word
		for _, n := range fn.ClosureVars {
			typ := n.Type()
			if !n.Byval() {
				typ = types.NewPtr(typ)
			}

			offset = types.Rnd(offset, typ.Alignment())
			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)
			offset += typ.Size()

			// A small, non-address-taken variable captured by value is
			// promoted to PAUTO so it can participate in SSA directly;
			// load its initial value from the closure and move on.
			if n.Byval() && !n.Addrtaken() && TypeOK(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			// By-reference captures hold a pointer in the closure slot;
			// load it, then record the heap address for the variable.
			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	s.stmtList(fn.Enter)
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fallthrough to exit.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	if s.hasOpenDefers {
		s.emitOpenDeferInfo()
	}

	// Record spill locations for all register-passed input parameters so
	// morestack-style code emitted later knows where to save them.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
625
// storeParameterRegsToStack spills the register-resident pieces of the
// parameter n (described by paramAssignment) to its stack home at addr.
// If pointersOnly is true, only pointer-shaped pieces are stored.
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Aux ties the arg value back to the named parameter piece for
		// debug information.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
642
643
644
645
646
647
648
// zeroResults zeros the stack-resident return values at function entry.
// Heap-escaped results are skipped here: their backing store comes from
// newobject (already zeroed) and is allocated later by paramsToHeap.
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results().FieldSlice() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// The local which points to the return value is the
			// thing that needs zeroing; that happens once the heap
			// allocation is made (see paramsToHeap).
			continue
		}
		// Zero the stack slot, via SSA assignment when the type is
		// SSA-able, otherwise via an explicit memory clear.
		if typ := n.Type(); TypeOK(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
667
668
669
// paramsToHeap produces code to allocate memory for heap-escaped
// parameters and results, and to copy non-result parameters' values in
// from their stack slots.
func (s *state) paramsToHeap() {
	do := func(params *types.Type) {
		for _, f := range params.FieldSlice() {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			// Only input parameters carry an initial value to copy;
			// results start zeroed.
			if n.Class == ir.PPARAM {
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
692
693
// newHeapaddr allocates heap memory for n and records its heap address.
func (s *state) newHeapaddr(n *ir.Name) {
	s.setHeapaddr(n.Pos(), n, s.newObject(n.Type()))
}
697
698
699
// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// of type *n.Type()) and sets it as n's Heapaddr, the pointer through
// which the heap-escaped n is accessed.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare the pointer variable "&name" as a local of the function.
	addr := ir.NewNameAt(pos, &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg})
	addr.SetType(types.NewPtr(n.Type()))
	addr.Class = ir.PAUTO
	addr.SetUsed(true)
	addr.Curfn = s.curfn
	s.curfn.Dcl = append(s.curfn.Dcl, addr)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		// Flag so liveness/return handling knows this pointer leads to
		// an output parameter.
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
721
722
// newObject returns an SSA value denoting new(typ).
// Zero-sized allocations all share the runtime's zerobase symbol.
func (s *state) newObject(typ *types.Type) *ssa.Value {
	if typ.Size() == 0 {
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, s.reflectType(typ))[0]
}
729
// checkPtrAlignment emits a runtime.checkptrAlignment call for the pointer
// conversion n, unless the check is provably unnecessary. v is the
// converted pointer value. count, if non-nil, is the number of array
// elements being addressed; n's element type must then be an array.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem := n.Type().Elem()
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem = elem.Elem()
	}
	size := elem.Size()
	// A 1-byte-aligned element can never be misaligned; with no count and
	// size <= 1 there is also no multi-element span to overflow.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to an uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, s.reflectType(elem), count)
}
754
755
756
// reflectType returns an SSA value representing a pointer to typ's
// runtime type descriptor. The address is materialized in the entry block.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
761
// dumpSourcesColumn adds the source listing of fn, plus the sources of all
// recorded inlined functions, to the SSA HTML dump.
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the source of the target function.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the sources of inlined functions, skipping any that fail.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	// Sort the inlined listings, then put the target function first.
	sort.Sort(ssa.ByTopo(inlFns))
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
790
791 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
792 f, err := os.Open(os.ExpandEnv(file))
793 if err != nil {
794 return nil, err
795 }
796 defer f.Close()
797 var lines []string
798 ln := uint(1)
799 scanner := bufio.NewScanner(f)
800 for scanner.Scan() && ln <= end {
801 if ln >= start {
802 lines = append(lines, scanner.Text())
803 }
804 ln++
805 }
806 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
807 }
808
809
810
811
// updateUnsetPredPos propagates position information from b (which must
// have a position) backwards to any predecessor block that lacks one,
// recursing through chains of position-less predecessors. The position
// chosen is b's own, or the position of b's first positioned value.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue // predecessor already has a meaningful position
		}
		// Compute bestPos lazily, only when some predecessor needs it.
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Prefer the position of b's earliest
					// positioned value over b's own.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // keep walking back through unset preds
	}
}
840
841
// openDeferInfo describes a single defer call that is being emitted
// open-coded (inline at function exit) rather than via the runtime.
type openDeferInfo struct {
	// The node representing the defer's call expression.
	n *ir.CallExpr
	// The SSA value of the deferred function/closure, captured at the
	// defer statement.
	closure *ssa.Value
	// The temp holding the deferred closure; its frame offset is written
	// into the open-defer funcdata (see emitOpenDeferInfo).
	closureNode *ir.Name
}
853
// state holds the working state while converting one function from IR to
// SSA form.
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// IR node for the function being compiled
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current block being filled in
	curBlock *ssa.Block

	// variable assignments in the current block
	// (map from IR variable node to its current SSA value)
	vars map[ir.Node]*ssa.Value

	// fwdVars holds placeholder values for variables referenced before
	// they are defined in the current block; resolved by insertPhis.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by block ID
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables on the stack
	decladdrs map[*ir.Name]*ssa.Value

	// starting values: initial memory, stack pointer, and globals base
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value
	// open-coded defers: address and temp of the deferBits bitmask
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack; the current position is the top of stack
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// panic call blocks, keyed by (function, position) so that multiple
	// panics at the same site can share one block
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs   bool
	hasdefer        bool // whether the function contains a defer statement
	softFloat       bool
	hasOpenDefers   bool // whether this function's defers are open-coded
	checkPtrEnabled bool // whether checkptr instrumentation is on

	// open-coded defers, in source order; at exit they run in reverse
	// order of this list
	openDefers []*openDeferInfo

	// cached blocks of the shared defer-exit code, so multiple returns
	// can reuse it (valid while lastDeferCount matches len(openDefers))
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	// most recently emitted call value — presumably used to tie results
	// back to their call op; confirm against uses elsewhere in the file
	prevCall *ssa.Value
}
927
// funcLine identifies a panic call site: the panic function plus the
// source position. Used as the key for deduplicating panic blocks.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
933
// ssaLabel records the blocks a Go label can transfer control to.
type ssaLabel struct {
	target         *ssa.Block // block identified by the label
	breakTarget    *ssa.Block // block to jump to for a labeled break
	continueTarget *ssa.Block // block to jump to for a labeled continue
}
939
940
941 func (s *state) label(sym *types.Sym) *ssaLabel {
942 lab := s.labels[sym.Name]
943 if lab == nil {
944 lab = new(ssaLabel)
945 s.labels[sym.Name] = lab
946 }
947 return lab
948 }
949
// Logging and diagnostics delegates: state forwards to the SSA function's
// frontend so messages carry the current source position.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }
957
// ssaMarker returns a distinct dummy ir.Name used as a key for
// pseudo-variables (memory, scratch values) in the s.vars maps.
func ssaMarker(name string) *ir.Name {
	return typecheck.NewName(&types.Sym{Name: name})
}
961
var (
	// marker node for the memory pseudo-variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary pseudo-variables used while building
	// compound operations (slices, type assertions, defers, ...)
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	newlenVar    = ssaMarker("newlen")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
)
975
976
977 func (s *state) startBlock(b *ssa.Block) {
978 if s.curBlock != nil {
979 s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
980 }
981 s.curBlock = b
982 s.vars = map[ir.Node]*ssa.Value{}
983 for n := range s.fwdVars {
984 delete(s.fwdVars, n)
985 }
986 }
987
988
989
990
// endBlock marks the end of generating code for the current block and
// returns it. Returns nil if there is no current block (i.e. no control
// flow reaches the current point). The block's final variable state is
// recorded in s.defvars for later phi insertion.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Empty plain blocks get no position of their own; it is
		// filled in later from a successor (see updateUnsetPredPos).
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}
1012
1013
// pushLine pushes a line number on the line number stack, making it the
// current position for newly created values.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// Unknown position: keep the previous position instead, so
		// generated code is never attributed to line 0.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}
1028
1029
// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos returns the top of the line number stack — the current
// source position.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1038
1039
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the
// current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the
// current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}
1053
1054
// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the
// current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the
// current block. isStmt determines whether the value may carry a statement
// boundary mark; when false the position is explicitly marked not-a-statement.
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the
// current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}
1078
1079
// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the
// current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the
// current block. isStmt determines whether the value may carry a statement
// boundary mark; when false the position is explicitly marked not-a-statement.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to
// the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}
1103
1104
// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to
// the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the
// current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to
// the current block. isStmt determines whether the value may carry a
// statement boundary mark; when false the position is marked not-a-statement.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}
1128
1129
1130 func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
1131 return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
1132 }
1133
1134
1135 func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
1136 return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
1137 }
1138
1139 func (s *state) entryBlock() *ssa.Block {
1140 b := s.f.Entry
1141 if base.Flag.N > 0 && s.curBlock != nil {
1142
1143
1144
1145
1146 b = s.curBlock
1147 }
1148 return b
1149 }
1150
1151
// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1185
1186
// const* routines add a new const value to the function, delegating to the
// function-level constant cache so identical constants are shared.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt adds a new const int value of the target's native int width,
// checking on 32-bit targets that c fits in 32 bits.
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}

// constOffPtrSP returns a constant offset from the stack pointer.
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}
1230
1231
1232
// newValueOrSfCall1 is like newValue1, except that when soft-float mode is
// enabled it first tries to lower the op to a call into the soft-float
// runtime via sfcall; if sfcall handles the op, that result is used instead.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}

// newValueOrSfCall2 is the two-argument analogue of newValueOrSfCall1.
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1249
// instrumentKind identifies the kind of memory access being instrumented
// for the race/msan/asan sanitizers.
type instrumentKind uint8

// Typed constants: a dedicated instrumentKind type exists, so give the
// enumerators that type rather than leaving them untyped ints. This makes
// accidentally passing an unrelated integer a compile-time error while
// keeping the same values (0, 1, 2).
const (
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)
1257
// instrument emits a single-address sanitizer instrumentation call for an
// access of kind to the memory at addr. It is instrument2 with no second address.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1261
1262
1263
1264
1265 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1266 if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
1267 s.instrument(t, addr, kind)
1268 return
1269 }
1270 for _, f := range t.Fields().Slice() {
1271 if f.Sym.IsBlank() {
1272 continue
1273 }
1274 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1275 s.instrumentFields(f.Type, offptr, kind)
1276 }
1277 }
1278
1279 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1280 if base.Flag.MSan {
1281 s.instrument2(t, dst, src, instrumentMove)
1282 } else {
1283 s.instrument(t, src, instrumentRead)
1284 s.instrument(t, dst, instrumentWrite)
1285 }
1286 }
1287
// instrument2 emits the sanitizer runtime call for an access of kind to
// addr (and, for moves, addr2). It picks the runtime entry point based on
// which sanitizer flag is active and on the shape of the access, then
// issues it as a runtime call with the address(es) and, where the chosen
// entry point needs it, the access width.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	// Only instrument bodies that were marked for instrumentation.
	if !s.curfn.InstrumentBody() {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	// Addresses the SSA layer can prove sanitizer-safe need no call.
	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	// Only the MSAN move callback takes a second address.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// For composite objects we have to write every address
		// because a write might happen to any subobject.
		// We instrument the whole range with the *range entry
		// points, which take an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// for non-composite objects we can write just the start
		// address, as any write must write the first byte.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1368
// load loads a value of type t from src, instrumenting the read (field by
// field for struct types under msan/asan) before emitting the OpLoad.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad emits an OpLoad with no sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store emits an OpStore of val (of type t) to dst, threading the memory state.
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

// zero emits an OpZero clearing t.Size() bytes at dst, instrumenting the write.
func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}

// move emits an OpMove copying t.Size() bytes from src to dst, instrumenting the move.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.instrumentMove(t, dst, src)
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1395
1396
1397 func (s *state) stmtList(l ir.Nodes) {
1398 for _, n := range l {
1399 s.stmt(n)
1400 }
1401 }
1402
1403
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	if !(n.Op() == ir.OVARKILL || n.Op() == ir.OVARLIVE || n.Op() == ir.OVARDEF) {
		// OVARKILL, OVARLIVE, and OVARDEF are invisible to the programmer,
		// so we don't give them statement boundaries.
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	// If s.curBlock is nil, and n isn't a label (which might have an
	// associated goto somewhere), then we go ahead and skip over the
	// dead statement.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	// No-ops at this stage of compilation.
	case ir.ODCLCONST, ir.ODCLTYPE, ir.OFALL:

	// Expression statements
	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		// Calls to functions known never to return (throw, gopanic, ...)
		// terminate the current block with a BlockExit.
		if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PFUNC {
			if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.X.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
				// TODO: never rewrite OPANIC to OCALLFUNC in the
				// first place. Need to wait until all backends
				// go through SSA.
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever {
				d = callDeferStack
			}
			s.callResult(n.Call.(*ir.CallExpr), d)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !TypeOK(n.Rhs[0].Type()) {
			// The result is too big for SSA registers: it must come
			// from a load, and we assign through a dereference of
			// that load's address instead.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if mem.Op == ssa.OpVarKill {
				mem = mem.Args[0]
			}
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		lab := s.label(sym)

		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to that label.
		// (We pretend "label:" is preceded by "goto label", unless the
		// label is defined in a dead block, i.e. s.curBlock is nil.)
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x=x assignment. No point in doing anything
			// here. In addition, skipping this assignment
			// prevents generating:
			//   VARDEF x
			//   COPY x -> x
			// which is bad because x is incorrectly considered
			// dead before the vardef. See issue #14904.
			return
		}

		// mayOverlap keeps track of whether the LHS and RHS might
		// refer to partially overlapping memory. Partial overlapping can
		// only happen for arrays, see the comment in moveWhichMayOverlap.
		//
		// If both sides of the assignment are not dereferences, then partial
		// overlap can't happen. Partial overlap can only occur only when the
		// arrays referenced are strictly smaller parts of the same base array.
		// If one side of the assignment is a full array, then partial overlap
		// can't happen. (The arrays are either disjoint or identical.)
		//
		// (NOTE(review): the overlap machinery described above is not
		// present in this version; kept here only as orientation for
		// the literal/append special cases below.)

		// Evaluate RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk. Any that remain are just T{}
				// or equivalents. Use the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append back
				// to the same slice. If so, we handle it specially to avoid
				// write barriers on the fast path.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it'll be on the stack,
				// so there will be no write barriers,
				// so there's no need to attempt to prevent them.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 { // replicating old diagnostic message
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs
			// Just evaluate rhs for side-effects.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !TypeOK(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use OpZero.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing. See issue #14855.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// TODO: detect defaults for len/cap also.
			// Currently doesn't really work because (*p)[:len(*p)] appears here as:
			//    tmp = len(*p)
			//    (*p)[:tmp]
			// if j != nil && (j.Op == OLEN && SameSafeExpr(j.Left, n.Left)) {
			//      j = nil
			// }
			// if k != nil && (k.Op == OCAP && SameSafeExpr(k.Left, n.Left)) {
			//      k = nil
			// }
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assign(n.X, r, deref, skip)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Statically-known condition: compile only the taken arm.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp // could use BlockExit. BlockRetJmp is mostly for clarity.
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR, ir.OFORUNTIL:
		// OFOR: for Ninit; Left; Right { Nbody }
		// cond (Left); body (Nbody); incr (Right)
		//
		// OFORUNTIL: for Ninit; Left; Right; List { Nbody }
		// => body: { Nbody }; incr: Right; if Left { lateincr: List; goto body }; end:
		n := n.(*ir.ForStmt)
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have correct position; issue #30167
		bBody.Pos = n.Pos()

		// first, jump to condition test (OFOR) or body (OFORUNTIL)
		b := s.endBlock()
		if n.Op() == ir.OFOR {
			b.AddEdgeTo(bCond)
			// generate code to test condition
			s.startBlock(bCond)
			if n.Cond != nil {
				s.condBranch(n.Cond, bBody, bEnd, 1)
			} else {
				b := s.endBlock()
				b.Kind = ssa.BlockPlain
				b.AddEdgeTo(bBody)
			}

		} else {
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr (and, for OFORUNTIL, condition)
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if n.Op() == ir.OFOR {
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bCond)
				// It can happen that bIncr ends in a block containing only VARKILL,
				// and that muddles the debugging experience.
				if b.Pos == src.NoXPos {
					b.Pos = bCond.Pos
				}
			}
		} else {
			// bCond is unused in OFORUNTIL, so repurpose it.
			bLateIncr := bCond
			// test condition
			s.condBranch(n.Cond, bLateIncr, bEnd, 1)
			// generate late increment
			s.startBlock(bLateIncr)
			s.stmtList(n.Late)
			s.endBlock().AddEdgeTo(bBody)
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the front end into their Nbody fields.
		// Our main task is to correctly hook up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable code paths.
		// If we still have a current block here, then mark it unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OVARDEF:
		n := n.(*ir.UnaryExpr)
		if !s.canSSA(n.X) {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, n.X.(*ir.Name), s.mem(), false)
		}
	case ir.OVARKILL:
		// Insert a varkill op to record that a variable is no longer live.
		// We only care about liveness info at call sites, so putting the
		// varkill in the store chain is enough to keep it correctly ordered
		// with respect to call ops.
		n := n.(*ir.UnaryExpr)
		if !s.canSSA(n.X) {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarKill, types.TypeMem, n.X.(*ir.Name), s.mem(), false)
		}

	case ir.OVARLIVE:
		// Insert a varlive op to record that a variable is still live.
		n := n.(*ir.UnaryExpr)
		v := n.X.(*ir.Name)
		if !v.Addrtaken() {
			s.Fatalf("VARLIVE variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("VARLIVE variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
1934
1935
1936
1937 const shareDeferExits = false
1938
1939
1940
1941
1942 func (s *state) exit() *ssa.Block {
1943 if s.hasdefer {
1944 if s.hasOpenDefers {
1945 if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
1946 if s.curBlock.Kind != ssa.BlockPlain {
1947 panic("Block for an exit should be BlockPlain")
1948 }
1949 s.curBlock.AddEdgeTo(s.lastDeferExit)
1950 s.endBlock()
1951 return s.lastDeferFinalBlock
1952 }
1953 s.openDeferExit()
1954 } else {
1955 s.rtcall(ir.Syms.Deferreturn, true, nil)
1956 }
1957 }
1958
1959 var b *ssa.Block
1960 var m *ssa.Value
1961
1962
1963 resultFields := s.curfn.Type().Results().FieldSlice()
1964 results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
1965 m = s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
1966
1967 for i, f := range resultFields {
1968 n := f.Nname.(*ir.Name)
1969 if s.canSSA(n) {
1970 if !n.IsOutputParamInRegisters() {
1971
1972 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
1973 }
1974 results[i] = s.variable(n, n.Type())
1975 } else if !n.OnStack() {
1976
1977 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
1978 ha := s.expr(n.Heapaddr)
1979 s.instrumentFields(n.Type(), ha, instrumentRead)
1980 results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
1981 } else {
1982
1983
1984
1985 results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
1986 }
1987 }
1988
1989
1990
1991
1992 s.stmtList(s.curfn.Exit)
1993
1994 results[len(results)-1] = s.mem()
1995 m.AddArgs(results...)
1996
1997 b = s.endBlock()
1998 b.Kind = ssa.BlockRet
1999 b.SetControl(m)
2000 if s.hasdefer && s.hasOpenDefers {
2001 s.lastDeferFinalBlock = b
2002 }
2003 return b
2004 }
2005
// opAndType is the key for opToSSA: a front-end operation together with
// the concrete type kind of its operands.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2010
2011 var opToSSA = map[opAndType]ssa.Op{
2012 opAndType{ir.OADD, types.TINT8}: ssa.OpAdd8,
2013 opAndType{ir.OADD, types.TUINT8}: ssa.OpAdd8,
2014 opAndType{ir.OADD, types.TINT16}: ssa.OpAdd16,
2015 opAndType{ir.OADD, types.TUINT16}: ssa.OpAdd16,
2016 opAndType{ir.OADD, types.TINT32}: ssa.OpAdd32,
2017 opAndType{ir.OADD, types.TUINT32}: ssa.OpAdd32,
2018 opAndType{ir.OADD, types.TINT64}: ssa.OpAdd64,
2019 opAndType{ir.OADD, types.TUINT64}: ssa.OpAdd64,
2020 opAndType{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
2021 opAndType{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,
2022
2023 opAndType{ir.OSUB, types.TINT8}: ssa.OpSub8,
2024 opAndType{ir.OSUB, types.TUINT8}: ssa.OpSub8,
2025 opAndType{ir.OSUB, types.TINT16}: ssa.OpSub16,
2026 opAndType{ir.OSUB, types.TUINT16}: ssa.OpSub16,
2027 opAndType{ir.OSUB, types.TINT32}: ssa.OpSub32,
2028 opAndType{ir.OSUB, types.TUINT32}: ssa.OpSub32,
2029 opAndType{ir.OSUB, types.TINT64}: ssa.OpSub64,
2030 opAndType{ir.OSUB, types.TUINT64}: ssa.OpSub64,
2031 opAndType{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
2032 opAndType{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,
2033
2034 opAndType{ir.ONOT, types.TBOOL}: ssa.OpNot,
2035
2036 opAndType{ir.ONEG, types.TINT8}: ssa.OpNeg8,
2037 opAndType{ir.ONEG, types.TUINT8}: ssa.OpNeg8,
2038 opAndType{ir.ONEG, types.TINT16}: ssa.OpNeg16,
2039 opAndType{ir.ONEG, types.TUINT16}: ssa.OpNeg16,
2040 opAndType{ir.ONEG, types.TINT32}: ssa.OpNeg32,
2041 opAndType{ir.ONEG, types.TUINT32}: ssa.OpNeg32,
2042 opAndType{ir.ONEG, types.TINT64}: ssa.OpNeg64,
2043 opAndType{ir.ONEG, types.TUINT64}: ssa.OpNeg64,
2044 opAndType{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
2045 opAndType{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,
2046
2047 opAndType{ir.OBITNOT, types.TINT8}: ssa.OpCom8,
2048 opAndType{ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
2049 opAndType{ir.OBITNOT, types.TINT16}: ssa.OpCom16,
2050 opAndType{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
2051 opAndType{ir.OBITNOT, types.TINT32}: ssa.OpCom32,
2052 opAndType{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
2053 opAndType{ir.OBITNOT, types.TINT64}: ssa.OpCom64,
2054 opAndType{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,
2055
2056 opAndType{ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
2057 opAndType{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
2058 opAndType{ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
2059 opAndType{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,
2060
2061 opAndType{ir.OMUL, types.TINT8}: ssa.OpMul8,
2062 opAndType{ir.OMUL, types.TUINT8}: ssa.OpMul8,
2063 opAndType{ir.OMUL, types.TINT16}: ssa.OpMul16,
2064 opAndType{ir.OMUL, types.TUINT16}: ssa.OpMul16,
2065 opAndType{ir.OMUL, types.TINT32}: ssa.OpMul32,
2066 opAndType{ir.OMUL, types.TUINT32}: ssa.OpMul32,
2067 opAndType{ir.OMUL, types.TINT64}: ssa.OpMul64,
2068 opAndType{ir.OMUL, types.TUINT64}: ssa.OpMul64,
2069 opAndType{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
2070 opAndType{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,
2071
2072 opAndType{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
2073 opAndType{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,
2074
2075 opAndType{ir.ODIV, types.TINT8}: ssa.OpDiv8,
2076 opAndType{ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
2077 opAndType{ir.ODIV, types.TINT16}: ssa.OpDiv16,
2078 opAndType{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
2079 opAndType{ir.ODIV, types.TINT32}: ssa.OpDiv32,
2080 opAndType{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
2081 opAndType{ir.ODIV, types.TINT64}: ssa.OpDiv64,
2082 opAndType{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,
2083
2084 opAndType{ir.OMOD, types.TINT8}: ssa.OpMod8,
2085 opAndType{ir.OMOD, types.TUINT8}: ssa.OpMod8u,
2086 opAndType{ir.OMOD, types.TINT16}: ssa.OpMod16,
2087 opAndType{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
2088 opAndType{ir.OMOD, types.TINT32}: ssa.OpMod32,
2089 opAndType{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
2090 opAndType{ir.OMOD, types.TINT64}: ssa.OpMod64,
2091 opAndType{ir.OMOD, types.TUINT64}: ssa.OpMod64u,
2092
2093 opAndType{ir.OAND, types.TINT8}: ssa.OpAnd8,
2094 opAndType{ir.OAND, types.TUINT8}: ssa.OpAnd8,
2095 opAndType{ir.OAND, types.TINT16}: ssa.OpAnd16,
2096 opAndType{ir.OAND, types.TUINT16}: ssa.OpAnd16,
2097 opAndType{ir.OAND, types.TINT32}: ssa.OpAnd32,
2098 opAndType{ir.OAND, types.TUINT32}: ssa.OpAnd32,
2099 opAndType{ir.OAND, types.TINT64}: ssa.OpAnd64,
2100 opAndType{ir.OAND, types.TUINT64}: ssa.OpAnd64,
2101
2102 opAndType{ir.OOR, types.TINT8}: ssa.OpOr8,
2103 opAndType{ir.OOR, types.TUINT8}: ssa.OpOr8,
2104 opAndType{ir.OOR, types.TINT16}: ssa.OpOr16,
2105 opAndType{ir.OOR, types.TUINT16}: ssa.OpOr16,
2106 opAndType{ir.OOR, types.TINT32}: ssa.OpOr32,
2107 opAndType{ir.OOR, types.TUINT32}: ssa.OpOr32,
2108 opAndType{ir.OOR, types.TINT64}: ssa.OpOr64,
2109 opAndType{ir.OOR, types.TUINT64}: ssa.OpOr64,
2110
2111 opAndType{ir.OXOR, types.TINT8}: ssa.OpXor8,
2112 opAndType{ir.OXOR, types.TUINT8}: ssa.OpXor8,
2113 opAndType{ir.OXOR, types.TINT16}: ssa.OpXor16,
2114 opAndType{ir.OXOR, types.TUINT16}: ssa.OpXor16,
2115 opAndType{ir.OXOR, types.TINT32}: ssa.OpXor32,
2116 opAndType{ir.OXOR, types.TUINT32}: ssa.OpXor32,
2117 opAndType{ir.OXOR, types.TINT64}: ssa.OpXor64,
2118 opAndType{ir.OXOR, types.TUINT64}: ssa.OpXor64,
2119
2120 opAndType{ir.OEQ, types.TBOOL}: ssa.OpEqB,
2121 opAndType{ir.OEQ, types.TINT8}: ssa.OpEq8,
2122 opAndType{ir.OEQ, types.TUINT8}: ssa.OpEq8,
2123 opAndType{ir.OEQ, types.TINT16}: ssa.OpEq16,
2124 opAndType{ir.OEQ, types.TUINT16}: ssa.OpEq16,
2125 opAndType{ir.OEQ, types.TINT32}: ssa.OpEq32,
2126 opAndType{ir.OEQ, types.TUINT32}: ssa.OpEq32,
2127 opAndType{ir.OEQ, types.TINT64}: ssa.OpEq64,
2128 opAndType{ir.OEQ, types.TUINT64}: ssa.OpEq64,
2129 opAndType{ir.OEQ, types.TINTER}: ssa.OpEqInter,
2130 opAndType{ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
2131 opAndType{ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
2132 opAndType{ir.OEQ, types.TMAP}: ssa.OpEqPtr,
2133 opAndType{ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
2134 opAndType{ir.OEQ, types.TPTR}: ssa.OpEqPtr,
2135 opAndType{ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
2136 opAndType{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
2137 opAndType{ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
2138 opAndType{ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,
2139
2140 opAndType{ir.ONE, types.TBOOL}: ssa.OpNeqB,
2141 opAndType{ir.ONE, types.TINT8}: ssa.OpNeq8,
2142 opAndType{ir.ONE, types.TUINT8}: ssa.OpNeq8,
2143 opAndType{ir.ONE, types.TINT16}: ssa.OpNeq16,
2144 opAndType{ir.ONE, types.TUINT16}: ssa.OpNeq16,
2145 opAndType{ir.ONE, types.TINT32}: ssa.OpNeq32,
2146 opAndType{ir.ONE, types.TUINT32}: ssa.OpNeq32,
2147 opAndType{ir.ONE, types.TINT64}: ssa.OpNeq64,
2148 opAndType{ir.ONE, types.TUINT64}: ssa.OpNeq64,
2149 opAndType{ir.ONE, types.TINTER}: ssa.OpNeqInter,
2150 opAndType{ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
2151 opAndType{ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
2152 opAndType{ir.ONE, types.TMAP}: ssa.OpNeqPtr,
2153 opAndType{ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
2154 opAndType{ir.ONE, types.TPTR}: ssa.OpNeqPtr,
2155 opAndType{ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
2156 opAndType{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
2157 opAndType{ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
2158 opAndType{ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,
2159
2160 opAndType{ir.OLT, types.TINT8}: ssa.OpLess8,
2161 opAndType{ir.OLT, types.TUINT8}: ssa.OpLess8U,
2162 opAndType{ir.OLT, types.TINT16}: ssa.OpLess16,
2163 opAndType{ir.OLT, types.TUINT16}: ssa.OpLess16U,
2164 opAndType{ir.OLT, types.TINT32}: ssa.OpLess32,
2165 opAndType{ir.OLT, types.TUINT32}: ssa.OpLess32U,
2166 opAndType{ir.OLT, types.TINT64}: ssa.OpLess64,
2167 opAndType{ir.OLT, types.TUINT64}: ssa.OpLess64U,
2168 opAndType{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
2169 opAndType{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,
2170
2171 opAndType{ir.OLE, types.TINT8}: ssa.OpLeq8,
2172 opAndType{ir.OLE, types.TUINT8}: ssa.OpLeq8U,
2173 opAndType{ir.OLE, types.TINT16}: ssa.OpLeq16,
2174 opAndType{ir.OLE, types.TUINT16}: ssa.OpLeq16U,
2175 opAndType{ir.OLE, types.TINT32}: ssa.OpLeq32,
2176 opAndType{ir.OLE, types.TUINT32}: ssa.OpLeq32U,
2177 opAndType{ir.OLE, types.TINT64}: ssa.OpLeq64,
2178 opAndType{ir.OLE, types.TUINT64}: ssa.OpLeq64U,
2179 opAndType{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
2180 opAndType{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
2181 }
2182
2183 func (s *state) concreteEtype(t *types.Type) types.Kind {
2184 e := t.Kind()
2185 switch e {
2186 default:
2187 return e
2188 case types.TINT:
2189 if s.config.PtrSize == 8 {
2190 return types.TINT64
2191 }
2192 return types.TINT32
2193 case types.TUINT:
2194 if s.config.PtrSize == 8 {
2195 return types.TUINT64
2196 }
2197 return types.TUINT32
2198 case types.TUINTPTR:
2199 if s.config.PtrSize == 8 {
2200 return types.TUINT64
2201 }
2202 return types.TUINT32
2203 }
2204 }
2205
2206 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2207 etype := s.concreteEtype(t)
2208 x, ok := opToSSA[opAndType{op, etype}]
2209 if !ok {
2210 s.Fatalf("unhandled binary op %v %s", op, etype)
2211 }
2212 return x
2213 }
2214
// opAndTwoTypes is a key for operations that depend on two operand kinds
// (e.g. shifts, where the value and shift-count kinds differ).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}

// twoTypes is a (source kind, destination kind) pair, used as the key of
// the floating-point conversion table.
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}

// twoOpsAndType describes a conversion as two SSA ops applied in sequence,
// together with the kind of the intermediate value between them.
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2231
2232 var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
2233
2234 twoTypes{types.TINT8, types.TFLOAT32}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
2235 twoTypes{types.TINT16, types.TFLOAT32}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
2236 twoTypes{types.TINT32, types.TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
2237 twoTypes{types.TINT64, types.TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},
2238
2239 twoTypes{types.TINT8, types.TFLOAT64}: twoOpsAndType{ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
2240 twoTypes{types.TINT16, types.TFLOAT64}: twoOpsAndType{ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
2241 twoTypes{types.TINT32, types.TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
2242 twoTypes{types.TINT64, types.TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},
2243
2244 twoTypes{types.TFLOAT32, types.TINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
2245 twoTypes{types.TFLOAT32, types.TINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
2246 twoTypes{types.TFLOAT32, types.TINT32}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
2247 twoTypes{types.TFLOAT32, types.TINT64}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},
2248
2249 twoTypes{types.TFLOAT64, types.TINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
2250 twoTypes{types.TFLOAT64, types.TINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
2251 twoTypes{types.TFLOAT64, types.TINT32}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
2252 twoTypes{types.TFLOAT64, types.TINT64}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},
2253
2254 twoTypes{types.TUINT8, types.TFLOAT32}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
2255 twoTypes{types.TUINT16, types.TFLOAT32}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
2256 twoTypes{types.TUINT32, types.TFLOAT32}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
2257 twoTypes{types.TUINT64, types.TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
2258
2259 twoTypes{types.TUINT8, types.TFLOAT64}: twoOpsAndType{ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
2260 twoTypes{types.TUINT16, types.TFLOAT64}: twoOpsAndType{ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
2261 twoTypes{types.TUINT32, types.TFLOAT64}: twoOpsAndType{ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
2262 twoTypes{types.TUINT64, types.TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpInvalid, types.TUINT64},
2263
2264 twoTypes{types.TFLOAT32, types.TUINT8}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
2265 twoTypes{types.TFLOAT32, types.TUINT16}: twoOpsAndType{ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
2266 twoTypes{types.TFLOAT32, types.TUINT32}: twoOpsAndType{ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
2267 twoTypes{types.TFLOAT32, types.TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, types.TUINT64},
2268
2269 twoTypes{types.TFLOAT64, types.TUINT8}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
2270 twoTypes{types.TFLOAT64, types.TUINT16}: twoOpsAndType{ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
2271 twoTypes{types.TFLOAT64, types.TUINT32}: twoOpsAndType{ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
2272 twoTypes{types.TFLOAT64, types.TUINT64}: twoOpsAndType{ssa.OpInvalid, ssa.OpCopy, types.TUINT64},
2273
2274
2275 twoTypes{types.TFLOAT64, types.TFLOAT32}: twoOpsAndType{ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
2276 twoTypes{types.TFLOAT64, types.TFLOAT64}: twoOpsAndType{ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
2277 twoTypes{types.TFLOAT32, types.TFLOAT32}: twoOpsAndType{ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
2278 twoTypes{types.TFLOAT32, types.TFLOAT64}: twoOpsAndType{ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
2279 }
2280
2281
2282
2283 var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
2284 twoTypes{types.TUINT32, types.TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
2285 twoTypes{types.TUINT32, types.TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
2286 twoTypes{types.TFLOAT32, types.TUINT32}: twoOpsAndType{ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
2287 twoTypes{types.TFLOAT64, types.TUINT32}: twoOpsAndType{ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
2288 }
2289
2290
2291 var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
2292 twoTypes{types.TUINT64, types.TFLOAT32}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
2293 twoTypes{types.TUINT64, types.TFLOAT64}: twoOpsAndType{ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
2294 twoTypes{types.TFLOAT32, types.TUINT64}: twoOpsAndType{ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
2295 twoTypes{types.TFLOAT64, types.TUINT64}: twoOpsAndType{ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
2296 }
2297
2298 var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
2299 opAndTwoTypes{ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
2300 opAndTwoTypes{ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
2301 opAndTwoTypes{ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
2302 opAndTwoTypes{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
2303 opAndTwoTypes{ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
2304 opAndTwoTypes{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
2305 opAndTwoTypes{ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
2306 opAndTwoTypes{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,
2307
2308 opAndTwoTypes{ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
2309 opAndTwoTypes{ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
2310 opAndTwoTypes{ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
2311 opAndTwoTypes{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
2312 opAndTwoTypes{ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
2313 opAndTwoTypes{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
2314 opAndTwoTypes{ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
2315 opAndTwoTypes{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,
2316
2317 opAndTwoTypes{ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
2318 opAndTwoTypes{ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
2319 opAndTwoTypes{ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
2320 opAndTwoTypes{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
2321 opAndTwoTypes{ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
2322 opAndTwoTypes{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
2323 opAndTwoTypes{ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
2324 opAndTwoTypes{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,
2325
2326 opAndTwoTypes{ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
2327 opAndTwoTypes{ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
2328 opAndTwoTypes{ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
2329 opAndTwoTypes{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
2330 opAndTwoTypes{ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
2331 opAndTwoTypes{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
2332 opAndTwoTypes{ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
2333 opAndTwoTypes{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,
2334
2335 opAndTwoTypes{ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
2336 opAndTwoTypes{ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
2337 opAndTwoTypes{ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
2338 opAndTwoTypes{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
2339 opAndTwoTypes{ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
2340 opAndTwoTypes{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
2341 opAndTwoTypes{ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
2342 opAndTwoTypes{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,
2343
2344 opAndTwoTypes{ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
2345 opAndTwoTypes{ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
2346 opAndTwoTypes{ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
2347 opAndTwoTypes{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
2348 opAndTwoTypes{ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
2349 opAndTwoTypes{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
2350 opAndTwoTypes{ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
2351 opAndTwoTypes{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,
2352
2353 opAndTwoTypes{ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
2354 opAndTwoTypes{ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
2355 opAndTwoTypes{ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
2356 opAndTwoTypes{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
2357 opAndTwoTypes{ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
2358 opAndTwoTypes{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
2359 opAndTwoTypes{ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
2360 opAndTwoTypes{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,
2361
2362 opAndTwoTypes{ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
2363 opAndTwoTypes{ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
2364 opAndTwoTypes{ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
2365 opAndTwoTypes{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
2366 opAndTwoTypes{ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
2367 opAndTwoTypes{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
2368 opAndTwoTypes{ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
2369 opAndTwoTypes{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
2370 }
2371
2372 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2373 etype1 := s.concreteEtype(t)
2374 etype2 := s.concreteEtype(u)
2375 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2376 if !ok {
2377 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2378 }
2379 return x
2380 }
2381
// conv converts the value v, of type ft, to type tt, emitting whatever
// SSA ops (or soft-float/runtime helper expansions) are required.
// n provides the IR context passed through to the helper expansions.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// bool and uint8: same underlying representation, just copy.
		return s.newValue1(ssa.OpCopy, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// truncation: select by the (from, to) size pair
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// widening a signed source: sign extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// widening an unsigned source: zero extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		// Convert the real and imaginary parts independently, then rebuild.
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Converting a non-complex value to complex: convert to the
		// component float type and use zero for the imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			// 32-bit targets (except MIPS, except soft-float): prefer the
			// direct uint32 conversion ops where available.
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			// These targets (and soft-float mode) have direct uint64
			// conversion ops, replacing the OpInvalid placeholder entries.
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		// Hard-float MIPS handles unsigned 32-bit conversions via
		// dedicated helper expansions instead of table entries.
		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 then op2, skipping OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}
		// An op was OpInvalid: uint64 <-> float with no direct op; expand
		// via the dedicated helpers.
		if ft.IsInteger() {
			// uint64 -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}
		// float -> uint64
		if ft.Size() == 4 {
			return s.float32ToUint64(n, v, ft, tt)
		}
		if ft.Size() == 8 {
			return s.float64ToUint64(n, v, ft, tt)
		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2557
2558
// expr converts the expression n to SSA, adds it to s and returns the
// SSA result. It permits checkptr instrumentation of the result; see
// exprCheckPtr for the variant that suppresses it.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2562
// exprCheckPtr converts the expression n to SSA, adds it to s and
// returns the SSA result. checkPtrOK reports whether this conversion is
// allowed to emit a checkptr alignment check itself; a caller that
// performs the check on the result (see the OSLICE3ARR case) passes false.
func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
	if ir.HasUniquePos(n) {
		// Attribute the values generated here to this expression's
		// own source position.
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	s.stmtList(n.Init())
	switch n.Op() {
	case ir.OBYTES2STRTMP:
		// Temporary []byte->string: reuse the slice's pointer and length.
		n := n.(*ir.ConvExpr)
		slice := s.expr(n.X)
		ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
		len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
		return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
	case ir.OSTR2BYTESTMP:
		// Temporary string->[]byte: share the string's storage; cap == len.
		n := n.(*ir.ConvExpr)
		str := s.expr(n.X)
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
		return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
	case ir.OCFUNC:
		n := n.(*ir.UnaryExpr)
		aux := n.X.(*ir.Name).Linksym()
		// The referenced function symbol must use the internal ABI here.
		if aux.ABI() != obj.ABIInternal {
			s.Fatalf("expected ABIInternal: %v", aux.ABI())
		}
		return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Class == ir.PFUNC {
			// The "value" of a function is the address of its symbol.
			sym := staticdata.FuncLinksym(n)
			return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
		}
		if s.canSSA(n) {
			// SSA-able variable: use its current SSA value.
			return s.variable(n, n.Type())
		}
		return s.load(n.Type(), s.addr(n))
	case ir.OLINKSYMOFFSET:
		n := n.(*ir.LinksymOffsetExpr)
		return s.load(n.Type(), s.addr(n))
	case ir.ONIL:
		// nil constant, represented per the type's kind.
		n := n.(*ir.NilExpr)
		t := n.Type()
		switch {
		case t.IsSlice():
			return s.constSlice(t)
		case t.IsInterface():
			return s.constInterface(t)
		default:
			return s.constNil(t)
		}
	case ir.OLITERAL:
		switch u := n.Val(); u.Kind() {
		case constant.Int:
			i := ir.IntVal(n.Type(), u)
			switch n.Type().Size() {
			case 1:
				return s.constInt8(n.Type(), int8(i))
			case 2:
				return s.constInt16(n.Type(), int16(i))
			case 4:
				return s.constInt32(n.Type(), int32(i))
			case 8:
				return s.constInt64(n.Type(), i)
			default:
				s.Fatalf("bad integer size %d", n.Type().Size())
				return nil
			}
		case constant.String:
			i := constant.StringVal(u)
			if i == "" {
				return s.constEmptyString(n.Type())
			}
			return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
		case constant.Bool:
			return s.constBool(constant.BoolVal(u))
		case constant.Float:
			f, _ := constant.Float64Val(u)
			switch n.Type().Size() {
			case 4:
				return s.constFloat32(n.Type(), f)
			case 8:
				return s.constFloat64(n.Type(), f)
			default:
				s.Fatalf("bad float size %d", n.Type().Size())
				return nil
			}
		case constant.Complex:
			re, _ := constant.Float64Val(constant.Real(u))
			im, _ := constant.Float64Val(constant.Imag(u))
			switch n.Type().Size() {
			case 8:
				pt := types.Types[types.TFLOAT32]
				return s.newValue2(ssa.OpComplexMake, n.Type(),
					s.constFloat32(pt, re),
					s.constFloat32(pt, im))
			case 16:
				pt := types.Types[types.TFLOAT64]
				return s.newValue2(ssa.OpComplexMake, n.Type(),
					s.constFloat64(pt, re),
					s.constFloat64(pt, im))
			default:
				s.Fatalf("bad complex size %d", n.Type().Size())
				return nil
			}
		default:
			s.Fatalf("unhandled OLITERAL %v", u.Kind())
			return nil
		}
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		to := n.Type()
		from := n.X.Type()

		// Evaluate the operand; the cases below mostly just reinterpret
		// its bits as the destination type.
		x := s.expr(n.X)
		if to == from {
			return x
		}

		// A conversion that changes pointer-ness must go through
		// OpConvert, which takes a memory argument.
		// NOTE(review): presumably this keeps the pointer visible to the
		// GC across the conversion — confirm against ssa.OpConvert docs.
		if to.IsPtrShaped() != from.IsPtrShaped() {
			return s.newValue2(ssa.OpConvert, to, x, s.mem())
		}

		v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type

		// func <- pointer-shaped value: representation-only change.
		if to.Kind() == types.TFUNC && from.IsPtrShaped() {
			return v
		}

		// Same kind (e.g. named <-> unnamed type): nothing more to check.
		if from.Kind() == to.Kind() {
			return v
		}

		// unsafe.Pointer <-> pointer-shaped conversions.
		if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
			if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
				s.checkPtrAlignment(n, v, nil)
			}
			return v
		}

		// map <- *hmap: a map value is represented by its hmap pointer.
		if to.Kind() == types.TMAP && from.IsPtr() &&
			to.MapType().Hmap == from.Elem() {
			return v
		}

		types.CalcSize(from)
		types.CalcSize(to)
		if from.Size() != to.Size() {
			s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
			return nil
		}
		if etypesign(from.Kind()) != etypesign(to.Kind()) {
			s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
			return nil
		}

		if base.Flag.Cfg.Instrumenting {
			// Instrumentation can introduce conversions the checks
			// below would reject; accept them as plain copies.
			return v
		}

		if etypesign(from.Kind()) == 0 {
			s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
			return nil
		}

		// integer <-> integer of the same size and signedness: a copy.
		return v

	case ir.OCONV:
		n := n.(*ir.ConvExpr)
		x := s.expr(n.X)
		return s.conv(n, x, n.X.Type(), n.Type())

	case ir.ODOTTYPE:
		n := n.(*ir.TypeAssertExpr)
		res, _ := s.dottype(n, false)
		return res

	case ir.ODYNAMICDOTTYPE:
		n := n.(*ir.DynamicTypeAssertExpr)
		res, _ := s.dynamicDottype(n, false)
		return res

	// binary ops
	case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		if n.X.Type().IsComplex() {
			// Complex comparison: compare real and imaginary parts for
			// equality and combine; only == and != are defined.
			pt := types.FloatForComplex(n.X.Type())
			op := s.ssaOp(ir.OEQ, pt)
			r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
			i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
			c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
			switch n.Op() {
			case ir.OEQ:
				return c
			case ir.ONE:
				return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
			default:
				s.Fatalf("ordered complex compare %v", n.Op())
			}
		}

		// Normalize >= and > to <= and < by swapping the operands.
		op := n.Op()
		switch op {
		case ir.OGE:
			op, a, b = ir.OLE, b, a
		case ir.OGT:
			op, a, b = ir.OLT, b, a
		}
		if n.X.Type().IsFloat() {
			// float comparison may need a soft-float call
			return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
		}
		// integer comparison
		return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
	case ir.OMUL:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		if n.Type().IsComplex() {
			// Complex multiply, computed in float64 (complex64 parts are
			// widened first and the results narrowed back):
			//   real = ar*br - ai*bi
			//   imag = ar*bi + ai*br
			mulop := ssa.OpMul64F
			addop := ssa.OpAdd64F
			subop := ssa.OpSub64F
			pt := types.FloatForComplex(n.Type()) // Could be Float32 or Float64
			wt := types.Types[types.TFLOAT64]     // Compute in Float64

			areal := s.newValue1(ssa.OpComplexReal, pt, a)
			breal := s.newValue1(ssa.OpComplexReal, pt, b)
			aimag := s.newValue1(ssa.OpComplexImag, pt, a)
			bimag := s.newValue1(ssa.OpComplexImag, pt, b)

			if pt != wt { // Widen for calculation
				areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
				breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
				aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
				bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
			}

			xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
			ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))

			if pt != wt { // Narrow to store back
				xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
				ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
			}

			return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
		}

		if n.Type().IsFloat() {
			return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
		}

		return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)

	case ir.ODIV:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		if n.Type().IsComplex() {
			// Complex divide, textbook formula computed in float64:
			//   denom = br*br + bi*bi
			//   real  = (ar*br + ai*bi) / denom
			//   imag  = (ai*br - ar*bi) / denom
			mulop := ssa.OpMul64F
			addop := ssa.OpAdd64F
			subop := ssa.OpSub64F
			divop := ssa.OpDiv64F
			pt := types.FloatForComplex(n.Type()) // Could be Float32 or Float64
			wt := types.Types[types.TFLOAT64]     // Compute in Float64

			areal := s.newValue1(ssa.OpComplexReal, pt, a)
			breal := s.newValue1(ssa.OpComplexReal, pt, b)
			aimag := s.newValue1(ssa.OpComplexImag, pt, a)
			bimag := s.newValue1(ssa.OpComplexImag, pt, b)

			if pt != wt { // Widen for calculation
				areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
				breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
				aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
				bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
			}

			denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
			xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
			ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))

			// Divide by the denominator after combining, still in the
			// wide type.
			xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
			ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)

			if pt != wt { // Narrow to store back
				xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
				ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
			}
			return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
		}
		if n.Type().IsFloat() {
			return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
		}
		return s.intDivide(n, a, b)
	case ir.OMOD:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		return s.intDivide(n, a, b)
	case ir.OADD, ir.OSUB:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		if n.Type().IsComplex() {
			// Componentwise add/sub on the real and imaginary parts.
			pt := types.FloatForComplex(n.Type())
			op := s.ssaOp(n.Op(), pt)
			return s.newValue2(ssa.OpComplexMake, n.Type(),
				s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
				s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
		}
		if n.Type().IsFloat() {
			return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
		}
		return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
	case ir.OAND, ir.OOR, ir.OXOR:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
	case ir.OANDNOT:
		// a &^ b is lowered as a & ^b.
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
		return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
	case ir.OLSH, ir.ORSH:
		n := n.(*ir.BinaryExpr)
		a := s.expr(n.X)
		b := s.expr(n.Y)
		bt := b.Type
		if bt.IsSigned() {
			// A negative shift count panics; check 0 <= b, then treat
			// the count as unsigned for the table lookup.
			cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
			s.check(cmp, ir.Syms.Panicshift)
			bt = bt.ToUnsigned()
		}
		return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
	case ir.OANDAND, ir.OOROR:
		// Short-circuit evaluation. The result lives in a temporary
		// variable keyed by the expression node itself in s.vars
		// (normally only ONAME nodes are keys). We build:
		//     result = A
		//     if result (for &&) / if !result (for ||) {
		//         result = B
		//     }
		n := n.(*ir.LogicalExpr)
		el := s.expr(n.X)
		s.vars[n] = el

		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(el)

		// bRight evaluates the right operand; bResult joins both paths.
		// For && the true edge goes to bRight (evaluate B only if A);
		// for || the false edge does.
		bRight := s.f.NewBlock(ssa.BlockPlain)
		bResult := s.f.NewBlock(ssa.BlockPlain)
		if n.Op() == ir.OANDAND {
			b.AddEdgeTo(bRight)
			b.AddEdgeTo(bResult)
		} else if n.Op() == ir.OOROR {
			b.AddEdgeTo(bResult)
			b.AddEdgeTo(bRight)
		}

		s.startBlock(bRight)
		er := s.expr(n.Y)
		s.vars[n] = er

		b = s.endBlock()
		b.AddEdgeTo(bResult)

		s.startBlock(bResult)
		return s.variable(n, types.Types[types.TBOOL])
	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		r := s.expr(n.X)
		i := s.expr(n.Y)
		return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)

	// unary ops
	case ir.ONEG:
		n := n.(*ir.UnaryExpr)
		a := s.expr(n.X)
		if n.Type().IsComplex() {
			// Negate both components.
			tp := types.FloatForComplex(n.Type())
			negop := s.ssaOp(n.Op(), tp)
			return s.newValue2(ssa.OpComplexMake, n.Type(),
				s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
				s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
		}
		return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
	case ir.ONOT, ir.OBITNOT:
		n := n.(*ir.UnaryExpr)
		a := s.expr(n.X)
		return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
	case ir.OIMAG, ir.OREAL:
		n := n.(*ir.UnaryExpr)
		a := s.expr(n.X)
		return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
	case ir.OPLUS:
		// Unary + is the identity.
		n := n.(*ir.UnaryExpr)
		return s.expr(n.X)

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		return s.addr(n.X)

	case ir.ORESULT:
		// A result of the immediately preceding call.
		n := n.(*ir.ResultExpr)
		if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
			panic("Expected to see a previous call")
		}
		which := n.Index
		if which == -1 {
			panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
		}
		return s.resultOfCall(s.prevCall, which, n.Type())

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.load(n.Type(), p)

	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		if n.X.Op() == ir.OSTRUCTLIT {
			// All composite literals reaching SSA are zero-valued
			// (non-zero ones were rewritten earlier); selecting a field
			// of a zero struct yields the field's zero value.
			if !ir.IsZero(n.X) {
				s.Fatalf("literal with nonzero value in SSA: %v", n.X)
			}
			return s.zeroVal(n.Type())
		}
		// If the receiver is addressable but not SSA-able, load the
		// field through its address rather than materializing the
		// whole struct.
		if ir.IsAddressable(n) && !s.canSSA(n) {
			p := s.addr(n)
			return s.load(n.Type(), p)
		}
		v := s.expr(n.X)
		return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)

	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
		return s.load(n.Type(), p)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		switch {
		case n.X.Type().IsString():
			if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
				// Fully constant, in-bounds string index: fold to the
				// byte constant directly.
				return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
			}
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			ptrtyp := s.f.Config.Types.BytePtr
			ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
			if ir.IsConst(n.Index, constant.Int) {
				ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
			} else {
				ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
			}
			return s.load(types.Types[types.TUINT8], ptr)
		case n.X.Type().IsSlice():
			p := s.addr(n)
			return s.load(n.X.Type().Elem(), p)
		case n.X.Type().IsArray():
			if TypeOK(n.X.Type()) {
				// SSA-able array: select the element directly.
				bound := n.X.Type().NumElem()
				a := s.expr(n.X)
				i := s.expr(n.Index)
				if bound == 0 {
					// Every index into a zero-length array is out of
					// bounds; emit an unconditional panic check (0 < 0
					// fails) and return a dummy zero value.
					z := s.constInt(types.Types[types.TINT], 0)
					s.boundsCheck(z, z, ssa.BoundsIndex, false)
					// The return value won't be live; return a junk value.
					return s.zeroVal(n.Type())
				}
				len := s.constInt(types.Types[types.TINT], bound)
				s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
				// Element 0 is selected; assumes SSA-able arrays have at
				// most one element (see TypeOK) — TODO confirm.
				return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
			}
			p := s.addr(n)
			return s.load(n.X.Type().Elem(), p)
		default:
			s.Fatalf("bad type for index %v", n.X.Type())
			return nil
		}

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		switch {
		case n.X.Type().IsSlice():
			op := ssa.OpSliceLen
			if n.Op() == ir.OCAP {
				op = ssa.OpSliceCap
			}
			return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
		case n.X.Type().IsString(): // string; not reachable for OCAP
			return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
		case n.X.Type().IsMap(), n.X.Type().IsChan():
			return s.referenceTypeBuiltin(n, s.expr(n.X))
		default: // array
			return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
		}

	case ir.OSPTR:
		n := n.(*ir.UnaryExpr)
		a := s.expr(n.X)
		if n.X.Type().IsSlice() {
			return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
		} else {
			return s.newValue1(ssa.OpStringPtr, n.Type(), a)
		}

	case ir.OITAB:
		n := n.(*ir.UnaryExpr)
		a := s.expr(n.X)
		return s.newValue1(ssa.OpITab, n.Type(), a)

	case ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		a := s.expr(n.X)
		return s.newValue1(ssa.OpIData, n.Type(), a)

	case ir.OEFACE:
		n := n.(*ir.BinaryExpr)
		tab := s.expr(n.X)
		data := s.expr(n.Y)
		return s.newValue2(ssa.OpIMake, n.Type(), tab, data)

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		p := s.expr(n.Ptr)
		l := s.expr(n.Len)
		c := s.expr(n.Cap)
		return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		// For unsafe.Pointer->*[N]T->slice, this case does its own
		// alignment check on the full slice extent, so it suppresses
		// the one exprCheckPtr would otherwise emit for the inner
		// conversion (passes checkPtrOK = !check).
		check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
		v := s.exprCheckPtr(n.X, !check)
		var i, j, k *ssa.Value
		if n.Low != nil {
			i = s.expr(n.Low)
		}
		if n.High != nil {
			j = s.expr(n.High)
		}
		if n.Max != nil {
			k = s.expr(n.Max)
		}
		p, l, c := s.slice(v, i, j, k, n.Bounded())
		if check {
			// Emit the checkptr check using the max index as the length.
			s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
		}
		return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)

	case ir.OSLICESTR:
		n := n.(*ir.SliceExpr)
		v := s.expr(n.X)
		var i, j *ssa.Value
		if n.Low != nil {
			i = s.expr(n.Low)
		}
		if n.High != nil {
			j = s.expr(n.High)
		}
		p, l, _ := s.slice(v, i, j, nil, n.Bounded())
		return s.newValue2(ssa.OpStringMake, n.Type(), p, l)

	case ir.OSLICE2ARRPTR:
		// Converting a slice to an array pointer: check that the slice
		// is at least as long as the array, then reuse its data pointer.
		// (BoundsConvert distinguishes this panic from an index panic.)
		n := n.(*ir.ConvExpr)
		v := s.expr(n.X)
		arrlen := s.constInt(types.Types[types.TINT], n.Type().Elem().NumElem())
		cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
		return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), v)

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			return s.intrinsicCall(n)
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callResult(n, callNormal)

	case ir.OGETG:
		n := n.(*ir.CallExpr)
		return s.newValue1(ssa.OpGetG, n.Type(), s.mem())

	case ir.OGETCALLERPC:
		n := n.(*ir.CallExpr)
		return s.newValue0(ssa.OpGetCallerPC, n.Type())

	case ir.OGETCALLERSP:
		n := n.(*ir.CallExpr)
		return s.newValue0(ssa.OpGetCallerSP, n.Type())

	case ir.OAPPEND:
		return s.append(n.(*ir.CallExpr), false)

	case ir.OSTRUCTLIT, ir.OARRAYLIT:
		// Only zero-valued literals reach SSA in expression position;
		// others were rewritten into assignments earlier.
		n := n.(*ir.CompLitExpr)
		if !ir.IsZero(n) {
			s.Fatalf("literal with nonzero value in SSA: %v", n)
		}
		return s.zeroVal(n.Type())

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return s.newObject(n.Type().Elem())

	case ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		ptr := s.expr(n.X)
		len := s.expr(n.Y)

		// Convert len to uintptr so the pointer-add width matches the
		// pointer, regardless of the operand's integer type.
		len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])

		return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)

	default:
		s.Fatalf("unhandled expr %v", n.Op())
		return nil
	}
}
3254
3255 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3256 aux := c.Aux.(*ssa.AuxCall)
3257 pa := aux.ParamAssignmentForResult(which)
3258
3259
3260 if len(pa.Registers) == 0 && !TypeOK(t) {
3261 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3262 return s.rawLoad(t, addr)
3263 }
3264 return s.newValue1I(ssa.OpSelectN, t, which, c)
3265 }
3266
3267 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3268 aux := c.Aux.(*ssa.AuxCall)
3269 pa := aux.ParamAssignmentForResult(which)
3270 if len(pa.Registers) == 0 {
3271 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3272 }
3273 _, addr := s.temp(c.Pos, t)
3274 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3275 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3276 return addr
3277 }
3278
3279
3280
3281
3282
3283
3284
// append converts an OAPPEND node to SSA.
//
// If inplace is false, it converts the OAPPEND expression n to an
// ssa.Value, adds it to s, and returns that value (a fresh slice built
// with OpSliceMake).
//
// If inplace is true, it writes the result of the OAPPEND expression
// back through the slice header of the destination (the address of
// n.Args[0]) and returns nil.
//
// The generated control flow is, roughly:
//
//	ptr, len, cap := s
//	newlen := len + <nargs>
//	if uint(cap) < uint(newlen) {           // unlikely branch
//	    ptr, len, cap = growslice(typ, ptr, len, cap, newlen)
//	    // (inplace: cap and ptr are stored back into the header here)
//	}
//	// store each appended element at ptr[len+i]
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// The slice being appended to.
	sn := n.Args[0]

	var slice, addr *ssa.Value
	if inplace {
		// In-place: we need the address of the destination so the
		// header can be updated; load the current slice from it.
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// Control-flow targets: grow runs growslice; assign stores the
	// appended elements and (inplace) the new length.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the slice and compute the required new length.
	nargs := int64(len(n.Args) - 1)
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)
	nl := s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, s.constInt(types.Types[types.TINT], nargs))

	// Grow if uint(cap) < uint(newlen) (unsigned comparison).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, nl)
	s.vars[ptrVar] = p

	// Record the values that must merge at the assign block; the
	// grow path overwrites these with growslice's results.
	if !inplace {
		s.vars[newlenVar] = nl
		s.vars[capVar] = c
	} else {
		s.vars[lenVar] = l
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely // growing is the rare case
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Slow path: call growslice. r = {new ptr, new len, new cap}.
	s.startBlock(grow)
	taddr := s.expr(n.X)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{pt, types.Types[types.TINT], types.Types[types.TINT]}, taddr, p, l, c, nl)

	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Tell liveness we're about to build a new slice.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Update the header in place: capacity first, then the base
		// pointer. The length is written later, in the assign block.
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, r[2])
		s.store(pt, addr, r[0])
		// Load the pointer we just stored rather than reusing r[0],
		// and keep the pre-grow length in lenVar.
		s.vars[ptrVar] = s.load(pt, addr)
		s.vars[lenVar] = r[1]
	} else {
		s.vars[ptrVar] = r[0]
		// Recompute newlen from growslice's returned length.
		s.vars[newlenVar] = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], r[1], s.constInt(types.Types[types.TINT], nargs))
		s.vars[capVar] = r[2]
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// Merge point: store the appended elements.
	s.startBlock(assign)

	if inplace {
		// Recompute the new length from the merged lenVar and write
		// it into the slice header.
		l = s.variable(lenVar, types.Types[types.TINT])
		nl = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, s.constInt(types.Types[types.TINT], nargs))
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, nl)
	}

	// Evaluate args. Each appended value is either held directly
	// (store == true, SSA-able type) or by address (store == false).
	type argRec struct {
		// v is the value (if store) or the address (if !store) of
		// the element being appended.
		v *ssa.Value
		store bool
	}
	args := make([]argRec, 0, nargs)
	for _, n := range n.Args[1:] {
		if TypeOK(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Re-fetch the merged slice components.
	p = s.variable(ptrVar, pt)
	if !inplace {
		nl = s.variable(newlenVar, types.Types[types.TINT])
		c = s.variable(capVar, types.Types[types.TINT])
	}
	// p2 = &p[l]; element i goes to &p2[i].
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, l)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// Clean up the temporary merge variables.
	delete(s.vars, ptrVar)
	if inplace {
		delete(s.vars, lenVar)
		return nil
	}
	delete(s.vars, newlenVar)
	delete(s.vars, capVar)
	// Build and return the new slice value.
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, nl, c)
}
3441
3442
3443
3444
3445
// condBranch evaluates the boolean expression cond and decides where
// to continue execution: to yes if cond is true, to no if it is false.
// likely is a branch-prediction hint: > 0 means the true branch is
// likely, < 0 means the false branch is likely, 0 means no hint.
// Short-circuit operators (&&, ||, !) are lowered by recursion instead
// of materializing a boolean value.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// x && y: if x is false, jump straight to no; otherwise fall
		// into mid, where y decides.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, mid, no, max8(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: if likely==1, then both recursive calls pass 1.
		// If likely==-1, then we don't have enough information to
		// decide whether the first branch is likely or not, so we
		// pass 0 for the likeliness of the first branch
		// (max8(likely, 0) above).
	case ir.OOROR:
		// x || y: if x is true, jump straight to yes; otherwise fall
		// into mid, where y decides.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, mid, min8(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return
		// Note: mirrors the OANDAND case; if likely==-1, both
		// recursive calls pass -1, and if likely==1 the first branch
		// gets 0 (min8(likely, 0) above).
	case ir.ONOT:
		// !x: swap the branch targets and negate the hint.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// A no-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// General case: evaluate cond to a value and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely) // both likely and BranchPrediction are in {-1,0,1}
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
3492
// A skipMask is a bitmask recording which words of a multi-word value
// (pointer/len/cap) need not be stored during an assignment; it is
// passed as the skip argument of assign and storeType. NOTE(review):
// callers presumably set a bit when the corresponding word is already
// known to be correct in the destination — confirm at call sites.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota // pointer word can be skipped
	skipLen                      // length word can be skipped
	skipCap                      // capacity word can be skipped
)
3500
3501
3502
3503
3504
3505
// assign does left = right.
// right has already been evaluated to SSA; left has not.
// If deref is true, then we do left = *right instead (right is an
// address in that case; presumably it has already been nil-checked by
// the caller — NOTE(review): confirm).
// If deref is true and right == nil, just do left = 0 (zeroing).
// skip indicates stores (of the slice header words) that can be
// avoided; see skipMask.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	// Assignment to the blank identifier discards the value.
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// We're assigning to a field of an ssa-able value.
			// Build a new struct with the new value for the field
			// being assigned and the old values for the rest.
			// For instance, given
			//
			//	type T struct{ a, b, c int }
			//	var x T
			//
			// the assignment x.b = 5 generates x = T{x.a, 5, x.c}.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Grab the current value of the whole struct.
			old := s.expr(left.X)

			// Make the new struct value.
			new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t)

			// Add fields as args: right for the assigned field,
			// selections from old for every other field.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the new struct to the base of the
			// dot expression.
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			// We're assigning to an element of an ssa-able array:
			// a[i] = v.
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			t := left.X.Type()
			n := t.NumElem()

			i := s.expr(left.Index) // index
			if n == 0 {
				// The bounds check must fail; the actual index
				// doesn't matter, so just check 0 against 0.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				// Only 0- or 1-element arrays are SSA-able here.
				s.Fatalf("assigning to non-1-length array")
			}
			// Rewrite to a = [1]T{v} after bounds-checking i < 1.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		left := left.(*ir.Name)
		// Plain ssa-able variable: update the variable assignment.
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// If this assignment clobbers an entire on-stack variable, emit
	// OpVarDef so liveness analysis knows the variable is redefined.
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Left is not ssa-able. Compute its address.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// Assigning to the Data field of a reflect slice/string
		// header: store it as unsafe.Pointer rather than uintptr,
		// presumably so the write is treated as a pointer store by
		// the write-barrier machinery — NOTE(review): confirm.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// Treat as a mem->mem move (or a zeroing when right is nil).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.move(t, addr, right)
		}
		return
	}
	// Treat as a typed store (with write barriers as needed).
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
3614
3615
3616 func (s *state) zeroVal(t *types.Type) *ssa.Value {
3617 switch {
3618 case t.IsInteger():
3619 switch t.Size() {
3620 case 1:
3621 return s.constInt8(t, 0)
3622 case 2:
3623 return s.constInt16(t, 0)
3624 case 4:
3625 return s.constInt32(t, 0)
3626 case 8:
3627 return s.constInt64(t, 0)
3628 default:
3629 s.Fatalf("bad sized integer type %v", t)
3630 }
3631 case t.IsFloat():
3632 switch t.Size() {
3633 case 4:
3634 return s.constFloat32(t, 0)
3635 case 8:
3636 return s.constFloat64(t, 0)
3637 default:
3638 s.Fatalf("bad sized float type %v", t)
3639 }
3640 case t.IsComplex():
3641 switch t.Size() {
3642 case 8:
3643 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
3644 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
3645 case 16:
3646 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
3647 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
3648 default:
3649 s.Fatalf("bad sized complex type %v", t)
3650 }
3651
3652 case t.IsString():
3653 return s.constEmptyString(t)
3654 case t.IsPtrShaped():
3655 return s.constNil(t)
3656 case t.IsBoolean():
3657 return s.constBool(false)
3658 case t.IsInterface():
3659 return s.constInterface(t)
3660 case t.IsSlice():
3661 return s.constSlice(t)
3662 case t.IsStruct():
3663 n := t.NumFields()
3664 v := s.entryNewValue0(ssa.StructMakeOp(t.NumFields()), t)
3665 for i := 0; i < n; i++ {
3666 v.AddArg(s.zeroVal(t.FieldType(i)))
3667 }
3668 return v
3669 case t.IsArray():
3670 switch t.NumElem() {
3671 case 0:
3672 return s.entryNewValue0(ssa.OpArrayMake0, t)
3673 case 1:
3674 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
3675 }
3676 }
3677 s.Fatalf("zero for type %v not implemented", t)
3678 return nil
3679 }
3680
// callKind distinguishes how a call should be issued by the backend.
type callKind int8

const (
	callNormal     callKind = iota // ordinary call
	callDefer                      // call deferred via a defer statement
	callDeferStack                 // deferred call using a stack defer record — NOTE(review): confirm at call sites
	callGo                         // call launched in a new goroutine via go
	callTail                       // tail call
)

// sfRtCallDef describes one softfloat runtime routine: the function
// symbol to call and the Go kind of its result.
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime function implementing the op
	rtype types.Kind // kind of the routine's result
}

// softFloatOps maps floating-point SSA ops to the runtime routines that
// emulate them. It is populated by softfloatInit, which InitConfig calls
// only when the target uses softfloat.
var softFloatOps map[ssa.Op]sfRtCallDef
3697
3698 func softfloatInit() {
3699
3700 softFloatOps = map[ssa.Op]sfRtCallDef{
3701 ssa.OpAdd32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
3702 ssa.OpAdd64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
3703 ssa.OpSub32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
3704 ssa.OpSub64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
3705 ssa.OpMul32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
3706 ssa.OpMul64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
3707 ssa.OpDiv32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
3708 ssa.OpDiv64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},
3709
3710 ssa.OpEq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
3711 ssa.OpEq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
3712 ssa.OpNeq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
3713 ssa.OpNeq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
3714 ssa.OpLess64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
3715 ssa.OpLess32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
3716 ssa.OpLeq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
3717 ssa.OpLeq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},
3718
3719 ssa.OpCvt32to32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
3720 ssa.OpCvt32Fto32: sfRtCallDef{typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
3721 ssa.OpCvt64to32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
3722 ssa.OpCvt32Fto64: sfRtCallDef{typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
3723 ssa.OpCvt64Uto32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
3724 ssa.OpCvt32Fto64U: sfRtCallDef{typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
3725 ssa.OpCvt32to64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
3726 ssa.OpCvt64Fto32: sfRtCallDef{typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
3727 ssa.OpCvt64to64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
3728 ssa.OpCvt64Fto64: sfRtCallDef{typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
3729 ssa.OpCvt64Uto64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
3730 ssa.OpCvt64Fto64U: sfRtCallDef{typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
3731 ssa.OpCvt32Fto64F: sfRtCallDef{typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
3732 ssa.OpCvt64Fto32F: sfRtCallDef{typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
3733 }
3734 }
3735
3736
3737
// sfcall lowers the floating-point op with arguments args to a call
// into the runtime's software floating-point routines (see softFloatOps
// and softfloatInit). It returns the resulting value and true if op has
// a softfloat implementation, or (nil, false) so the caller can fall
// back to the normal lowering.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the unsigned integer type of the same
	// size; the runtime routines take/return the raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// Less/Leq are mapped to fgt/fge in the table, so swap
			// the operands: a < b  <=>  b > a.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// Sub is mapped to fadd in the table, so negate the
			// second operand: a - b  ==  a + (-b).
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Pass float arguments as their same-size integer bit
		// patterns (OpCopy to the f2i type).
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		// Call the runtime routine; its result is also an integer
		// bit pattern, reinterpreted back to float if needed.
		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			// Neq is mapped to feq in the table; invert the answer.
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
3781
// intrinsics maps (architecture, package, function name) to a builder
// that expands a call to that function directly into SSA rather than
// emitting an ordinary call. Populated by InitTables.
var intrinsics map[intrinsicKey]intrinsicBuilder

// An intrinsicBuilder converts a call node n into an ssa value that
// implements that call as an intrinsic. args is the list of already
// evaluated arguments to the call. A builder may return nil when the
// intrinsic produces no result value (it only affects memory state).
type intrinsicBuilder func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value

// intrinsicKey identifies one intrinsic for one target architecture.
type intrinsicKey struct {
	arch *sys.Arch // target architecture
	pkg  string    // import path of the package declaring fn
	fn   string    // name of the intrinsic function
}
3793
3794 func InitTables() {
3795 intrinsics = map[intrinsicKey]intrinsicBuilder{}
3796
3797 var all []*sys.Arch
3798 var p4 []*sys.Arch
3799 var p8 []*sys.Arch
3800 var lwatomics []*sys.Arch
3801 for _, a := range &sys.Archs {
3802 all = append(all, a)
3803 if a.PtrSize == 4 {
3804 p4 = append(p4, a)
3805 } else {
3806 p8 = append(p8, a)
3807 }
3808 if a.Family != sys.PPC64 {
3809 lwatomics = append(lwatomics, a)
3810 }
3811 }
3812
3813
3814 add := func(pkg, fn string, b intrinsicBuilder, archs ...*sys.Arch) {
3815 for _, a := range archs {
3816 intrinsics[intrinsicKey{a, pkg, fn}] = b
3817 }
3818 }
3819
3820 addF := func(pkg, fn string, b intrinsicBuilder, archFamilies ...sys.ArchFamily) {
3821 m := 0
3822 for _, f := range archFamilies {
3823 if f >= 32 {
3824 panic("too many architecture families")
3825 }
3826 m |= 1 << uint(f)
3827 }
3828 for _, a := range all {
3829 if m>>uint(a.Family)&1 != 0 {
3830 intrinsics[intrinsicKey{a, pkg, fn}] = b
3831 }
3832 }
3833 }
3834
3835 alias := func(pkg, fn, pkg2, fn2 string, archs ...*sys.Arch) {
3836 aliased := false
3837 for _, a := range archs {
3838 if b, ok := intrinsics[intrinsicKey{a, pkg2, fn2}]; ok {
3839 intrinsics[intrinsicKey{a, pkg, fn}] = b
3840 aliased = true
3841 }
3842 }
3843 if !aliased {
3844 panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
3845 }
3846 }
3847
3848
3849 if !base.Flag.Cfg.Instrumenting {
3850 add("runtime", "slicebytetostringtmp",
3851 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3852
3853
3854
3855 return s.newValue2(ssa.OpStringMake, n.Type(), args[0], args[1])
3856 },
3857 all...)
3858 }
3859 addF("runtime/internal/math", "MulUintptr",
3860 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3861 if s.config.PtrSize == 4 {
3862 return s.newValue2(ssa.OpMul32uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
3863 }
3864 return s.newValue2(ssa.OpMul64uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
3865 },
3866 sys.AMD64, sys.I386, sys.MIPS64, sys.RISCV64)
3867 add("runtime", "KeepAlive",
3868 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3869 data := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, args[0])
3870 s.vars[memVar] = s.newValue2(ssa.OpKeepAlive, types.TypeMem, data, s.mem())
3871 return nil
3872 },
3873 all...)
3874 add("runtime", "getclosureptr",
3875 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3876 return s.newValue0(ssa.OpGetClosurePtr, s.f.Config.Types.Uintptr)
3877 },
3878 all...)
3879
3880 add("runtime", "getcallerpc",
3881 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3882 return s.newValue0(ssa.OpGetCallerPC, s.f.Config.Types.Uintptr)
3883 },
3884 all...)
3885
3886 add("runtime", "getcallersp",
3887 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3888 return s.newValue0(ssa.OpGetCallerSP, s.f.Config.Types.Uintptr)
3889 },
3890 all...)
3891
3892 addF("runtime", "publicationBarrier",
3893 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3894 s.vars[memVar] = s.newValue1(ssa.OpPubBarrier, types.TypeMem, s.mem())
3895 return nil
3896 },
3897 sys.ARM64)
3898
3899
3900 addF("runtime/internal/sys", "Ctz32",
3901 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3902 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], args[0])
3903 },
3904 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64)
3905 addF("runtime/internal/sys", "Ctz64",
3906 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3907 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], args[0])
3908 },
3909 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64)
3910 addF("runtime/internal/sys", "Bswap32",
3911 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3912 return s.newValue1(ssa.OpBswap32, types.Types[types.TUINT32], args[0])
3913 },
3914 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X)
3915 addF("runtime/internal/sys", "Bswap64",
3916 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3917 return s.newValue1(ssa.OpBswap64, types.Types[types.TUINT64], args[0])
3918 },
3919 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X)
3920
3921
3922 makePrefetchFunc := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3923 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3924 s.vars[memVar] = s.newValue2(op, types.TypeMem, args[0], s.mem())
3925 return nil
3926 }
3927 }
3928
3929
3930
3931 addF("runtime/internal/sys", "Prefetch", makePrefetchFunc(ssa.OpPrefetchCache),
3932 sys.AMD64, sys.ARM64, sys.PPC64)
3933 addF("runtime/internal/sys", "PrefetchStreamed", makePrefetchFunc(ssa.OpPrefetchCacheStreamed),
3934 sys.AMD64, sys.ARM64, sys.PPC64)
3935
3936
3937 addF("runtime/internal/atomic", "Load",
3938 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3939 v := s.newValue2(ssa.OpAtomicLoad32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
3940 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
3941 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
3942 },
3943 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3944 addF("runtime/internal/atomic", "Load8",
3945 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3946 v := s.newValue2(ssa.OpAtomicLoad8, types.NewTuple(types.Types[types.TUINT8], types.TypeMem), args[0], s.mem())
3947 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
3948 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT8], v)
3949 },
3950 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3951 addF("runtime/internal/atomic", "Load64",
3952 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3953 v := s.newValue2(ssa.OpAtomicLoad64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
3954 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
3955 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
3956 },
3957 sys.AMD64, sys.ARM64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3958 addF("runtime/internal/atomic", "LoadAcq",
3959 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3960 v := s.newValue2(ssa.OpAtomicLoadAcq32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
3961 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
3962 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
3963 },
3964 sys.PPC64, sys.S390X)
3965 addF("runtime/internal/atomic", "LoadAcq64",
3966 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3967 v := s.newValue2(ssa.OpAtomicLoadAcq64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
3968 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
3969 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
3970 },
3971 sys.PPC64)
3972 addF("runtime/internal/atomic", "Loadp",
3973 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3974 v := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(s.f.Config.Types.BytePtr, types.TypeMem), args[0], s.mem())
3975 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
3976 return s.newValue1(ssa.OpSelect0, s.f.Config.Types.BytePtr, v)
3977 },
3978 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3979
3980 addF("runtime/internal/atomic", "Store",
3981 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3982 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore32, types.TypeMem, args[0], args[1], s.mem())
3983 return nil
3984 },
3985 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3986 addF("runtime/internal/atomic", "Store8",
3987 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3988 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore8, types.TypeMem, args[0], args[1], s.mem())
3989 return nil
3990 },
3991 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3992 addF("runtime/internal/atomic", "Store64",
3993 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
3994 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore64, types.TypeMem, args[0], args[1], s.mem())
3995 return nil
3996 },
3997 sys.AMD64, sys.ARM64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
3998 addF("runtime/internal/atomic", "StorepNoWB",
3999 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4000 s.vars[memVar] = s.newValue3(ssa.OpAtomicStorePtrNoWB, types.TypeMem, args[0], args[1], s.mem())
4001 return nil
4002 },
4003 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.RISCV64, sys.S390X)
4004 addF("runtime/internal/atomic", "StoreRel",
4005 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4006 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel32, types.TypeMem, args[0], args[1], s.mem())
4007 return nil
4008 },
4009 sys.PPC64, sys.S390X)
4010 addF("runtime/internal/atomic", "StoreRel64",
4011 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4012 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel64, types.TypeMem, args[0], args[1], s.mem())
4013 return nil
4014 },
4015 sys.PPC64)
4016
4017 addF("runtime/internal/atomic", "Xchg",
4018 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4019 v := s.newValue3(ssa.OpAtomicExchange32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4020 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4021 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4022 },
4023 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4024 addF("runtime/internal/atomic", "Xchg64",
4025 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4026 v := s.newValue3(ssa.OpAtomicExchange64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4027 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4028 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4029 },
4030 sys.AMD64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4031
4032 type atomicOpEmitter func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind)
4033
4034 makeAtomicGuardedIntrinsicARM64 := func(op0, op1 ssa.Op, typ, rtyp types.Kind, emit atomicOpEmitter) intrinsicBuilder {
4035
4036 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4037
4038 addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARM64HasATOMICS, s.sb)
4039 v := s.load(types.Types[types.TBOOL], addr)
4040 b := s.endBlock()
4041 b.Kind = ssa.BlockIf
4042 b.SetControl(v)
4043 bTrue := s.f.NewBlock(ssa.BlockPlain)
4044 bFalse := s.f.NewBlock(ssa.BlockPlain)
4045 bEnd := s.f.NewBlock(ssa.BlockPlain)
4046 b.AddEdgeTo(bTrue)
4047 b.AddEdgeTo(bFalse)
4048 b.Likely = ssa.BranchLikely
4049
4050
4051 s.startBlock(bTrue)
4052 emit(s, n, args, op1, typ)
4053 s.endBlock().AddEdgeTo(bEnd)
4054
4055
4056 s.startBlock(bFalse)
4057 emit(s, n, args, op0, typ)
4058 s.endBlock().AddEdgeTo(bEnd)
4059
4060
4061 s.startBlock(bEnd)
4062 if rtyp == types.TNIL {
4063 return nil
4064 } else {
4065 return s.variable(n, types.Types[rtyp])
4066 }
4067 }
4068 }
4069
4070 atomicXchgXaddEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4071 v := s.newValue3(op, types.NewTuple(types.Types[typ], types.TypeMem), args[0], args[1], s.mem())
4072 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4073 s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
4074 }
4075 addF("runtime/internal/atomic", "Xchg",
4076 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange32, ssa.OpAtomicExchange32Variant, types.TUINT32, types.TUINT32, atomicXchgXaddEmitterARM64),
4077 sys.ARM64)
4078 addF("runtime/internal/atomic", "Xchg64",
4079 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange64, ssa.OpAtomicExchange64Variant, types.TUINT64, types.TUINT64, atomicXchgXaddEmitterARM64),
4080 sys.ARM64)
4081
4082 addF("runtime/internal/atomic", "Xadd",
4083 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4084 v := s.newValue3(ssa.OpAtomicAdd32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4085 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4086 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4087 },
4088 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4089 addF("runtime/internal/atomic", "Xadd64",
4090 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4091 v := s.newValue3(ssa.OpAtomicAdd64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4092 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4093 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4094 },
4095 sys.AMD64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4096
4097 addF("runtime/internal/atomic", "Xadd",
4098 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd32, ssa.OpAtomicAdd32Variant, types.TUINT32, types.TUINT32, atomicXchgXaddEmitterARM64),
4099 sys.ARM64)
4100 addF("runtime/internal/atomic", "Xadd64",
4101 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd64, ssa.OpAtomicAdd64Variant, types.TUINT64, types.TUINT64, atomicXchgXaddEmitterARM64),
4102 sys.ARM64)
4103
4104 addF("runtime/internal/atomic", "Cas",
4105 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4106 v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4107 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4108 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4109 },
4110 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4111 addF("runtime/internal/atomic", "Cas64",
4112 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4113 v := s.newValue4(ssa.OpAtomicCompareAndSwap64, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4114 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4115 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4116 },
4117 sys.AMD64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4118 addF("runtime/internal/atomic", "CasRel",
4119 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4120 v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4121 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4122 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4123 },
4124 sys.PPC64)
4125
4126 atomicCasEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4127 v := s.newValue4(op, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4128 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4129 s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
4130 }
4131
4132 addF("runtime/internal/atomic", "Cas",
4133 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap32, ssa.OpAtomicCompareAndSwap32Variant, types.TUINT32, types.TBOOL, atomicCasEmitterARM64),
4134 sys.ARM64)
4135 addF("runtime/internal/atomic", "Cas64",
4136 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap64, ssa.OpAtomicCompareAndSwap64Variant, types.TUINT64, types.TBOOL, atomicCasEmitterARM64),
4137 sys.ARM64)
4138
4139 addF("runtime/internal/atomic", "And8",
4140 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4141 s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd8, types.TypeMem, args[0], args[1], s.mem())
4142 return nil
4143 },
4144 sys.AMD64, sys.MIPS, sys.PPC64, sys.RISCV64, sys.S390X)
4145 addF("runtime/internal/atomic", "And",
4146 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4147 s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd32, types.TypeMem, args[0], args[1], s.mem())
4148 return nil
4149 },
4150 sys.AMD64, sys.MIPS, sys.PPC64, sys.RISCV64, sys.S390X)
4151 addF("runtime/internal/atomic", "Or8",
4152 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4153 s.vars[memVar] = s.newValue3(ssa.OpAtomicOr8, types.TypeMem, args[0], args[1], s.mem())
4154 return nil
4155 },
4156 sys.AMD64, sys.ARM64, sys.MIPS, sys.PPC64, sys.RISCV64, sys.S390X)
4157 addF("runtime/internal/atomic", "Or",
4158 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4159 s.vars[memVar] = s.newValue3(ssa.OpAtomicOr32, types.TypeMem, args[0], args[1], s.mem())
4160 return nil
4161 },
4162 sys.AMD64, sys.MIPS, sys.PPC64, sys.RISCV64, sys.S390X)
4163
4164 atomicAndOrEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4165 s.vars[memVar] = s.newValue3(op, types.TypeMem, args[0], args[1], s.mem())
4166 }
4167
4168 addF("runtime/internal/atomic", "And8",
4169 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd8, ssa.OpAtomicAnd8Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4170 sys.ARM64)
4171 addF("runtime/internal/atomic", "And",
4172 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd32, ssa.OpAtomicAnd32Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4173 sys.ARM64)
4174 addF("runtime/internal/atomic", "Or8",
4175 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr8, ssa.OpAtomicOr8Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4176 sys.ARM64)
4177 addF("runtime/internal/atomic", "Or",
4178 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr32, ssa.OpAtomicOr32Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4179 sys.ARM64)
4180
4181
4182 alias("runtime/internal/atomic", "Loadint32", "runtime/internal/atomic", "Load", all...)
4183 alias("runtime/internal/atomic", "Loadint64", "runtime/internal/atomic", "Load64", all...)
4184 alias("runtime/internal/atomic", "Loaduintptr", "runtime/internal/atomic", "Load", p4...)
4185 alias("runtime/internal/atomic", "Loaduintptr", "runtime/internal/atomic", "Load64", p8...)
4186 alias("runtime/internal/atomic", "Loaduint", "runtime/internal/atomic", "Load", p4...)
4187 alias("runtime/internal/atomic", "Loaduint", "runtime/internal/atomic", "Load64", p8...)
4188 alias("runtime/internal/atomic", "LoadAcq", "runtime/internal/atomic", "Load", lwatomics...)
4189 alias("runtime/internal/atomic", "LoadAcq64", "runtime/internal/atomic", "Load64", lwatomics...)
4190 alias("runtime/internal/atomic", "LoadAcquintptr", "runtime/internal/atomic", "LoadAcq", p4...)
4191 alias("sync", "runtime_LoadAcquintptr", "runtime/internal/atomic", "LoadAcq", p4...)
4192 alias("runtime/internal/atomic", "LoadAcquintptr", "runtime/internal/atomic", "LoadAcq64", p8...)
4193 alias("sync", "runtime_LoadAcquintptr", "runtime/internal/atomic", "LoadAcq64", p8...)
4194
4195
4196 alias("runtime/internal/atomic", "Storeint32", "runtime/internal/atomic", "Store", all...)
4197 alias("runtime/internal/atomic", "Storeint64", "runtime/internal/atomic", "Store64", all...)
4198 alias("runtime/internal/atomic", "Storeuintptr", "runtime/internal/atomic", "Store", p4...)
4199 alias("runtime/internal/atomic", "Storeuintptr", "runtime/internal/atomic", "Store64", p8...)
4200 alias("runtime/internal/atomic", "StoreRel", "runtime/internal/atomic", "Store", lwatomics...)
4201 alias("runtime/internal/atomic", "StoreRel64", "runtime/internal/atomic", "Store64", lwatomics...)
4202 alias("runtime/internal/atomic", "StoreReluintptr", "runtime/internal/atomic", "StoreRel", p4...)
4203 alias("sync", "runtime_StoreReluintptr", "runtime/internal/atomic", "StoreRel", p4...)
4204 alias("runtime/internal/atomic", "StoreReluintptr", "runtime/internal/atomic", "StoreRel64", p8...)
4205 alias("sync", "runtime_StoreReluintptr", "runtime/internal/atomic", "StoreRel64", p8...)
4206
4207
4208 alias("runtime/internal/atomic", "Xchgint32", "runtime/internal/atomic", "Xchg", all...)
4209 alias("runtime/internal/atomic", "Xchgint64", "runtime/internal/atomic", "Xchg64", all...)
4210 alias("runtime/internal/atomic", "Xchguintptr", "runtime/internal/atomic", "Xchg", p4...)
4211 alias("runtime/internal/atomic", "Xchguintptr", "runtime/internal/atomic", "Xchg64", p8...)
4212
4213
4214 alias("runtime/internal/atomic", "Xaddint32", "runtime/internal/atomic", "Xadd", all...)
4215 alias("runtime/internal/atomic", "Xaddint64", "runtime/internal/atomic", "Xadd64", all...)
4216 alias("runtime/internal/atomic", "Xadduintptr", "runtime/internal/atomic", "Xadd", p4...)
4217 alias("runtime/internal/atomic", "Xadduintptr", "runtime/internal/atomic", "Xadd64", p8...)
4218
4219
4220 alias("runtime/internal/atomic", "Casint32", "runtime/internal/atomic", "Cas", all...)
4221 alias("runtime/internal/atomic", "Casint64", "runtime/internal/atomic", "Cas64", all...)
4222 alias("runtime/internal/atomic", "Casuintptr", "runtime/internal/atomic", "Cas", p4...)
4223 alias("runtime/internal/atomic", "Casuintptr", "runtime/internal/atomic", "Cas64", p8...)
4224 alias("runtime/internal/atomic", "Casp1", "runtime/internal/atomic", "Cas", p4...)
4225 alias("runtime/internal/atomic", "Casp1", "runtime/internal/atomic", "Cas64", p8...)
4226 alias("runtime/internal/atomic", "CasRel", "runtime/internal/atomic", "Cas", lwatomics...)
4227
4228
4229 addF("math", "Sqrt",
4230 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4231 return s.newValue1(ssa.OpSqrt, types.Types[types.TFLOAT64], args[0])
4232 },
4233 sys.I386, sys.AMD64, sys.ARM, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
4234 addF("math", "Trunc",
4235 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4236 return s.newValue1(ssa.OpTrunc, types.Types[types.TFLOAT64], args[0])
4237 },
4238 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4239 addF("math", "Ceil",
4240 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4241 return s.newValue1(ssa.OpCeil, types.Types[types.TFLOAT64], args[0])
4242 },
4243 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4244 addF("math", "Floor",
4245 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4246 return s.newValue1(ssa.OpFloor, types.Types[types.TFLOAT64], args[0])
4247 },
4248 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4249 addF("math", "Round",
4250 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4251 return s.newValue1(ssa.OpRound, types.Types[types.TFLOAT64], args[0])
4252 },
4253 sys.ARM64, sys.PPC64, sys.S390X)
4254 addF("math", "RoundToEven",
4255 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4256 return s.newValue1(ssa.OpRoundToEven, types.Types[types.TFLOAT64], args[0])
4257 },
4258 sys.ARM64, sys.S390X, sys.Wasm)
4259 addF("math", "Abs",
4260 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4261 return s.newValue1(ssa.OpAbs, types.Types[types.TFLOAT64], args[0])
4262 },
4263 sys.ARM64, sys.ARM, sys.PPC64, sys.RISCV64, sys.Wasm)
4264 addF("math", "Copysign",
4265 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4266 return s.newValue2(ssa.OpCopysign, types.Types[types.TFLOAT64], args[0], args[1])
4267 },
4268 sys.PPC64, sys.RISCV64, sys.Wasm)
4269 addF("math", "FMA",
4270 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4271 return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4272 },
4273 sys.ARM64, sys.PPC64, sys.RISCV64, sys.S390X)
4274 addF("math", "FMA",
4275 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4276 if !s.config.UseFMA {
4277 s.vars[n] = s.callResult(n, callNormal)
4278 return s.variable(n, types.Types[types.TFLOAT64])
4279 }
4280
4281 if buildcfg.GOAMD64 >= 3 {
4282 return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4283 }
4284
4285 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasFMA)
4286 b := s.endBlock()
4287 b.Kind = ssa.BlockIf
4288 b.SetControl(v)
4289 bTrue := s.f.NewBlock(ssa.BlockPlain)
4290 bFalse := s.f.NewBlock(ssa.BlockPlain)
4291 bEnd := s.f.NewBlock(ssa.BlockPlain)
4292 b.AddEdgeTo(bTrue)
4293 b.AddEdgeTo(bFalse)
4294 b.Likely = ssa.BranchLikely
4295
4296
4297 s.startBlock(bTrue)
4298 s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4299 s.endBlock().AddEdgeTo(bEnd)
4300
4301
4302 s.startBlock(bFalse)
4303 s.vars[n] = s.callResult(n, callNormal)
4304 s.endBlock().AddEdgeTo(bEnd)
4305
4306
4307 s.startBlock(bEnd)
4308 return s.variable(n, types.Types[types.TFLOAT64])
4309 },
4310 sys.AMD64)
4311 addF("math", "FMA",
4312 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4313 if !s.config.UseFMA {
4314 s.vars[n] = s.callResult(n, callNormal)
4315 return s.variable(n, types.Types[types.TFLOAT64])
4316 }
4317 addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARMHasVFPv4, s.sb)
4318 v := s.load(types.Types[types.TBOOL], addr)
4319 b := s.endBlock()
4320 b.Kind = ssa.BlockIf
4321 b.SetControl(v)
4322 bTrue := s.f.NewBlock(ssa.BlockPlain)
4323 bFalse := s.f.NewBlock(ssa.BlockPlain)
4324 bEnd := s.f.NewBlock(ssa.BlockPlain)
4325 b.AddEdgeTo(bTrue)
4326 b.AddEdgeTo(bFalse)
4327 b.Likely = ssa.BranchLikely
4328
4329
4330 s.startBlock(bTrue)
4331 s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4332 s.endBlock().AddEdgeTo(bEnd)
4333
4334
4335 s.startBlock(bFalse)
4336 s.vars[n] = s.callResult(n, callNormal)
4337 s.endBlock().AddEdgeTo(bEnd)
4338
4339
4340 s.startBlock(bEnd)
4341 return s.variable(n, types.Types[types.TFLOAT64])
4342 },
4343 sys.ARM)
4344
4345 makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4346 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4347 if buildcfg.GOAMD64 >= 2 {
4348 return s.newValue1(op, types.Types[types.TFLOAT64], args[0])
4349 }
4350
4351 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasSSE41)
4352 b := s.endBlock()
4353 b.Kind = ssa.BlockIf
4354 b.SetControl(v)
4355 bTrue := s.f.NewBlock(ssa.BlockPlain)
4356 bFalse := s.f.NewBlock(ssa.BlockPlain)
4357 bEnd := s.f.NewBlock(ssa.BlockPlain)
4358 b.AddEdgeTo(bTrue)
4359 b.AddEdgeTo(bFalse)
4360 b.Likely = ssa.BranchLikely
4361
4362
4363 s.startBlock(bTrue)
4364 s.vars[n] = s.newValue1(op, types.Types[types.TFLOAT64], args[0])
4365 s.endBlock().AddEdgeTo(bEnd)
4366
4367
4368 s.startBlock(bFalse)
4369 s.vars[n] = s.callResult(n, callNormal)
4370 s.endBlock().AddEdgeTo(bEnd)
4371
4372
4373 s.startBlock(bEnd)
4374 return s.variable(n, types.Types[types.TFLOAT64])
4375 }
4376 }
4377 addF("math", "RoundToEven",
4378 makeRoundAMD64(ssa.OpRoundToEven),
4379 sys.AMD64)
4380 addF("math", "Floor",
4381 makeRoundAMD64(ssa.OpFloor),
4382 sys.AMD64)
4383 addF("math", "Ceil",
4384 makeRoundAMD64(ssa.OpCeil),
4385 sys.AMD64)
4386 addF("math", "Trunc",
4387 makeRoundAMD64(ssa.OpTrunc),
4388 sys.AMD64)
4389
4390
4391 addF("math/bits", "TrailingZeros64",
4392 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4393 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], args[0])
4394 },
4395 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4396 addF("math/bits", "TrailingZeros32",
4397 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4398 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], args[0])
4399 },
4400 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4401 addF("math/bits", "TrailingZeros16",
4402 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4403 x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
4404 c := s.constInt32(types.Types[types.TUINT32], 1<<16)
4405 y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
4406 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
4407 },
4408 sys.MIPS)
4409 addF("math/bits", "TrailingZeros16",
4410 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4411 return s.newValue1(ssa.OpCtz16, types.Types[types.TINT], args[0])
4412 },
4413 sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
4414 addF("math/bits", "TrailingZeros16",
4415 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4416 x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
4417 c := s.constInt64(types.Types[types.TUINT64], 1<<16)
4418 y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
4419 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
4420 },
4421 sys.S390X, sys.PPC64)
4422 addF("math/bits", "TrailingZeros8",
4423 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4424 x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
4425 c := s.constInt32(types.Types[types.TUINT32], 1<<8)
4426 y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
4427 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
4428 },
4429 sys.MIPS)
4430 addF("math/bits", "TrailingZeros8",
4431 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4432 return s.newValue1(ssa.OpCtz8, types.Types[types.TINT], args[0])
4433 },
4434 sys.AMD64, sys.ARM, sys.ARM64, sys.Wasm)
4435 addF("math/bits", "TrailingZeros8",
4436 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4437 x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
4438 c := s.constInt64(types.Types[types.TUINT64], 1<<8)
4439 y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
4440 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
4441 },
4442 sys.S390X)
4443 alias("math/bits", "ReverseBytes64", "runtime/internal/sys", "Bswap64", all...)
4444 alias("math/bits", "ReverseBytes32", "runtime/internal/sys", "Bswap32", all...)
4445
4446
4447 addF("math/bits", "Len64",
4448 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4449 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
4450 },
4451 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4452 addF("math/bits", "Len32",
4453 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4454 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4455 },
4456 sys.AMD64, sys.ARM64, sys.PPC64)
4457 addF("math/bits", "Len32",
4458 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4459 if s.config.PtrSize == 4 {
4460 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4461 }
4462 x := s.newValue1(ssa.OpZeroExt32to64, types.Types[types.TUINT64], args[0])
4463 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4464 },
4465 sys.ARM, sys.S390X, sys.MIPS, sys.Wasm)
4466 addF("math/bits", "Len16",
4467 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4468 if s.config.PtrSize == 4 {
4469 x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
4470 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
4471 }
4472 x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
4473 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4474 },
4475 sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4476 addF("math/bits", "Len16",
4477 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4478 return s.newValue1(ssa.OpBitLen16, types.Types[types.TINT], args[0])
4479 },
4480 sys.AMD64)
4481 addF("math/bits", "Len8",
4482 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4483 if s.config.PtrSize == 4 {
4484 x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
4485 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
4486 }
4487 x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
4488 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4489 },
4490 sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4491 addF("math/bits", "Len8",
4492 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4493 return s.newValue1(ssa.OpBitLen8, types.Types[types.TINT], args[0])
4494 },
4495 sys.AMD64)
4496 addF("math/bits", "Len",
4497 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4498 if s.config.PtrSize == 4 {
4499 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4500 }
4501 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
4502 },
4503 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4504
4505 addF("math/bits", "Reverse64",
4506 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4507 return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
4508 },
4509 sys.ARM64)
4510 addF("math/bits", "Reverse32",
4511 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4512 return s.newValue1(ssa.OpBitRev32, types.Types[types.TINT], args[0])
4513 },
4514 sys.ARM64)
4515 addF("math/bits", "Reverse16",
4516 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4517 return s.newValue1(ssa.OpBitRev16, types.Types[types.TINT], args[0])
4518 },
4519 sys.ARM64)
4520 addF("math/bits", "Reverse8",
4521 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4522 return s.newValue1(ssa.OpBitRev8, types.Types[types.TINT], args[0])
4523 },
4524 sys.ARM64)
4525 addF("math/bits", "Reverse",
4526 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4527 if s.config.PtrSize == 4 {
4528 return s.newValue1(ssa.OpBitRev32, types.Types[types.TINT], args[0])
4529 }
4530 return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
4531 },
4532 sys.ARM64)
4533 addF("math/bits", "RotateLeft8",
4534 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4535 return s.newValue2(ssa.OpRotateLeft8, types.Types[types.TUINT8], args[0], args[1])
4536 },
4537 sys.AMD64)
4538 addF("math/bits", "RotateLeft16",
4539 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4540 return s.newValue2(ssa.OpRotateLeft16, types.Types[types.TUINT16], args[0], args[1])
4541 },
4542 sys.AMD64)
4543 addF("math/bits", "RotateLeft32",
4544 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4545 return s.newValue2(ssa.OpRotateLeft32, types.Types[types.TUINT32], args[0], args[1])
4546 },
4547 sys.AMD64, sys.ARM, sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm)
4548 addF("math/bits", "RotateLeft64",
4549 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4550 return s.newValue2(ssa.OpRotateLeft64, types.Types[types.TUINT64], args[0], args[1])
4551 },
4552 sys.AMD64, sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm)
4553 alias("math/bits", "RotateLeft", "math/bits", "RotateLeft64", p8...)
4554
4555 makeOnesCountAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4556 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4557 if buildcfg.GOAMD64 >= 2 {
4558 return s.newValue1(op, types.Types[types.TINT], args[0])
4559 }
4560
4561 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasPOPCNT)
4562 b := s.endBlock()
4563 b.Kind = ssa.BlockIf
4564 b.SetControl(v)
4565 bTrue := s.f.NewBlock(ssa.BlockPlain)
4566 bFalse := s.f.NewBlock(ssa.BlockPlain)
4567 bEnd := s.f.NewBlock(ssa.BlockPlain)
4568 b.AddEdgeTo(bTrue)
4569 b.AddEdgeTo(bFalse)
4570 b.Likely = ssa.BranchLikely
4571
4572
4573 s.startBlock(bTrue)
4574 s.vars[n] = s.newValue1(op, types.Types[types.TINT], args[0])
4575 s.endBlock().AddEdgeTo(bEnd)
4576
4577
4578 s.startBlock(bFalse)
4579 s.vars[n] = s.callResult(n, callNormal)
4580 s.endBlock().AddEdgeTo(bEnd)
4581
4582
4583 s.startBlock(bEnd)
4584 return s.variable(n, types.Types[types.TINT])
4585 }
4586 }
4587 addF("math/bits", "OnesCount64",
4588 makeOnesCountAMD64(ssa.OpPopCount64),
4589 sys.AMD64)
4590 addF("math/bits", "OnesCount64",
4591 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4592 return s.newValue1(ssa.OpPopCount64, types.Types[types.TINT], args[0])
4593 },
4594 sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
4595 addF("math/bits", "OnesCount32",
4596 makeOnesCountAMD64(ssa.OpPopCount32),
4597 sys.AMD64)
4598 addF("math/bits", "OnesCount32",
4599 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4600 return s.newValue1(ssa.OpPopCount32, types.Types[types.TINT], args[0])
4601 },
4602 sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
4603 addF("math/bits", "OnesCount16",
4604 makeOnesCountAMD64(ssa.OpPopCount16),
4605 sys.AMD64)
4606 addF("math/bits", "OnesCount16",
4607 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4608 return s.newValue1(ssa.OpPopCount16, types.Types[types.TINT], args[0])
4609 },
4610 sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm)
4611 addF("math/bits", "OnesCount8",
4612 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4613 return s.newValue1(ssa.OpPopCount8, types.Types[types.TINT], args[0])
4614 },
4615 sys.S390X, sys.PPC64, sys.Wasm)
4616 addF("math/bits", "OnesCount",
4617 makeOnesCountAMD64(ssa.OpPopCount64),
4618 sys.AMD64)
4619 addF("math/bits", "Mul64",
4620 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4621 return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1])
4622 },
4623 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.MIPS64, sys.RISCV64)
4624 alias("math/bits", "Mul", "math/bits", "Mul64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X, sys.ArchMIPS64, sys.ArchMIPS64LE, sys.ArchRISCV64)
4625 alias("runtime/internal/math", "Mul64", "math/bits", "Mul64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X, sys.ArchMIPS64, sys.ArchMIPS64LE, sys.ArchRISCV64)
4626 addF("math/bits", "Add64",
4627 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4628 return s.newValue3(ssa.OpAdd64carry, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
4629 },
4630 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X)
4631 alias("math/bits", "Add", "math/bits", "Add64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X)
4632 addF("math/bits", "Sub64",
4633 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4634 return s.newValue3(ssa.OpSub64borrow, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
4635 },
4636 sys.AMD64, sys.ARM64, sys.S390X)
4637 alias("math/bits", "Sub", "math/bits", "Sub64", sys.ArchAMD64, sys.ArchARM64, sys.ArchS390X)
4638 addF("math/bits", "Div64",
4639 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4640
4641 cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64]))
4642 s.check(cmpZero, ir.Syms.Panicdivide)
4643 cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2])
4644 s.check(cmpOverflow, ir.Syms.Panicoverflow)
4645 return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
4646 },
4647 sys.AMD64)
4648 alias("math/bits", "Div", "math/bits", "Div64", sys.ArchAMD64)
4649
4650 alias("runtime/internal/sys", "Ctz8", "math/bits", "TrailingZeros8", all...)
4651 alias("runtime/internal/sys", "TrailingZeros8", "math/bits", "TrailingZeros8", all...)
4652 alias("runtime/internal/sys", "TrailingZeros64", "math/bits", "TrailingZeros64", all...)
4653 alias("runtime/internal/sys", "Len8", "math/bits", "Len8", all...)
4654 alias("runtime/internal/sys", "Len64", "math/bits", "Len64", all...)
4655 alias("runtime/internal/sys", "OnesCount64", "math/bits", "OnesCount64", all...)
4656
4657
4658
4659
4660 alias("sync/atomic", "LoadInt32", "runtime/internal/atomic", "Load", all...)
4661 alias("sync/atomic", "LoadInt64", "runtime/internal/atomic", "Load64", all...)
4662 alias("sync/atomic", "LoadPointer", "runtime/internal/atomic", "Loadp", all...)
4663 alias("sync/atomic", "LoadUint32", "runtime/internal/atomic", "Load", all...)
4664 alias("sync/atomic", "LoadUint64", "runtime/internal/atomic", "Load64", all...)
4665 alias("sync/atomic", "LoadUintptr", "runtime/internal/atomic", "Load", p4...)
4666 alias("sync/atomic", "LoadUintptr", "runtime/internal/atomic", "Load64", p8...)
4667
4668 alias("sync/atomic", "StoreInt32", "runtime/internal/atomic", "Store", all...)
4669 alias("sync/atomic", "StoreInt64", "runtime/internal/atomic", "Store64", all...)
4670
4671 alias("sync/atomic", "StoreUint32", "runtime/internal/atomic", "Store", all...)
4672 alias("sync/atomic", "StoreUint64", "runtime/internal/atomic", "Store64", all...)
4673 alias("sync/atomic", "StoreUintptr", "runtime/internal/atomic", "Store", p4...)
4674 alias("sync/atomic", "StoreUintptr", "runtime/internal/atomic", "Store64", p8...)
4675
4676 alias("sync/atomic", "SwapInt32", "runtime/internal/atomic", "Xchg", all...)
4677 alias("sync/atomic", "SwapInt64", "runtime/internal/atomic", "Xchg64", all...)
4678 alias("sync/atomic", "SwapUint32", "runtime/internal/atomic", "Xchg", all...)
4679 alias("sync/atomic", "SwapUint64", "runtime/internal/atomic", "Xchg64", all...)
4680 alias("sync/atomic", "SwapUintptr", "runtime/internal/atomic", "Xchg", p4...)
4681 alias("sync/atomic", "SwapUintptr", "runtime/internal/atomic", "Xchg64", p8...)
4682
4683 alias("sync/atomic", "CompareAndSwapInt32", "runtime/internal/atomic", "Cas", all...)
4684 alias("sync/atomic", "CompareAndSwapInt64", "runtime/internal/atomic", "Cas64", all...)
4685 alias("sync/atomic", "CompareAndSwapUint32", "runtime/internal/atomic", "Cas", all...)
4686 alias("sync/atomic", "CompareAndSwapUint64", "runtime/internal/atomic", "Cas64", all...)
4687 alias("sync/atomic", "CompareAndSwapUintptr", "runtime/internal/atomic", "Cas", p4...)
4688 alias("sync/atomic", "CompareAndSwapUintptr", "runtime/internal/atomic", "Cas64", p8...)
4689
4690 alias("sync/atomic", "AddInt32", "runtime/internal/atomic", "Xadd", all...)
4691 alias("sync/atomic", "AddInt64", "runtime/internal/atomic", "Xadd64", all...)
4692 alias("sync/atomic", "AddUint32", "runtime/internal/atomic", "Xadd", all...)
4693 alias("sync/atomic", "AddUint64", "runtime/internal/atomic", "Xadd64", all...)
4694 alias("sync/atomic", "AddUintptr", "runtime/internal/atomic", "Xadd", p4...)
4695 alias("sync/atomic", "AddUintptr", "runtime/internal/atomic", "Xadd64", p8...)
4696
4697
4698 add("math/big", "mulWW",
4699 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4700 return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1])
4701 },
4702 sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64LE, sys.ArchPPC64, sys.ArchS390X)
4703 }
4704
4705
4706
4707 func findIntrinsic(sym *types.Sym) intrinsicBuilder {
4708 if sym == nil || sym.Pkg == nil {
4709 return nil
4710 }
4711 pkg := sym.Pkg.Path
4712 if sym.Pkg == types.LocalPkg {
4713 pkg = base.Ctxt.Pkgpath
4714 }
4715 if sym.Pkg == ir.Pkgs.Runtime {
4716 pkg = "runtime"
4717 }
4718 if base.Flag.Race && pkg == "sync/atomic" {
4719
4720
4721 return nil
4722 }
4723
4724
4725 if Arch.SoftFloat && pkg == "math" {
4726 return nil
4727 }
4728
4729 fn := sym.Name
4730 if ssa.IntrinsicsDisable {
4731 if pkg == "runtime" && (fn == "getcallerpc" || fn == "getcallersp" || fn == "getclosureptr") {
4732
4733 } else {
4734 return nil
4735 }
4736 }
4737 return intrinsics[intrinsicKey{Arch.LinkArch.Arch, pkg, fn}]
4738 }
4739
4740 func IsIntrinsicCall(n *ir.CallExpr) bool {
4741 if n == nil {
4742 return false
4743 }
4744 name, ok := n.X.(*ir.Name)
4745 if !ok {
4746 return false
4747 }
4748 return findIntrinsic(name.Sym()) != nil
4749 }
4750
4751
4752 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4753 v := findIntrinsic(n.X.Sym())(s, n, s.intrinsicArgs(n))
4754 if ssa.IntrinsicsDebug > 0 {
4755 x := v
4756 if x == nil {
4757 x = s.mem()
4758 }
4759 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4760 x = x.Args[0]
4761 }
4762 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.X.Sym().Name, x.LongString())
4763 }
4764 return v
4765 }
4766
4767
4768 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4769 args := make([]*ssa.Value, len(n.Args))
4770 for i, n := range n.Args {
4771 args[i] = s.expr(n)
4772 }
4773 return args
4774 }
4775
4776
4777
4778
4779
4780
4781
// openDeferRecord records an open-coded defer of call n: it evaluates and
// saves the deferred function value now, appends an openDeferInfo entry, and
// sets this defer's bit in the deferBits tracking variable so openDeferExit
// can later emit the call inline at each function exit. The deferred call
// must take no arguments and return no results.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.X.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.X

	// Evaluate the function value being deferred and save it to a stack
	// slot so it survives until the defer runs at function exit.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	// For a direct call to a PFUNC name, the target is statically known,
	// so no closure value needs to be reloaded at exit.
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set bit `index` in deferBits to mark this defer as active, updating
	// both the SSA variable and the in-memory copy (the memory copy is
	// what the runtime/exit code consults after a panic).
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4810
4811
4812
4813
4814
4815
// openDeferSave stores val (of SSA-able, pointer-containing type t) into a
// fresh autotmp stack slot dedicated to open-coded defers and returns the
// address of that slot. The slot's VarDef/VarLive and address computation are
// placed in the entry block so the slot is considered defined for the whole
// function, not just from the current block onward.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !TypeOK(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	var addrTemp *ssa.Value
	if s.curBlock.ID != s.f.Entry.ID {
		// We are past the entry block: splice the VarDef/VarLive and the
		// address computation directly into the entry block's memory
		// chain (using NoXPos, since these are compiler-synthesized).
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Still in the entry block: emit through the normal current-block
		// helpers (isStmt=false, these carry no statement marker).
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot may be read after a panic before this function's store has
	// executed on some paths, so it must start out zeroed.
	temp.SetNeedzero(true)

	// Save the deferred value into the slot.
	s.store(t, addrTemp, val)
	return addrTemp
}
4855
4856
4857
4858
4859
// openDeferExit generates inline code at a function exit to run all the
// recorded open-coded defers in reverse (LIFO) order. For each defer it
// tests the corresponding bit of deferBits, and if set, clears the bit and
// emits the (argumentless, resultless) deferred call.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	// Remember this exit block and the defer count so a later identical
	// exit can presumably reuse it — NOTE(review): confirm against the
	// code that reads lastDeferExit/lastDeferCount.
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Run the defers most-recently-recorded first.
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain) // runs defer i
		bEnd := s.f.NewBlock(ssa.BlockPlain)  // continues to next defer

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])

		// Branch on whether bit i of deferBits is set:
		// (deferBits & (1<<i)) == 0 -> skip to bEnd, else run bCond.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)
		b.AddEdgeTo(bCond)
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this defer's bit (deferBits &^= 1<<i) and store it back
		// to memory before making the call, so a panic during the call
		// does not re-run this defer.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		// Keep the SSA-tracked copy in sync with memory.
		s.vars[deferBitsVar] = maskedval

		// Emit the deferred call. Open-coded defers are argumentless and
		// resultless, so the ABI analysis sees no args/results.
		fn := r.n.X
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Indirect call: reload the saved closure, nil-check it,
			// and call through its code pointer (first word).
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Direct call to a statically known function.
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Keep the saved closure slot alive across the call so its
		// pointer is visible to the GC/liveness analysis.
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4929
// callResult generates SSA for the call n of kind k and returns the
// value of the call's first result (nil when the callee has no results
// or k is not a normal call).
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false)
}

// callAddr is like callResult, but returns the address of the call's
// first result rather than its value.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true)
}
4937
4938
4939
// call generates SSA for the call n using the specified call kind
// (normal, tail, go, defer, or stack-allocated defer).
// It returns the value of the call's first result (or its address when
// returnResultAddr is set), or nil if the call produces no usable result.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Value {
	s.prevCall = nil
	var callee *ir.Name    // target function, if a static call
	var closure *ssa.Value // pointer to closure to run, if a dynamic call
	var codeptr *ssa.Value // pointer to target code, if a dynamic call
	var rcvr *ssa.Value    // receiver, for interface calls
	fn := n.X
	var ACArgs []*types.Type    // AuxCall argument types
	var ACResults []*types.Type // AuxCall result types
	var callArgs []*ssa.Value   // late-expansion call arguments

	callABI := s.f.ABIDefault

	if !buildcfg.Experiment.RegabiArgs {
		// Without the register-args experiment, functions whose names
		// carry the magic suffix are forced to ABI1 for testing.
		var magicFnNameSym *types.Sym
		if fn.Name() != nil {
			magicFnNameSym = fn.Name().Sym()
			ss := magicFnNameSym.Name
			if strings.HasSuffix(ss, magicNameDotSuffix) {
				callABI = s.f.ABI1
			}
		}
		if magicFnNameSym == nil && n.Op() == ir.OCALLINTER {
			magicFnNameSym = fn.(*ir.SelectorExpr).Sym()
			ss := magicFnNameSym.Name
			if strings.HasSuffix(ss, magicNameDotSuffix[1:]) {
				callABI = s.f.ABI1
			}
		}
	}

	// go/defer calls are expected to have been rewritten to take no
	// arguments and produce no results by this point.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.X.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			fn := fn.(*ir.Name)
			callee = fn
			if buildcfg.Experiment.RegabiArgs {
				// This is a static call, so it may be
				// a direct call to a non-ABIInternal
				// function. fn.Func may be nil for
				// some compiler-generated functions,
				// but those are all ABIInternal.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// TODO(register args) remove after register abi is working.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			break
		}
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function needs to panic when the function is invoked,
			// not at the point of the defer statement.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}

	if !buildcfg.Experiment.RegabiArgs {
		if regAbiForFuncType(n.X.Type().FuncType()) {
			// Magic last type in input args to call marks it ABI1.
			callABI = s.f.ABI1
		}
	}

	params := callABI.ABIAnalyze(n.X.Type(), false /* do not set (register) nNames from caller side -- can cause races */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.X.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		// Make a defer struct d on the stack.
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}

		t := deferstruct()
		d := typecheck.TempAt(n.Pos(), s.curfn, t)

		s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, d, s.mem())
		addr := s.addr(d)

		// Only the fn field (field 5 of the runtime _defer layout
		// produced by deferstruct) is stored here; the remaining
		// fields are filled in by deferprocStack at run time.
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(5), addr),
			closure)

		// Call runtime.deferprocStack with a pointer to the _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(nil, ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize) // deferprocStack takes a *_defer arg
	} else {
		// Store arguments to stack, including defer/go arguments and
		// receiver for method calls. These are written in SP-offset order.
		argStart := base.Ctxt.FixedFrameSize()
		// Defer/go args.
		if k != callNormal && k != callTail {
			// Write closure (arg to newproc/deferproc).
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.X.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block when there are open-coded defers, so the
		// putArg values below are not emitted into the entry block.
		// NOTE(review): presumably this keeps arg evaluation ordered
		// after open-defer bookkeeping — confirm against openDeferSave.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Params().Field(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// Select the call target.
		switch {
		case k == callDefer:
			aux := ssa.StaticAuxCall(ir.Syms.Deferproc, s.f.ABIDefault.ABIAnalyzeTypes(nil, ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(nil, ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a
			// closure is always safe, but IsSanitizerSafeAddr
			// can't always figure that out currently, and it's
			// critical that we not clobber any arguments already
			// stored onto the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(nil, ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call: the receiver is the first input parameter.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case callee != nil:
			aux := ssa.StaticAuxCall(callTargetLSym(callee), params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call does not use stack; we reuse the caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // call operations carry the argsize of the callee along with them
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Insert VarLive opcodes for the keep-alive names.
	for _, name := range n.KeepAlive {
		s.stmt(ir.NewUnaryExpr(n.Pos(), ir.OVARLIVE, name))
	}

	// Finish the block for defer control flow.
	if k == callDefer || k == callDeferStack {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		// Add the recover edge to the exit code.
		r := s.f.NewBlock(ssa.BlockPlain)
		s.startBlock(r)
		s.exit()
		b.AddEdgeTo(r)
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if res.NumFields() == 0 || k != callNormal {
		// call has no return value; continue with the next statement.
		return nil
	}
	fp := res.Field(0)
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
5183
5184
5185
5186 func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
5187 if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
5188
5189
5190 s.nilCheck(closure)
5191 }
5192 }
5193
5194
5195
5196 func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
5197 i := s.expr(fn.X)
5198 itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
5199 s.nilCheck(itab)
5200 itabidx := fn.Offset() + 2*int64(types.PtrSize) + 8
5201 closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
5202 rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
5203 return closure, rcvr
5204 }
5205
5206
5207
5208 func etypesign(e types.Kind) int8 {
5209 switch e {
5210 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5211 return -1
5212 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5213 return +1
5214 }
5215 return 0
5216 }
5217
5218
5219
// addr converts the address of the expression n to SSA, adds it to s
// and returns the SSA result. The returned value is a pointer to n's
// storage; n must be addressable (not SSA-able).
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset computes the address of offset bytes into lsym.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: Make OpAddr use AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address is stored in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		// The assertion result must be a load whose memory argument is
		// still the current memory; return the load's address operand.
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5331
5332
5333
5334 func (s *state) canSSA(n ir.Node) bool {
5335 if base.Flag.N != 0 {
5336 return false
5337 }
5338 for {
5339 nn := n
5340 if nn.Op() == ir.ODOT {
5341 nn := nn.(*ir.SelectorExpr)
5342 n = nn.X
5343 continue
5344 }
5345 if nn.Op() == ir.OINDEX {
5346 nn := nn.(*ir.IndexExpr)
5347 if nn.X.Type().IsArray() {
5348 n = nn.X
5349 continue
5350 }
5351 }
5352 break
5353 }
5354 if n.Op() != ir.ONAME {
5355 return false
5356 }
5357 return s.canSSAName(n.(*ir.Name)) && TypeOK(n.Type())
5358 }
5359
5360 func (s *state) canSSAName(name *ir.Name) bool {
5361 if name.Addrtaken() || !name.OnStack() {
5362 return false
5363 }
5364 switch name.Class {
5365 case ir.PPARAMOUT:
5366 if s.hasdefer {
5367
5368
5369
5370
5371
5372 return false
5373 }
5374 if s.cgoUnsafeArgs {
5375
5376
5377 return false
5378 }
5379 }
5380 return true
5381
5382 }
5383
5384
5385 func TypeOK(t *types.Type) bool {
5386 types.CalcSize(t)
5387 if t.Size() > int64(4*types.PtrSize) {
5388
5389
5390
5391 return false
5392 }
5393 switch t.Kind() {
5394 case types.TARRAY:
5395
5396
5397
5398 if t.NumElem() <= 1 {
5399 return TypeOK(t.Elem())
5400 }
5401 return false
5402 case types.TSTRUCT:
5403 if t.NumFields() > ssa.MaxStruct {
5404 return false
5405 }
5406 for _, t1 := range t.Fields().Slice() {
5407 if !TypeOK(t1.Type) {
5408 return false
5409 }
5410 }
5411 return true
5412 default:
5413 return true
5414 }
5415 }
5416
5417
5418 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5419 p := s.expr(n)
5420 if bounded || n.NonNil() {
5421 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5422 s.f.Warnl(lineno, "removed nil check")
5423 }
5424 return p
5425 }
5426 s.nilCheck(p)
5427 return p
5428 }
5429
5430
5431
5432
5433 func (s *state) nilCheck(ptr *ssa.Value) {
5434 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5435 return
5436 }
5437 s.newValue2(ssa.OpNilCheck, types.TypeVoid, ptr, s.mem())
5438 }
5439
5440
5441
5442
5443
5444
5445
// boundsCheck generates bounds checking code. Checks if 0 <= idx <[=] len,
// branching to an exit (panic) block if not. Starts a new block on return.
// On input, len must be converted to full int width and be nonnegative.
// Returns idx converted to full int width.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is flag-disabled, then no check necessary,
		// just return the extended index.
		//
		// Here, bounded == true if the compiler generated the index itself,
		// such as in the expansion of a slice initializer. These indexes are
		// compiler-generated, not Go program variables, so they cannot be
		// attacker-controlled, so we can omit Spectre masking as well.
		//
		// Note that we do not want to omit Spectre masking in code like:
		//
		//	if 0 <= i && i < len(x) {
		//		use(x[i])
		//	}
		//
		// Lucky for us, bounded==false for that code.
		// In that case (handled below), we emit a bound check (and Spectre mask)
		// and then the prove pass will remove the bounds check.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	if !idx.Type.IsSigned() {
		// Switch to the unsigned variant of the bounds-failure kind so
		// the panic message matches the index's type.
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Index checks use idx < len; slice-bound checks use idx <= len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// Wasm raises the bounds failure via a runtime call rather
		// than the PanicBounds pseudo-op.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an appropriate mask to avoid
	// speculative out-of-bounds accesses.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5532
5533
// check branches on cmp (a bool): if it is false, the runtime function
// fn is called to panic. Panic blocks are cached per (fn, position) so
// repeated checks at the same source line share one panic block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call takes/returns memory to ensure that the right
		// memory state is observed if the panic happens.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5556
5557 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5558 needcheck := true
5559 switch b.Op {
5560 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5561 if b.AuxInt != 0 {
5562 needcheck = false
5563 }
5564 }
5565 if needcheck {
5566
5567 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5568 s.check(cmp, ir.Syms.Panicdivide)
5569 }
5570 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5571 }
5572
5573
5574
5575
5576
// rtcall issues a call to the given runtime function fn with the listed
// args and returns a slice of the results, one per entry in results.
// If returns is false, the call does not return (e.g. a panic helper)
// and the current block is finished as an exit block.
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Accumulate argument stack offsets (for AuxInt) and types.
	off := base.Ctxt.FixedFrameSize()
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.Rnd(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.Rnd(off, int64(types.RegSize))

	// Accumulate result sizes (offR itself is otherwise unused below).
	offR := off
	for _, t := range results {
		offR = types.Rnd(offR, t.Alignment())
		offR += t.Size()
	}

	// Issue the call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(nil, callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish the current block as an exit block.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.FixedFrameSize()
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.Rnd(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.Rnd(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5635
5636
// storeType stores the value right of type t to the address left,
// splitting the store into scalar and pointer parts when a write
// barrier may be needed. skip selects header fields to omit (see
// skipMask); leftIsStmt marks the store as a statement boundary.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not need a write barrier. Store the whole type.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store scalar fields first, so write barrier stores for
	// pointer fields can be grouped together, and scalar values
	// don't need to be live across the write barrier call.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5656
5657
// storeTypeScalars stores the non-pointer parts of the value right of
// type t to the address left. Pointer parts are stored separately by
// storeTypePtrs (see storeType).
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap memory need no write barrier,
			// so store them here with the scalars.
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store only the length word; the data pointer is a pointer part.
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Store the len and cap words; the data pointer is a pointer part.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		// Recurse into each field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5705
5706
// storeTypePtrs stores the pointer parts of the value right of type t
// to the address left. Scalar parts are handled by storeTypeScalars.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Not-in-heap pointers were stored with the scalars.
			break
		}
		s.store(t, left, right)
	case t.IsString():
		// Store only the data pointer; the length was stored with the scalars.
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		// Store only the data pointer; len/cap were stored with the scalars.
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// itab was stored with the scalars; store only the data word here.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		// Recurse into the fields that contain pointers.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5745
5746
5747 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5748 var a *ssa.Value
5749 if !TypeOK(t) {
5750 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5751 } else {
5752 a = s.expr(n)
5753 }
5754 return a
5755 }
5756
5757 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
5758 pt := types.NewPtr(t)
5759 var addr *ssa.Value
5760 if base == s.sp {
5761
5762 addr = s.constOffPtrSP(pt, off)
5763 } else {
5764 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
5765 }
5766
5767 if !TypeOK(t) {
5768 a := s.addr(n)
5769 s.move(t, addr, a)
5770 return
5771 }
5772
5773 a := s.expr(n)
5774 s.storeType(t, addr, a, 0, false)
5775 }
5776
5777
5778
5779
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), v)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values.
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Checks run outermost-bound-first (k against cap, j against k,
	// i against j) so each check can use the previous result as its limit.
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice.
	// For strings the capacity of the result is unimportant. However,
	// we use rcap to test if we've generated a zero-length slice.
	// Use length of strings for that.
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice.
	//
	// Generate the following code assuming that indexes are in bounds.
	// The masking is to make sure that we don't generate a slice
	// that points to the next object in memory. We cannot just set
	// the pointer to nil because then we would create a nil slice or
	// string.
	//
	//     rlen = j - i
	//     rcap = k - i
	//     delta = i * elemsize
	//     rptr = p + delta&mask(rcap)
	//     result = (SliceMake rptr rlen rcap)
	// where mask(x) is 0 if x==0 and -1 if x>0.
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// The delta is the number of bytes to offset ptr by.
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// If we're slicing to the point where the capacity is zero,
	// zero out the delta.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// Compute rptr = ptr + delta.
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5892
// u642fcvtTab holds the SSA ops needed to convert an unsigned 64-bit
// integer to a float of a given width; see uint64Tofloat.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one func(*state, *types.Type, int64) *ssa.Value
}

// Op table for uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// Op table for uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}

// uint64Tofloat64 converts the uint64 value x (expression n) to float64.
func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

// uint64Tofloat32 converts the uint64 value x (expression n) to float32.
func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5925
// uint64Tofloat converts the uint64 value x (of type ft, from expression n)
// to the float type tt, using the ops in cvttab. It generates:
//
//	if x >= 0 {
//		result = (floatY) x
//	} else {
//		y = uintX(x) ; y = x & 1
//		z = uintX(x) ; z = z >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result
//	}
//
// In the negative (high-bit-set) case, halving with the low bit folded in
// keeps the value in signed range while preserving correct rounding;
// doubling the converted float restores the magnitude.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: value fits in the signed range; convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: halve (folding in the low bit), convert, then double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5983
// u322fcvtTab holds the SSA ops needed to convert an unsigned 32-bit
// integer to a float of a given width; see uint32Tofloat.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// Op table for uint32 -> float64.
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// Op table for uint32 -> float32 (slow path converts via float64, then narrows).
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}

// uint32Tofloat64 converts the uint32 value x (expression n) to float64.
func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

// uint32Tofloat32 converts the uint32 value x (expression n) to float32.
func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
6005
// uint32Tofloat converts the uint32 value x (of type ft, from expression n)
// to the float type tt, using the ops in cvttab. It generates:
//
//	if x >= 0 {
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32))
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: high bit clear; convert directly.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: sign-extended conversion came out negative; add 2^32
	// in float64, then narrow to the target type if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6043
6044
// referenceTypeBuiltin generates code for the len/cap builtins for maps
// and channels (x is the map/chan pointer). It generates:
//
//	if n == nil {
//		return 0
//	} else {
//		// len
//		return *((*int)n)
//		// cap
//		return *(((*int)n)+1)
//	}
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// nil map/chan: result is zero.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Non-nil: load from the header words.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		// length is stored in the first word.
		s.vars[n] = s.load(lenType, x)
	case ir.OCAP:
		// capacity is stored in the second word.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
6095
6096 type f2uCvtTab struct {
6097 ltf, cvt2U, subf, or ssa.Op
6098 floatValue func(*state, *types.Type, float64) *ssa.Value
6099 intValue func(*state, *types.Type, int64) *ssa.Value
6100 cutoff uint64
6101 }
6102
// f32_u64 describes float32 → uint64 conversion.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 describes float64 → uint64 conversion.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 describes float32 → uint32 conversion.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 describes float64 → uint32 conversion.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
6142
// float32ToUint64 converts the float32 value x to a uint64 via floatToUint.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 converts the float64 value x to a uint64 via floatToUint.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 converts the float32 value x to a uint32 via floatToUint.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 converts the float64 value x to a uint32 via floatToUint.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
6157
// floatToUint lowers a float→unsigned conversion using only signed
// conversions. The generated code is equivalent to:
//
//	cutoff := 1 << (intY_Size-1)
//	if x < floatX(cutoff) {
//		result = uintY(x)                     // fits in signed range; direct convert
//	} else {
//		y = x - floatX(cutoff)                // shift into signed range
//		z = uintY(y)
//		result = z | -(cutoff)                // restore the high bit
//	}
//
// n is the IR node the result variable is keyed on; ft and tt are the
// source float type and destination unsigned type.
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // most inputs are expected below the cutoff

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: x < cutoff, a plain signed conversion is correct.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: subtract the cutoff, convert, then or the high bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
6198
6199
6200
6201
// dottype generates SSA for a type assertion node n.
// commaok indicates whether the caller used the x, ok := y.(T) form;
// the resulting boolean is returned in resok (nil when commaok is false).
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface value
	target := s.reflectType(n.Type()) // pointer to the target type's runtime descriptor
	var targetItab *ssa.Value
	if n.Itab != nil {
		// Precomputed itab for interface-to-concrete assertions.
		targetItab = s.expr(n.Itab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, target, targetItab, commaok)
}
6211
// dynamicDottype generates SSA for a dynamic type assertion node, where
// the asserted type is itself a runtime value (e.g. inside generic code).
// commaok has the same meaning as in dottype.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	target := s.expr(n.T)
	var itab *ssa.Value
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		// Nonempty interface asserted to a concrete type: n.T is the itab;
		// load the type descriptor out of the itab (one pointer-size word in).
		byteptr := s.f.Config.Types.BytePtr
		itab = target
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab))
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, target, itab, commaok)
}
6223
6224
6225
6226
6227
6228
6229 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, target, targetItab *ssa.Value, commaok bool) (res, resok *ssa.Value) {
6230 byteptr := s.f.Config.Types.BytePtr
6231 if dst.IsInterface() {
6232 if dst.IsEmptyInterface() {
6233
6234
6235 if base.Debug.TypeAssert > 0 {
6236 base.WarnfAt(pos, "type assertion inlined")
6237 }
6238
6239
6240 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6241
6242 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6243
6244 if src.IsEmptyInterface() && commaok {
6245
6246 return iface, cond
6247 }
6248
6249
6250 b := s.endBlock()
6251 b.Kind = ssa.BlockIf
6252 b.SetControl(cond)
6253 b.Likely = ssa.BranchLikely
6254 bOk := s.f.NewBlock(ssa.BlockPlain)
6255 bFail := s.f.NewBlock(ssa.BlockPlain)
6256 b.AddEdgeTo(bOk)
6257 b.AddEdgeTo(bFail)
6258
6259 if !commaok {
6260
6261 s.startBlock(bFail)
6262 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6263
6264
6265 s.startBlock(bOk)
6266 if src.IsEmptyInterface() {
6267 res = iface
6268 return
6269 }
6270
6271 off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
6272 typ := s.load(byteptr, off)
6273 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6274 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6275 return
6276 }
6277
6278 s.startBlock(bOk)
6279
6280
6281 off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
6282 s.vars[typVar] = s.load(byteptr, off)
6283 s.endBlock()
6284
6285
6286 s.startBlock(bFail)
6287 s.vars[typVar] = itab
6288 s.endBlock()
6289
6290
6291 bEnd := s.f.NewBlock(ssa.BlockPlain)
6292 bOk.AddEdgeTo(bEnd)
6293 bFail.AddEdgeTo(bEnd)
6294 s.startBlock(bEnd)
6295 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6296 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6297 resok = cond
6298 delete(s.vars, typVar)
6299 return
6300 }
6301
6302 if base.Debug.TypeAssert > 0 {
6303 base.WarnfAt(pos, "type assertion not inlined")
6304 }
6305 if !commaok {
6306 fn := ir.Syms.AssertI2I
6307 if src.IsEmptyInterface() {
6308 fn = ir.Syms.AssertE2I
6309 }
6310 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6311 tab := s.newValue1(ssa.OpITab, byteptr, iface)
6312 tab = s.rtcall(fn, true, []*types.Type{byteptr}, target, tab)[0]
6313 return s.newValue2(ssa.OpIMake, dst, tab, data), nil
6314 }
6315 fn := ir.Syms.AssertI2I2
6316 if src.IsEmptyInterface() {
6317 fn = ir.Syms.AssertE2I2
6318 }
6319 res = s.rtcall(fn, true, []*types.Type{dst}, target, iface)[0]
6320 resok = s.newValue2(ssa.OpNeqInter, types.Types[types.TBOOL], res, s.constInterface(dst))
6321 return
6322 }
6323
6324 if base.Debug.TypeAssert > 0 {
6325 base.WarnfAt(pos, "type assertion inlined")
6326 }
6327
6328
6329 direct := types.IsDirectIface(dst)
6330 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6331 if base.Debug.TypeAssert > 0 {
6332 base.WarnfAt(pos, "type assertion inlined")
6333 }
6334 var wantedFirstWord *ssa.Value
6335 if src.IsEmptyInterface() {
6336
6337 wantedFirstWord = target
6338 } else {
6339
6340 wantedFirstWord = targetItab
6341 }
6342
6343 var tmp ir.Node
6344 var addr *ssa.Value
6345 if commaok && !TypeOK(dst) {
6346
6347
6348 tmp, addr = s.temp(pos, dst)
6349 }
6350
6351 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6352 b := s.endBlock()
6353 b.Kind = ssa.BlockIf
6354 b.SetControl(cond)
6355 b.Likely = ssa.BranchLikely
6356
6357 bOk := s.f.NewBlock(ssa.BlockPlain)
6358 bFail := s.f.NewBlock(ssa.BlockPlain)
6359 b.AddEdgeTo(bOk)
6360 b.AddEdgeTo(bFail)
6361
6362 if !commaok {
6363
6364 s.startBlock(bFail)
6365 taddr := s.reflectType(src)
6366 if src.IsEmptyInterface() {
6367 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6368 } else {
6369 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6370 }
6371
6372
6373 s.startBlock(bOk)
6374 if direct {
6375 return s.newValue1(ssa.OpIData, dst, iface), nil
6376 }
6377 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6378 return s.load(dst, p), nil
6379 }
6380
6381
6382
6383 bEnd := s.f.NewBlock(ssa.BlockPlain)
6384
6385
6386 valVar := ssaMarker("val")
6387
6388
6389 s.startBlock(bOk)
6390 if tmp == nil {
6391 if direct {
6392 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6393 } else {
6394 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6395 s.vars[valVar] = s.load(dst, p)
6396 }
6397 } else {
6398 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6399 s.move(dst, addr, p)
6400 }
6401 s.vars[okVar] = s.constBool(true)
6402 s.endBlock()
6403 bOk.AddEdgeTo(bEnd)
6404
6405
6406 s.startBlock(bFail)
6407 if tmp == nil {
6408 s.vars[valVar] = s.zeroVal(dst)
6409 } else {
6410 s.zero(dst, addr)
6411 }
6412 s.vars[okVar] = s.constBool(false)
6413 s.endBlock()
6414 bFail.AddEdgeTo(bEnd)
6415
6416
6417 s.startBlock(bEnd)
6418 if tmp == nil {
6419 res = s.variable(valVar, dst)
6420 delete(s.vars, valVar)
6421 } else {
6422 res = s.load(dst, addr)
6423 s.vars[memVar] = s.newValue1A(ssa.OpVarKill, types.TypeMem, tmp.(*ir.Name), s.mem())
6424 }
6425 resok = s.variable(okVar, types.Types[types.TBOOL])
6426 delete(s.vars, okVar)
6427 return res, resok
6428 }
6429
6430
// temp allocates a new compiler temporary of type t at position pos,
// records a VarDef in the memory chain, and returns the temp along with
// its address value. The VarDef must precede the address computation.
func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
	tmp := typecheck.TempAt(pos, s.curfn, t)
	s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
	addr := s.addr(tmp)
	return tmp, addr
}
6437
6438
// variable returns the value of variable n in the current block.
// If the value is not yet known (it will be defined by a predecessor
// reached later), a FwdRef placeholder is created and recorded for
// resolution when the SSA form is completed.
func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
	v := s.vars[n]
	if v != nil {
		return v
	}
	v = s.fwdVars[n]
	if v != nil {
		return v
	}

	if s.curBlock == s.f.Entry {
		// No variable should be live at entry.
		s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, n, v)
	}

	// Make a FwdRef, which records a value that's live on block input.
	// We'll find the matching definition as part of insertPhis.
	v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
	s.fwdVars[n] = v
	if n.Op() == ir.ONAME {
		s.addNamedValue(n.(*ir.Name), v)
	}
	return v
}
6462
// mem returns the current memory pseudo-value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6466
6467 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6468 if n.Class == ir.Pxxx {
6469
6470 return
6471 }
6472 if ir.IsAutoTmp(n) {
6473
6474 return
6475 }
6476 if n.Class == ir.PPARAMOUT {
6477
6478
6479 return
6480 }
6481 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6482 values, ok := s.f.NamedValues[loc]
6483 if !ok {
6484 s.f.Names = append(s.f.Names, &loc)
6485 s.f.CanonicalLocalSlots[loc] = &loc
6486 }
6487 s.f.NamedValues[loc] = append(values, v)
6488 }
6489
6490
// Branch is an unresolved branch: a branch instruction whose target
// block's address is filled in after all blocks have been emitted.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target block
}
6495
6496
// State contains state needed during Prog generation for a single function.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// bstart remembers where each block starts (indexed by block ID).
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number.
	// Used to move statement marks to the beginning of such runs.
	lineRunStart *obj.Prog

	// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
	OnWasmStackSkipped int
}
6527
// FuncInfo returns the FuncInfo for the function currently being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6531
6532
// Prog appends a new Prog with opcode as, maintaining the invariant that
// at most one instruction per source line carries the is-statement mark:
// the mark is pushed to the first instruction of each same-line run.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement start to the beginning of any same-line run.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		// New line run starts here.
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		// Move the statement mark to the run's first instruction.
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6548
6549
// Pc returns the current Prog (the next instruction slot to be emitted).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6553
6554
// SetPos sets the source position for subsequently emitted instructions.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6558
6559
6560
6561
// Br emits a single branch instruction and records its target block.
// The target address is resolved later once all blocks have been placed.
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
6568
6569
6570
6571
6572
6573
// DebugFriendlySetPosFrom adjusts the position of subsequently emitted
// instructions based on value v, in a way that avoids confusing the
// debugger: register-shuffling ops inherit the current line without a
// statement mark, and redundant statement marks on repeated lines are
// suppressed.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements; never mark them as such.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt in case
			// we're already at the same line as the last emitted
			// statement — only one instruction per line should
			// carry the mark.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// Keep the current (statement-marked) position;
					// overwriting it with a not-statement copy of the
					// same line would lose the mark.
					return
				}
				p = p.WithNotStmt()
				// NOTE(review): default-statement positions fall
				// through here and are demoted; explicit statement
				// marks are preserved.
			}
			s.SetPos(p)
		} else {
			// Unknown position: stay on the current line, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6612
6613
// emitArgInfo emits the FUNCDATA symbol describing the function's
// argument layout (used by the runtime for traceback argument printing).
// Functions with no receiver and no parameters get no arg info.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a FUNCDATA pointing at the arg info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(objabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6631
6632
// EmitArgInfo emits the metadata symbol describing the layout of the
// function's input parameters, as a byte stream consumed by the runtime
// when printing traceback arguments.
//
// Encoding (as implemented below):
//   - a non-aggregate argument is two bytes: frame offset, then size
//     (or the single byte _offsetTooLarge if the offset won't fit);
//   - an aggregate is bracketed by _startAgg/_endAgg with its components
//     (recursively) in between;
//   - _dotdotdot truncates the stream once `limit` components have been
//     written or nesting exceeds maxDepth;
//   - the stream ends with _endSeq.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// Aggregates are expanded component by component.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	// Special sentinel bytes; offsets >= _special are unrepresentable.
	const (
		_endSeq         = 0xff
		_startAgg       = 0xfe
		_endAgg         = 0xfd
		_dotdotdot      = 0xfc
		_offsetTooLarge = 0xfb
		_special        = 0xf0
	)

	const (
		limit    = 10 // print no more than 10 args/components
		maxDepth = 5  // no more than 5 layers of nesting
		// maxLen is a conservative upper bound of the data length:
		// each component uses at most 2 bytes plus bracketing, times
		// the component limit, plus the terminator.
		maxLen = (maxDepth*3+2)*limit + 1
	)

	wOff := 0 // write offset into the symbol's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element as (offset, size),
	// or the too-large sentinel.
	write1 := func(sz, offset int64) {
		if offset >= _special {
			writebyte(_offsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// Visit t recursively and write it out. It returns whether to
	// continue visiting (false once the component limit is reached).
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= limit {
			writebyte(_dotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(_startAgg)
		depth++
		if depth >= maxDepth {
			// Too deeply nested: elide the contents.
			writebyte(_dotdotdot)
			writebyte(_endAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-size words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Pointer, length, capacity.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary halves.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // an empty array still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // an empty struct still counts as a component
				break
			}
			for _, field := range t.Fields().Slice() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(_endAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of instantiated generic functions.
		// NOTE(review): relies on "[" appearing only in instantiated names.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(_endSeq)
	if wOff > maxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6770
6771
// emitWrappedFuncInfo emits the FUNCDATA describing which function this
// one wraps (method wrappers), so tracebacks can elide the wrapper.
// Skipped in linkshared mode.
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Relative reference (SymPtrOff) to another shared object doesn't work.
		// Untested.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a FUNCDATA pointing at the wrap info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(objabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6798
6799
// genssa appends entries to pp for each instruction in f:
// it walks every block and value, asks the architecture backend to emit
// machine instructions, interleaves liveness/arg-liveness PCDATA,
// handles inline marks, open-coded defer returns, location lists for
// debugging, branch fixup, and the various SSA dump outputs.
func genssa(f *ssa.Func, pp *objw.Progs) {
	var s State
	s.ABI = f.OwnAux.Fn.ABI()

	e := f.Frontend().(*ssafn)

	s.livenessMap, s.partLiveArgs = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
	emitArgInfo(e, f, pp)
	argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)

	openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
	if openDeferInfo != nil {
		// This function uses open-coded defers: emit the FUNCDATA
		// pointing at the open-coded defer info.
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(objabi.FUNCDATA_OpenCodedDeferInfo)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = openDeferInfo
	}

	emitWrappedFuncInfo(e, pp)

	// Remember where each block starts.
	s.bstart = make([]*obj.Prog, f.NumBlocks())
	s.pp = pp
	var progToValue map[*obj.Prog]*ssa.Value
	var progToBlock map[*obj.Prog]*ssa.Block
	var valueToProgAfter []*obj.Prog // the first Prog following computation of value with that ID
	gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
	if gatherPrintInfo {
		progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
		progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
		f.Logf("genssa %s\n", f.Name)
		progToBlock[s.pp.Next] = f.Blocks[0]
	}

	if base.Ctxt.Flag_locationlists {
		// Reuse (and clear) the per-function cache slice.
		if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
			f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
		}
		valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
		for i := range valueToProgAfter {
			valueToProgAfter[i] = nil
		}
	}

	// Remember the first statement-marked position in the entry block and
	// demote it to default; it is re-applied to the first "real"
	// instruction emitted (see the default case in the value loop below).
	firstPos := src.NoXPos
	for _, v := range f.Entry.Values {
		if v.Pos.IsStmt() == src.PosIsStmt {
			firstPos = v.Pos
			v.Pos = firstPos.WithDefaultStmt()
			break
		}
	}

	// inlMarks maps the no-op instruction emitted for each OpInlMark to
	// the index of the inlining it marks; inlMarkList preserves order.
	var inlMarks map[*obj.Prog]int32
	var inlMarkList []*obj.Prog

	// inlMarksByPos groups inline-mark instructions by (column-1) source
	// position so a later real instruction at that position can absorb them.
	var inlMarksByPos map[src.XPos][]*obj.Prog

	var argLiveIdx int = -1 // argument liveness info index

	// Emit basic blocks.
	for i, b := range f.Blocks {
		s.bstart[b.ID] = s.pp.Next
		s.lineRunStart = nil
		s.SetPos(s.pp.Pos.WithNotStmt()) // It needs a non-empty Pos, but cannot be a statement boundary (yet).

		// Default liveness for instructions with no explicit entry:
		// no stack map, unsafe-point status from whole-function analysis.
		s.pp.NextLive = objw.LivenessIndex{StackMapIndex: -1, IsUnsafePoint: liveness.IsUnsafe(f)}

		if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
			argLiveIdx = idx
			p := s.pp.Prog(obj.APCDATA)
			p.From.SetConst(objabi.PCDATA_ArgLiveIndex)
			p.To.SetConst(int64(idx))
		}

		// Emit values in block.
		Arch.SSAMarkMoves(&s, b)
		for _, v := range b.Values {
			x := s.pp.Next
			s.DebugFriendlySetPosFrom(v)

			if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
				v.Fatalf("input[0] and output not in same register %s", v.LongString())
			}

			switch v.Op {
			case ssa.OpInitMem:
				// memory arg needs no code
			case ssa.OpArg:
				// input args need no code
			case ssa.OpSP, ssa.OpSB:
				// nothing to do
			case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
				// nothing to do
			case ssa.OpGetG:
				// nothing to do when there's a g register,
				// and checkLower complains if there's not
			case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpVarKill:
				// nothing to do; already used by liveness
			case ssa.OpPhi:
				CheckLoweredPhi(v)
			case ssa.OpConvert:
				// nothing to do; no-op conversion for liveness
				if v.Args[0].Reg() != v.Reg() {
					v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
				}
			case ssa.OpInlMark:
				// Emit a placeholder no-op and remember it; a later real
				// instruction at the same position will absorb the mark.
				p := Arch.Ginsnop(s.pp)
				if inlMarks == nil {
					inlMarks = map[*obj.Prog]int32{}
					inlMarksByPos = map[src.XPos][]*obj.Prog{}
				}
				inlMarks[p] = v.AuxInt32()
				inlMarkList = append(inlMarkList, p)
				pos := v.Pos.AtColumn1()
				inlMarksByPos[pos] = append(inlMarksByPos[pos], p)

			default:
				// Re-apply the entry block's deferred statement mark to
				// the first substantive instruction.
				if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
					s.SetPos(firstPos)
					firstPos = src.NoXPos
				}

				// Attach this safe point to the next
				// instruction.
				s.pp.NextLive = s.livenessMap.Get(v)

				// let the backend handle it
				Arch.SSAGenValue(&s, v)
			}

			if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
				argLiveIdx = idx
				p := s.pp.Prog(obj.APCDATA)
				p.From.SetConst(objabi.PCDATA_ArgLiveIndex)
				p.To.SetConst(int64(idx))
			}

			if base.Ctxt.Flag_locationlists {
				valueToProgAfter[v.ID] = s.pp.Next
			}

			if gatherPrintInfo {
				for ; x != s.pp.Next; x = x.Link {
					progToValue[x] = v
				}
			}
		}

		// If this block generated no instructions and is a self-loop,
		// insert a no-op so the loop has a distinct PC, and give it a
		// bogus line so it doesn't get mistaken for a statement.
		if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
			p := Arch.Ginsnop(s.pp)
			p.Pos = p.Pos.WithIsStmt()
			if b.Pos == src.NoXPos {
				b.Pos = p.Pos
				if b.Pos == src.NoXPos {
					b.Pos = pp.Text.Pos
				}
			}
			b.Pos = b.Pos.WithBogusLine() // Debuggers are not good about infinite loops, force a change in line number
		}

		// Emit control flow instructions for block.
		var next *ssa.Block
		if i < len(f.Blocks)-1 && base.Flag.N == 0 {
			// If -N, leave next==nil so every block with successors
			// ends in a JMP (except call blocks etc.).
			// Helps keep line numbers to blocks.
			next = f.Blocks[i+1]
		}
		x := s.pp.Next
		s.SetPos(b.Pos)
		Arch.SSAGenBlock(&s, b, next)
		if gatherPrintInfo {
			for ; x != s.pp.Next; x = x.Link {
				progToBlock[x] = b
			}
		}
	}
	if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
		// We need the return address of a panic call to
		// still be inside the function in question. So insert
		// a no-op after the last instruction if the function
		// ends in a panicking block.
		Arch.Ginsnop(pp)
	}
	if openDeferInfo != nil {
		// When doing open-coded defers, generate a disconnected call to
		// deferreturn and a return. This will be used to during panic
		// recovery to unwind the stack and return back to the runtime.
		s.pp.NextLive = s.livenessMap.DeferReturn
		p := pp.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Deferreturn

		// Load results into registers. So when a deferred function
		// recovers a panic, it will return to caller with right results.
		// The results are already in memory, because they are not SSA'd
		// when the function has defers (see canSSAName).
		for _, o := range f.OwnAux.ABIInfo().OutParams() {
			n := o.Name.(*ir.Name)
			rts, offs := o.RegisterTypesAndOffsets()
			for i := range o.Registers {
				Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
			}
		}

		pp.Prog(obj.ARET)
	}

	if inlMarks != nil {
		// We have some inline marks. Try to find other instructions we're
		// going to emit anyway, and use those instructions instead of the
		// inline marks.
		for p := pp.Text; p != nil; p = p.Link {
			if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.APCALIGN || Arch.LinkArch.Family == sys.Wasm {
				// Don't use 0-sized instructions as inline marks, because
				// they can't be the target of a CALL-style relocation.
				// (Wasm instructions have no PCs at all.)
				continue
			}
			if _, ok := inlMarks[p]; ok {
				// Don't use an inline mark itself to represent an
				// inline mark.
				continue
			}
			pos := p.Pos.AtColumn1()
			s := inlMarksByPos[pos]
			if len(s) == 0 {
				continue
			}
			for _, m := range s {
				// We found an instruction with the same source position as
				// some of the inline marks.
				// Use this instruction instead and turn the mark itself
				// into a no-op that will be removed.
				p.Pos = p.Pos.WithIsStmt()
				pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
				// Make the inline mark a real nop, so it doesn't generate any code.
				m.As = obj.ANOP
				m.Pos = src.NoXPos
				m.From = obj.Addr{}
				m.To = obj.Addr{}
			}
			delete(inlMarksByPos, pos)
		}
		// Any unmatched inline marks now need to be added to the inlining tree (and will generate a nop instruction).
		for _, p := range inlMarkList {
			if p.As != obj.ANOP {
				pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
			}
		}
	}

	if base.Ctxt.Flag_locationlists {
		var debugInfo *ssa.FuncDebug
		debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
		// Build location lists; the unoptimized variant is used for
		// ABIInternal functions compiled with -N.
		if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
			ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
		} else {
			ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
		}
		bstart := s.bstart
		idToIdx := make([]int, f.NumBlocks())
		for i, b := range f.Blocks {
			idToIdx[b.ID] = i
		}
		// Register a callback that translates (block, value) IDs to PCs
		// once instruction addresses are assigned.
		debugInfo.GetPC = func(b, v ssa.ID) int64 {
			switch v {
			case ssa.BlockStart.ID:
				if b == f.Entry.ID {
					return 0 // Start at the very beginning, at the assembler-generated prologue.
					// this should only happen for function args (ssa.OpArg)
				}
				return bstart[b].Pc
			case ssa.BlockEnd.ID:
				blk := f.Blocks[idToIdx[b]]
				nv := len(blk.Values)
				return valueToProgAfter[blk.Values[nv-1].ID].Pc
			case ssa.FuncEnd.ID:
				return e.curfn.LSym.Size
			default:
				return valueToProgAfter[v].Pc
			}
		}
	}

	// Resolve branches, and relax DefaultStmt into NotStmt.
	for _, br := range s.Branches {
		br.P.To.SetTarget(s.bstart[br.B.ID])
		if br.P.Pos.IsStmt() != src.PosIsStmt {
			br.P.Pos = br.P.Pos.WithNotStmt()
		} else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
			// The branch target begins a statement on the same line;
			// avoid double-marking.
			br.P.Pos = br.P.Pos.WithNotStmt()
		}

	}

	if e.log { // spew to stdout
		filename := ""
		for p := pp.Text; p != nil; p = p.Link {
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				f.Logf("# %s\n", filename)
			}

			var s string
			if v, ok := progToValue[p]; ok {
				s = v.String()
			} else if b, ok := progToBlock[p]; ok {
				s = b.String()
			} else {
				s = "   " // most value and branch strings are 2-3 characters long
			}
			f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
		}
	}
	if f.HTMLWriter != nil { // spew to ssa.html
		var buf bytes.Buffer
		buf.WriteString("<code>")
		buf.WriteString("<dl class=\"ssa-gen\">")
		filename := ""
		for p := pp.Text; p != nil; p = p.Link {
			// Emit the filename whenever it changes.
			if p.Pos.IsKnown() && p.InnermostFilename() != filename {
				filename = p.InnermostFilename()
				buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
				buf.WriteString(html.EscapeString("# " + filename))
				buf.WriteString("</dd>")
			}

			buf.WriteString("<dt class=\"ssa-prog-src\">")
			if v, ok := progToValue[p]; ok {
				buf.WriteString(v.HTML())
			} else if b, ok := progToBlock[p]; ok {
				buf.WriteString("<b>" + b.HTML() + "</b>")
			}
			buf.WriteString("</dt>")
			buf.WriteString("<dd class=\"ssa-prog\">")
			buf.WriteString(fmt.Sprintf("%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString())))
			buf.WriteString("</dd>")
		}
		buf.WriteString("</dl>")
		buf.WriteString("</code>")
		f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
	}
	if ssa.GenssaDump[f.Name] {
		fi := f.DumpFileForPhase("genssa")
		if fi != nil {

			// inliningDiffers reports whether the inlining chains differ
			// (by file anywhere, or by line anywhere but the innermost frame).
			inliningDiffers := func(a, b []src.Pos) bool {
				if len(a) != len(b) {
					return true
				}
				for i := range a {
					if a[i].Filename() != b[i].Filename() {
						return true
					}
					if i > 0 && a[i].Line() != b[i].Line() {
						return true
					}
				}
				return false
			}

			var allPosOld []src.Pos
			var allPos []src.Pos

			for p := pp.Text; p != nil; p = p.Link {
				if p.Pos.IsKnown() {
					allPos = p.AllPos(allPos)
					if inliningDiffers(allPos, allPosOld) {
						// Print the inlining chain, outermost first.
						for i := len(allPos) - 1; i >= 0; i-- {
							pos := allPos[i]
							fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
						}
						allPos, allPosOld = allPosOld, allPos // swap, for reuse
					}
				}

				var s string
				if v, ok := progToValue[p]; ok {
					s = v.String()
				} else if b, ok := progToBlock[p]; ok {
					s = b.String()
				} else {
					s = "   " // most value and branch strings are 2-3 characters long
				}
				fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
			}
			fi.Close()
		}
	}

	defframe(&s, e, f)

	f.HTMLWriter.Close()
	f.HTMLWriter = nil
}
7221
// defframe fills in the function's TEXT pseudo-op with the final frame
// and argument sizes, spills register arguments whose spill slots must
// be pointer-complete for GC, and zeroes the ambiguously-live (needzero)
// portion of the stack frame.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	frame := types.Rnd(s.maxarg+e.stksize, int64(types.RegSize))
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.Rnd(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live. Only do this when the compiler itself hasn't
	// already spilled them in the entry block (tracked via
	// partLiveArgsSpilled below). Skipped with -N.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, see if it is already spilled before it may be live. Look for a spill
		// in the entry block up to the first safepoint.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !TypeOK(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert code to spill registers if not already (for
		// pointer-carrying pieces of multi-register arguments).
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n, ok := a.Name.(*ir.Name)
			if !ok || n.Addrtaken() || !TypeOK(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Running merge of consecutive zero-ranges: [lo, hi) is the current
	// accumulated range (frame-relative offsets).
	var lo, hi int64

	// Opaque per-arch state threaded through ZeroRange calls.
	var state uint32

	// Iterate through declarations. Autos are sorted in decreasing
	// frame offset order.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with the current range (allowing a small gap of up
			// to 2 register sizes — cheaper to over-zero than to split).
			lo = n.FrameOffset()
			continue
		}

		// Zero the old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero the final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7327
7328
// IndexJump describes one conditional branch: the jump opcode and the
// index of the successor block it targets.
type IndexJump struct {
	Jump  obj.As
	Index int
}
7333
// oneJump emits the conditional branch described by jump for block b,
// carrying b's source position.
func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
	p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
	p.Pos = b.Pos
}
7338
7339
7340
// CombJump generates combinational instructions (2 conditional jumps)
// for a two-way conditional block b; next is the block laid out
// immediately after b (fall-through), or nil. jumps[0] is used when
// next is b's first successor, jumps[1] when it is the second; when
// neither successor falls through, an unconditional jump is appended,
// preferring (per b.Likely) to branch to the unlikely side.
func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
	switch next {
	case b.Succs[0].Block():
		s.oneJump(b, &jumps[0][0])
		s.oneJump(b, &jumps[0][1])
	case b.Succs[1].Block():
		s.oneJump(b, &jumps[1][0])
		s.oneJump(b, &jumps[1][1])
	default:
		var q *obj.Prog
		if b.Likely != ssa.BranchUnlikely {
			// Conditional jumps toward succ 1, fall back with JMP.
			s.oneJump(b, &jumps[1][0])
			s.oneJump(b, &jumps[1][1])
			q = s.Br(obj.AJMP, b.Succs[1].Block())
		} else {
			s.oneJump(b, &jumps[0][0])
			s.oneJump(b, &jumps[0][1])
			q = s.Br(obj.AJMP, b.Succs[0].Block())
		}
		q.Pos = b.Pos
	}
}
7363
7364
// AddAux adds the offset and symbol information from v's aux fields to
// the address a (with v.AuxInt as the extra offset).
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 adds offset and, if v carries a symbolic aux (external symbol,
// parameter, or auto), the corresponding name/symbol/frame-offset
// information to the address a.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// add integer offset
	a.Offset += offset

	// If no additional symbol offset, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol's offset from its base register.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
			a.Sym = ir.Orig(n).(*ir.Name).Linksym()
			a.Offset += n.FrameOffset()
			break
		}
		a.Name = obj.NAME_AUTO
		if n.Class == ir.PPARAMOUT {
			// Register-resident output param addressed via its spill slot.
			a.Sym = ir.Orig(n).(*ir.Name).Linksym()
		} else {
			a.Sym = n.Linksym()
		}
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
7405
7406
7407
// extendIndex extends index idx to be pointer-width so it can be used as
// an array/slice index. For 64-bit indexes on 32-bit targets it emits an
// overflow check (panicking via OpPanicExtend unless bounded); narrower
// indexes are sign- or zero-extended according to their type.
// kind selects the bounds-failure message; bounded means the caller has
// already proven the index in range.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		return idx
	}
	if size > s.config.PtrSize {
		// truncate 64-bit indexes on 32-bit pointer archs. Test the
		// high word and branch to out-of-bounds failure if it is not 0.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// Use the unsigned variant of each bounds-failure kind.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Extend value to the required size.
	// The switch key encodes (source size, pointer size) as 10*size+ptrSize.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7499
7500
7501
7502 func CheckLoweredPhi(v *ssa.Value) {
7503 if v.Op != ssa.OpPhi {
7504 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7505 }
7506 if v.Type.IsMemory() {
7507 return
7508 }
7509 f := v.Block.Func
7510 loc := f.RegAlloc[v.ID]
7511 for _, a := range v.Args {
7512 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7513 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7514 }
7515 }
7516 }
7517
7518
7519
7520
7521
7522 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7523 entry := v.Block.Func.Entry
7524 if entry != v.Block {
7525 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7526 }
7527 for _, w := range entry.Values {
7528 if w == v {
7529 break
7530 }
7531 switch w.Op {
7532 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7533
7534 default:
7535 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7536 }
7537 }
7538 }
7539
7540
7541 func CheckArgReg(v *ssa.Value) {
7542 entry := v.Block.Func.Entry
7543 if entry != v.Block {
7544 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7545 }
7546 }
7547
7548 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7549 n, off := ssa.AutoVar(v)
7550 a.Type = obj.TYPE_MEM
7551 a.Sym = n.Linksym()
7552 a.Reg = int16(Arch.REGSP)
7553 a.Offset = n.FrameOffset() + off
7554 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7555 a.Name = obj.NAME_PARAM
7556 } else {
7557 a.Name = obj.NAME_AUTO
7558 }
7559 }
7560
7561
7562
7563 func (s *State) Call(v *ssa.Value) *obj.Prog {
7564 pPosIsStmt := s.pp.Pos.IsStmt()
7565 s.PrepareCall(v)
7566
7567 p := s.Prog(obj.ACALL)
7568 if pPosIsStmt == src.PosIsStmt {
7569 p.Pos = v.Pos.WithIsStmt()
7570 } else {
7571 p.Pos = v.Pos.WithNotStmt()
7572 }
7573 if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
7574 p.To.Type = obj.TYPE_MEM
7575 p.To.Name = obj.NAME_EXTERN
7576 p.To.Sym = sym.Fn
7577 } else {
7578
7579 switch Arch.LinkArch.Family {
7580 case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
7581 p.To.Type = obj.TYPE_REG
7582 case sys.ARM, sys.ARM64, sys.MIPS, sys.MIPS64:
7583 p.To.Type = obj.TYPE_MEM
7584 default:
7585 base.Fatalf("unknown indirect call family")
7586 }
7587 p.To.Reg = v.Args[0].Reg()
7588 }
7589 return p
7590 }
7591
7592
7593
7594 func (s *State) TailCall(v *ssa.Value) *obj.Prog {
7595 p := s.Call(v)
7596 p.As = obj.ARET
7597 return p
7598 }
7599
7600
7601
7602
7603 func (s *State) PrepareCall(v *ssa.Value) {
7604 idx := s.livenessMap.Get(v)
7605 if !idx.StackMapValid() {
7606
7607 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.Typedmemclr || sym.Fn == ir.Syms.Typedmemmove) {
7608 base.Fatalf("missing stack map index for %v", v.LongString())
7609 }
7610 }
7611
7612 call, ok := v.Aux.(*ssa.AuxCall)
7613
7614 if ok {
7615
7616
7617 if nowritebarrierrecCheck != nil {
7618 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7619 }
7620 }
7621
7622 if s.maxarg < v.AuxInt {
7623 s.maxarg = v.AuxInt
7624 }
7625 }
7626
7627
7628
7629 func (s *State) UseArgs(n int64) {
7630 if s.maxarg < n {
7631 s.maxarg = n
7632 }
7633 }
7634
7635
7636 func fieldIdx(n *ir.SelectorExpr) int {
7637 t := n.X.Type()
7638 if !t.IsStruct() {
7639 panic("ODOT's LHS is not a struct")
7640 }
7641
7642 for i, f := range t.Fields().Slice() {
7643 if f.Sym == n.Sel {
7644 if f.Offset != n.Offset() {
7645 panic("field offset doesn't match")
7646 }
7647 return i
7648 }
7649 }
7650 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7651
7652
7653
7654 }
7655
7656
7657
// ssafn holds per-function state the SSA backend needs from the frontend,
// and implements the compiler-services interface the ssa package calls back
// into (logging, fatal errors, symbol lookup, temporary allocation, ...).
type ssafn struct {
	curfn      *ir.Func             // function being compiled
	strings    map[string]*obj.LSym // cache: constant string -> data symbol (see StringData)
	stksize    int64                // stack frame size
	stkptrsize int64                // size of the prefix of the frame that contains pointers
	log        bool                 // whether to emit ssa debug logging (see Logf/Log)
}
7665
7666
7667
7668 func (e *ssafn) StringData(s string) *obj.LSym {
7669 if aux, ok := e.strings[s]; ok {
7670 return aux
7671 }
7672 if e.strings == nil {
7673 e.strings = make(map[string]*obj.LSym)
7674 }
7675 data := staticdata.StringSym(e.curfn.Pos(), s)
7676 e.strings[s] = data
7677 return data
7678 }
7679
// Auto returns a new temporary variable of the given type in the current function.
func (e *ssafn) Auto(pos src.XPos, t *types.Type) *ir.Name {
	return typecheck.TempAt(pos, e.curfn, t)
}
7683
7684
7685 func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
7686 node := parent.N
7687
7688 if node.Class != ir.PAUTO || node.Addrtaken() {
7689
7690 return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
7691 }
7692
7693 s := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
7694 n := ir.NewNameAt(parent.N.Pos(), s)
7695 s.Def = n
7696 ir.AsNode(s.Def).Name().SetUsed(true)
7697 n.SetType(t)
7698 n.Class = ir.PAUTO
7699 n.SetEsc(ir.EscNever)
7700 n.Curfn = e.curfn
7701 e.curfn.Dcl = append(e.curfn.Dcl, n)
7702 types.CalcSize(t)
7703 return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
7704 }
7705
// CanSSA reports whether values of type t can be represented as SSA values
// (delegating to the package-level TypeOK).
func (e *ssafn) CanSSA(t *types.Type) bool {
	return TypeOK(t)
}
7709
// Line returns pos formatted as a file:line string for diagnostics.
func (e *ssafn) Line(pos src.XPos) string {
	return base.FmtPos(pos)
}
7713
7714
7715 func (e *ssafn) Logf(msg string, args ...interface{}) {
7716 if e.log {
7717 fmt.Printf(msg, args...)
7718 }
7719 }
7720
// Log reports whether ssa debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7724
7725
7726 func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
7727 base.Pos = pos
7728 nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
7729 base.Fatalf("'%s': "+msg, nargs...)
7730 }
7731
7732
7733
// Warnl emits a compiler warning at the given position.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7737
// Debug_checknil reports whether nil-check debugging (-d nil) is enabled.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7741
// UseWriteBarrier reports whether write barriers are enabled (-wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7745
7746 func (e *ssafn) Syslook(name string) *obj.LSym {
7747 switch name {
7748 case "goschedguarded":
7749 return ir.Syms.Goschedguarded
7750 case "writeBarrier":
7751 return ir.Syms.WriteBarrier
7752 case "gcWriteBarrier":
7753 return ir.Syms.GCWriteBarrier
7754 case "typedmemmove":
7755 return ir.Syms.Typedmemmove
7756 case "typedmemclr":
7757 return ir.Syms.Typedmemclr
7758 }
7759 e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
7760 return nil
7761 }
7762
// SetWBPos records the position of a write barrier in the current function.
func (e *ssafn) SetWBPos(pos src.XPos) {
	e.curfn.SetWBPos(pos)
}
7766
// MyImportPath returns the import path of the package being compiled.
func (e *ssafn) MyImportPath() string {
	return base.Ctxt.Pkgpath
}
7770
7771 func clobberBase(n ir.Node) ir.Node {
7772 if n.Op() == ir.ODOT {
7773 n := n.(*ir.SelectorExpr)
7774 if n.X.Type().NumFields() == 1 {
7775 return clobberBase(n.X)
7776 }
7777 }
7778 if n.Op() == ir.OINDEX {
7779 n := n.(*ir.IndexExpr)
7780 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7781 return clobberBase(n.X)
7782 }
7783 }
7784 return n
7785 }
7786
7787
7788 func callTargetLSym(callee *ir.Name) *obj.LSym {
7789 if callee.Func == nil {
7790
7791
7792
7793
7794 return callee.Linksym()
7795 }
7796
7797 return callee.LinksymABI(callee.Func.ABI)
7798 }
7799
// min8 returns the smaller of two int8 values.
func min8(a, b int8) int8 {
	if b < a {
		return b
	}
	return a
}
7806
// max8 returns the larger of two int8 values.
func max8(a, b int8) int8 {
	if b > a {
		return b
	}
	return a
}
7813
7814
// deferstruct builds and returns the struct type used to reserve space for a
// stack-allocated defer record. The field list must stay in sync with the
// runtime's _defer struct (runtime/runtime2.go) — NOTE(review): confirm
// against the runtime source for this Go version before changing fields.
func deferstruct() *types.Type {
	makefield := func(name string, typ *types.Type) *types.Field {
		// Fields are created in the local package with no position; only
		// their names, order, and sizes matter for layout.
		sym := &types.Sym{Name: name, Pkg: types.LocalPkg}
		return types.NewField(src.NoXPos, sym, typ)
	}

	fields := []*types.Field{
		makefield("started", types.Types[types.TBOOL]),
		makefield("heap", types.Types[types.TBOOL]),
		makefield("openDefer", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// The remaining fields hold pointers in the runtime but are declared
		// as uintptr here — presumably so this struct carries no pointer map
		// of its own; TODO confirm the intended GC treatment against the
		// runtime's _defer declaration.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("_panic", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("fd", types.Types[types.TUINTPTR]),
		makefield("varp", types.Types[types.TUINTPTR]),
		makefield("framepc", types.Types[types.TUINTPTR]),
	}

	// Build the struct, opting out of algs (hash/eq) since it is never
	// compared, and compute its size/offsets now.
	s := types.NewStruct(types.NoPkg, fields)
	s.SetNoalg(true)
	types.CalcStructSize(s)
	return s
}
7848
7849
7850
7851
7852
7853 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
7854 return obj.Addr{
7855 Name: obj.NAME_NONE,
7856 Type: obj.TYPE_MEM,
7857 Reg: baseReg,
7858 Offset: spill.Offset + extraOffset,
7859 }
7860 }
7861
// BoundsCheckFunc and ExtendCheckFunc hold, per bounds-failure kind, the
// runtime entry points called when a bounds check or 64-bit index extension
// check fails.
var (
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)

// GCWriteBarrierReg maps a register number to a gcWriteBarrier variant —
// presumably the variant taking its argument in that register; confirm
// against the architecture backends that populate it.
var GCWriteBarrierReg map[int16]*obj.LSym
7869