// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"go/constant"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)

// walkAssign walks an OAS (AS) or OASOP (ASOP) node.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		mapAppend.Args[0] = left
	}

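	// Note: because left has been walked and passed through safeExpr, a
	// statement such as "a[f()] *= 2" can be rewritten below into
	// "a[tmp] = a[tmp] * 2" with f() evaluated exactly once.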
	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.X is x, as.Y.X is c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

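		// The generated call has (roughly) the runtime signature
		// chanrecv1(c *hchan, elem unsafe.Pointer); passing &x lets the
		// runtime write the received element directly into x.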
		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, y)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string)
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.(*ir.CallExpr).X = reflectdata.TypePtr(r.Type().Elem())
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}

// walkAssignDotType walks an OAS2DOTTYPE node.
func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
	walkExprListSafe(n.Lhs, init)
	n.Rhs[0] = walkExpr(n.Rhs[0], init)
	return n
}

// walkAssignFunc walks an OAS2FUNC node.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
		n.Rhs = []ir.Node{r}
		return n
	}
	init.Append(r)

	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}

// walkAssignList walks an OAS2 node.
func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)
	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
}

// walkAssignMapRead walks an OAS2MAPR node.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type()

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index)

	// from:
	//   a,b = m[i]
	// to:
	//   var,b = mapaccess2*(t, m, i)
	//   a = *var
	a := n.Lhs[0]

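	// The two runtime entry points are (roughly):
	//   mapaccess2(t *maptype, h *hmap, key unsafe.Pointer) (unsafe.Pointer, bool)
	//   mapaccess2_fat(t *maptype, h *hmap, key, zero unsafe.Pointer) (unsafe.Pointer, bool)
	// The _fat variant receives a pointer to a zeroed region to return
	// when the key is absent, used when the element is too large for the
	// runtime's small static zero value.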
	var call *ir.CallExpr
	if w := t.Elem().Size(); w <= zeroValSize {
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().Results(), init, reflectdata.TypePtr(t), r.X, key)
	} else {
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().Results(), init, reflectdata.TypePtr(t), r.X, key, z)
	}

	// mapaccess2* returns a typed bool, but due to spec changes,
	// the boolean result of i.(T) is now untyped so we make it the
	// same type as the variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// don't generate a = *var if a is _
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.Temp(types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}

// walkAssignRecv walks an OAS2RECV node.
func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.UnaryExpr) // recv
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	var n1 ir.Node
	if ir.IsBlank(n.Lhs[0]) {
		n1 = typecheck.NodNil()
	} else {
		n1 = typecheck.NodAddr(n.Lhs[0])
	}
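	// chanrecv2 is (roughly) chanrecv2(c *hchan, elem unsafe.Pointer) bool
	// in the runtime; a nil elem means the received value is discarded,
	// which is why a blank first operand becomes a nil pointer above.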
	fn := chanfn("chanrecv2", 2, r.X.Type())
	ok := n.Lhs[1]
	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
	return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
}

// walkReturn walks an ORETURN node.
func walkReturn(n *ir.ReturnStmt) ir.Node {
	fn := ir.CurFunc

	fn.NumReturns++
	if len(n.Results) == 0 {
		return n
	}

	results := fn.Type().Results().FieldSlice()
	dsts := make([]ir.Node, len(results))
	for i, v := range results {
		// TODO(mdempsky): typecheck should have already checked the result variables.
		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
	}

	n.Results = ascompatee(n.Op(), dsts, n.Results)
	return n
}

// check assign type list to
// an expression list. called in
//	expr-list = func()
func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
	if len(nl) != nr.NumFields() {
		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
	}

	var nn ir.Nodes
	for i, l := range nl {
		if ir.IsBlank(l) {
			continue
		}
		r := nr.Field(i)

		// Order should have created autotemps of the appropriate type for
		// us to store results into.
		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
		}

		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
		res.Index = int64(i)
		res.SetType(r.Type)
		res.SetTypecheck(1)

		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
	}
	return nn
}

// check assign expression list to
// an expression list. called in
//	expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}
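	// For example, in "x, y = y, x" the first pass records the write to x;
	// when the second pair (y, x) is processed, its right side x is
	// affected, so x is copied to a temporary in early, which runs before
	// any of the late assignments.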

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// If an expression has init statements, they must be evaluated
			// before any of its saved sub-operands (#45706).
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// becomes visible early if evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if sym := types.OrigSym(name.Sym()); sym == nil || sym.IsBlank() {
			// We can ignore assignments to blank or anonymous
			// result parameters. These can't appear in expressions anyway.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

	early.Append(late.Take()...)
	return early
}

// readsMemory reports whether the evaluation of n directly reads from
// memory that might be written to indirectly.
func readsMemory(n ir.Node) bool {
	switch n.Op() {
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Class == ir.PFUNC {
			return false
		}
		return n.Addrtaken() || !n.OnStack()

	case ir.OADD,
		ir.OAND,
		ir.OANDAND,
		ir.OANDNOT,
		ir.OBITNOT,
		ir.OCONV,
		ir.OCONVIFACE,
		ir.OCONVIDATA,
		ir.OCONVNOP,
		ir.ODIV,
		ir.ODOT,
		ir.ODOTTYPE,
		ir.OLITERAL,
		ir.OLSH,
		ir.OMOD,
		ir.OMUL,
		ir.ONEG,
		ir.ONIL,
		ir.OOR,
		ir.OOROR,
		ir.OPAREN,
		ir.OPLUS,
		ir.ORSH,
		ir.OSUB,
		ir.OXOR:
		return false
	}

	// Be conservative.
	return true
}

// expand append(l1, l2...) to
//   init {
//     s := l1
//     n := len(s) + len(l2)
//     // Compare as uint so growslice can panic on overflow.
//     if uint(n) > uint(cap(s)) {
//       s = growslice(s, n)
//     }
//     s = s[:n]
//     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
//   }
//   s
//
// l2 is allowed to be a string.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.Temp(l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// n := len(s) + len(l2)
	nn := typecheck.Temp(types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))))

	// if uint(n) > uint(cap(s))
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	scapuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, scapuint)

	// instantiate growslice(typ *type, []any, int) []any
	fn := typecheck.LookupRuntime("growslice")
	fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
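	// In the runtime, growslice has (roughly) the signature
	// growslice(et *_type, old slice, cap int) slice; it allocates a
	// larger backing array, copies the old elements over, and panics if
	// the requested capacity is out of range (e.g. after overflow).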

	// s = growslice(T, s, n)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), reflectdata.TypePtr(elemtype), s, nn))}
	nodes.Append(nif)

	// s = s[:n]
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nodes.Append(ir.NewAssignStmt(base.Pos, s, nt))
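	// SetBounded(true) marks the s[:n] above as not needing a bounds
	// check: the preceding if-statement guarantees n <= cap(s) by the
	// time this slice expression executes.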

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[len(l1):], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
		slice.SetType(s.Type())

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy")
		fn = typecheck.SubstArgTypes(fn, l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.TypePtr(elemtype), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//  copy(s[len(l1):], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
		slice.SetType(s.Type())

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy")
		fn = typecheck.SubstArgTypes(fn, ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(elemtype.Size()))
	} else {
		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1))
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove")
		fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
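	// To summarize the three branches above: pointer elements go through
	// typedslicecopy so the write barrier sees every pointer written;
	// instrumented builds call slicecopy so race/msan hooks fire inside
	// the runtime; otherwise a raw memmove of len(l2)*sizeof(T) bytes
	// suffices.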
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}

// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n ir.Node) bool {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}

	if n.Op() != ir.OAPPEND {
		return false
	}
	call := n.(*ir.CallExpr)
	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
		return false
	}

	mk := call.Args[1].(*ir.MakeExpr)
	if mk.Cap != nil {
		return false
	}

	// y must be either an integer constant or the largest possible positive value
	// of variable y needs to fit into a uint.

	// typecheck made sure that constant arguments to make are not negative and fit into an int.

	// The case of overflow of the len argument to make is handled by an explicit check of int(len) < 0 at runtime.
	y := mk.Len
	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
		return false
	}

	return true
}

// extendSlice rewrites append(l1, make([]T, l2)...) to
//   init {
//     if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//     } else {
//       panicmakeslicelen()
//     }
//     s := l1
//     n := len(s) + l2
//     // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//     // cap is a positive int and n can become negative when len(s) + l2
//     // overflows int. Interpreting n when negative as uint makes it larger
//     // than cap(s). growslice will check the int n arg and panic if n is
//     // negative. This prevents the overflow from being undetected.
//     if uint(n) > uint(cap(s)) {
//       s = growslice(T, s, n)
//     }
//     s = s[:n]
//     lptr := &l1[0]
//     sptr := &s[0]
//     if lptr == sptr || mem_contains_no_pointers {
//       // growslice did not clear the whole underlying array (or did not get called)
//       hp := &s[len(l1)]
//       hn := l2 * sizeof(T)
//       memclr(hp, hn)
//     }
//   }
//   s
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into a uint.
	// The case of l2 overflow when converting it to an int is handled by an explicit
	// check of l2 < 0 at runtime which is generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.Args[1]

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.Temp(l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	elemtype := s.Type().Elem()

	// n := len(s) + l2
	nn := typecheck.Temp(types.Types[types.TINT])
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) > uint(cap(s))
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, capuint), nil, nil)

	// instantiate growslice(typ *type, old []any, newcap int) []any
	fn := typecheck.LookupRuntime("growslice")
	fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)

	// s = growslice(T, s, n)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), reflectdata.TypePtr(elemtype), s, nn))}
	nodes = append(nodes, nif)

	// s = s[:n]
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, nt))

	// lptr := &l1[0]
	l1ptr := typecheck.Temp(l1.Type().Elem().PtrTo())
	tmp := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l1)
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, l1ptr, tmp))

	// sptr := &s[0]
	sptr := typecheck.Temp(elemtype.PtrTo())
	tmp = ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, sptr, tmp))

	// hp := &s[len(l1)]
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elem(s))
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}
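	// Both memclr variants take (ptr unsafe.Pointer, n uintptr). The
	// HasPointers form is required when T contains pointers so that the
	// clear is visible to the garbage collector's write barrier.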

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)

	if hasPointers {
		// if l1ptr == sptr
		nifclr := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OEQ, l1ptr, sptr), nil, nil)
		nifclr.Body = clr
		nodes = append(nodes, nifclr)
	} else {
		nodes = append(nodes, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}