package walk

import (
	"fmt"
	"go/constant"
	"internal/buildcfg"
	"strings"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/staticdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
)

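// The result of walkExpr MUST be assigned back to n, e.g.
//
//	n.Left = walkExpr(n.Left, init)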
func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return n
	}

	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
		// The code below moves n's init list into init; if init aliases
		// n's own init list, that move would be circular.
		base.Fatalf("walkExpr init == &n->ninit")
	}

	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	lno := ir.SetPos(n)

	if base.Flag.LowerW > 1 {
		ir.Dump("before walk expr", n)
	}

	if n.Typecheck() != 1 {
		base.Fatalf("missed typecheck: %+v", n)
	}

	if n.Type().IsUntyped() {
		base.Fatalf("expression has untyped type: %+v", n)
	}

	n = walkExpr1(n, init)

	// Eagerly compute sizes of all expressions for the back end.
	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
		types.CheckSize(typ)
	}
	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
		types.CheckSize(n.Heapaddr.Type())
	}
	if ir.IsConst(n, constant.String) {
		// Emit the string constant's data symbol now rather than at the
		// end of compilation.
		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
	}

	if base.Flag.LowerW != 0 && n != nil {
		ir.Dump("after walk expr", n)
	}

	base.Pos = lno
	return n
}

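// walkExpr1 walks a single expression node, dispatching on n.Op().
// Every case returns the (possibly rewritten) node or panics.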
func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	default:
		ir.Dump("walk", n)
		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
		panic("unreachable")

	case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
		return n

	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// Leaf nodes: nothing to walk.
		return n

	case ir.OMETHEXPR:
		// Rewrite a method expression into a direct reference to its
		// underlying function.
		n := n.(*ir.SelectorExpr)
		return n.FuncName()

	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODOTMETH, ir.ODOTINTER:
		n := n.(*ir.SelectorExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
		ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OUNSAFESLICE:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeSlice(n, init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		return walkDot(n, init)

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		return walkDotType(n, init)

	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
		n := n.(*ir.DynamicTypeAssertExpr)
		return walkDynamicDotType(n, init)

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		return walkLenCap(n, init)

	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		return walkCompare(n, init)

	case ir.OANDAND, ir.OOROR:
		n := n.(*ir.LogicalExpr)
		return walkLogical(n, init)

	case ir.OPRINT, ir.OPRINTN:
		return walkPrint(n.(*ir.CallExpr), init)

	case ir.OPANIC:
		n := n.(*ir.UnaryExpr)
		return mkcall("gopanic", nil, init, n.X)

	case ir.ORECOVERFP:
		return walkRecoverFP(n.(*ir.CallExpr), init)

	case ir.OCFUNC:
		return n

	case ir.OCALLINTER, ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		return walkCall(n, init)

	case ir.OAS, ir.OASOP:
		return walkAssign(init, n)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		return walkAssignList(init, n)

	// a, b, ... = fn()
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		return walkAssignFunc(init, n)

	// x, y = <-c
	// order.stmt made sure x is addressable or blank.
	case ir.OAS2RECV:
		n := n.(*ir.AssignListStmt)
		return walkAssignRecv(init, n)

	// a, b = m[i]
	case ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		return walkAssignMapRead(init, n)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		return walkDelete(init, n)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		return walkAssignDotType(n, init)

	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		return walkConvInterface(n, init)

	case ir.OCONVIDATA:
		n := n.(*ir.ConvExpr)
		return walkConvIData(n, init)

	case ir.OCONV, ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		return walkConv(n, init)

	case ir.OSLICE2ARRPTR:
		n := n.(*ir.ConvExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODIV, ir.OMOD:
		n := n.(*ir.BinaryExpr)
		return walkDivMod(n, init)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		return walkIndex(n, init)

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		return walkIndexMap(n, init)

	case ir.ORECV:
		base.Fatalf("walkExpr ORECV")
		panic("unreachable")

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		return walkSliceHeader(n, init)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		return walkSlice(n, init)

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return walkNew(n, init)

	case ir.OADDSTR:
		return walkAddString(n.(*ir.AddStringExpr), init)

	case ir.OAPPEND:
		// Order should have rewritten appends into OAS(node, OAPPEND),
		// which is handled by walkAssign above.
		base.Fatalf("append outside assignment")
		panic("unreachable")

	case ir.OCOPY:
		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)

	case ir.OCLOSE:
		n := n.(*ir.UnaryExpr)
		return walkClose(n, init)

	case ir.OMAKECHAN:
		n := n.(*ir.MakeExpr)
		return walkMakeChan(n, init)

	case ir.OMAKEMAP:
		n := n.(*ir.MakeExpr)
		return walkMakeMap(n, init)

	case ir.OMAKESLICE:
		n := n.(*ir.MakeExpr)
		return walkMakeSlice(n, init)

	case ir.OMAKESLICECOPY:
		n := n.(*ir.MakeExpr)
		return walkMakeSliceCopy(n, init)

	case ir.ORUNESTR:
		n := n.(*ir.ConvExpr)
		return walkRuneToString(n, init)

	case ir.OBYTES2STR, ir.ORUNES2STR:
		n := n.(*ir.ConvExpr)
		return walkBytesRunesToString(n, init)

	case ir.OBYTES2STRTMP:
		n := n.(*ir.ConvExpr)
		return walkBytesToStringTemp(n, init)

	case ir.OSTR2BYTES:
		n := n.(*ir.ConvExpr)
		return walkStringToBytes(n, init)

	case ir.OSTR2BYTESTMP:
		n := n.(*ir.ConvExpr)
		return walkStringToBytesTemp(n, init)

	case ir.OSTR2RUNES:
		n := n.(*ir.ConvExpr)
		return walkStringToRunes(n, init)

	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
		return walkCompLit(n, init)

	case ir.OSEND:
		n := n.(*ir.SendStmt)
		return walkSend(n, init)

	case ir.OCLOSURE:
		return walkClosure(n.(*ir.ClosureExpr), init)

	case ir.OMETHVALUE:
		return walkMethodValue(n.(*ir.SelectorExpr), init)
	}

	// No return needed: every case in the switch above returns or panics.
}

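// walkExprList walks each expression in s, replacing it with its walked
// form and appending any side effects to init.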
func walkExprList(s []ir.Node, init *ir.Nodes) {
	for i := range s {
		s[i] = walkExpr(s[i], init)
	}
}

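// walkExprListCheap is like walkExprList, but it first replaces each
// expression with a cheap copy (see cheapExpr).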
func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
	for i, n := range s {
		s[i] = cheapExpr(n, init)
		s[i] = walkExpr(s[i], init)
	}
}

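// walkExprListSafe is like walkExprList, but it first replaces each
// expression with a safe copy (see safeExpr).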
func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
	for i, n := range s {
		s[i] = safeExpr(n, init)
		s[i] = walkExpr(s[i], init)
	}
}

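// cheapExpr returns a side-effect-free and cheap version of n, appending
// any side effects to init. The result may not be assignable.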
func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n
	}

	return copyExpr(n, n.Type(), init)
}

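// safeExpr returns a side-effect-free version of n, appending any side
// effects to init. The result is assignable if n is.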
func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return nil
	}

	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		l := safeExpr(n.X, init)
		r := safeExpr(n.Index, init)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return walkExpr(typecheck.Expr(a), init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		if isStaticCompositeLiteral(n) {
			return n
		}
	}

	// Make a copy; the result must not be used as an lvalue.
	if ir.IsAddressable(n) {
		base.Fatalf("missing lvalue case in safeExpr: %v", n)
	}
	return cheapExpr(n, init)
}

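// copyExpr allocates a new temporary of type t, emits tmp = n into init,
// and returns the temporary.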
func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
	l := typecheck.Temp(t)
	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
	return l
}

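// walkAddString rewrites a string concatenation (OADDSTR) into a call to
// one of the runtime's concatstring helpers.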
func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
	c := len(n.List)

	if c < 2 {
		base.Fatalf("walkAddString count %d too small", c)
	}

	buf := typecheck.NodNil()
	if n.Esc() == ir.EscNone {
		sz := int64(0)
		for _, n1 := range n.List {
			if n1.Op() == ir.OLITERAL {
				sz += int64(len(ir.StringVal(n1)))
			}
		}

		// Don't allocate the buffer if the result won't fit.
		if sz < tmpstringbufsize {
			// Create a temporary buffer for the result string on the stack.
			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
		}
	}

	// Build the list of string arguments.
	args := []ir.Node{buf}
	for _, n2 := range n.List {
		args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
	}

	var fn string
	if c <= 5 {
		// Small numbers of strings use direct runtime helpers.
		// Note: order.expr knows this cutoff too.
		fn = fmt.Sprintf("concatstring%d", c)
	} else {
		// Large numbers of strings are passed to the runtime as a slice.
		fn = "concatstrings"

		t := types.NewSlice(types.Types[types.TSTRING])
		// args[1:] to skip the buffer argument.
		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(t), args[1:])
		slice.Prealloc = n.Prealloc
		args = []ir.Node{buf, slice}
		slice.SetEsc(ir.EscNone)
	}

	cat := typecheck.LookupRuntime(fn)
	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
	r.Args = args
	r1 := typecheck.Expr(r)
	r1 = walkExpr(r1, init)
	r1.SetType(n.Type())

	return r1
}

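// walkCall walks an OCALLFUNC or OCALLINTER node.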
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}
	if n.Op() == ir.OCALLINTER || n.X.Op() == ir.OMETHEXPR {
		// Check for uses of reflect's Method/MethodByName, via either an
		// interface call or a method expression call.
		usemethod(n)
	}
	if n.Op() == ir.OCALLINTER {
		reflectdata.MarkUsedIfaceMethod(n)
	}

	if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.OCLOSURE {
		directClosureCall(n)
	}

	if isFuncPCIntrinsic(n) {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
		// rewrite it to the address of the function under the ABI it is defined for.
		name := n.X.(*ir.Name).Sym().Name
		arg := n.Args[0]
		var wantABI obj.ABI
		switch name {
		case "FuncPCABI0":
			wantABI = obj.ABI0
		case "FuncPCABIInternal":
			wantABI = obj.ABIInternal
		}
		if isIfaceOfFunc(arg) {
			fn := arg.(*ir.ConvExpr).X.(*ir.Name)
			abi := fn.Func.ABI
			if abi != wantABI {
				base.ErrorfAt(n.Pos(), "internal/abi.%s expects an %v function, %s is defined as %v", name, wantABI, fn.Sym().Name, abi)
			}
			var e ir.Node = ir.NewLinksymExpr(n.Pos(), fn.Sym().LinksymABI(abi), types.Types[types.TUINTPTR])
			e = ir.NewAddrExpr(n.Pos(), e)
			e.SetType(types.Types[types.TUINTPTR].PtrTo())
			e = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, n.Type(), e)
			return e
		}

		// fn is not a defined function; it must be ABIInternal.
		// Read the address from the func value, i.e. *(*uintptr)(idata(fn)).
		if wantABI != obj.ABIInternal {
			base.ErrorfAt(n.Pos(), "internal/abi.%s does not accept func expression, which is ABIInternal", name)
		}
		arg = walkExpr(arg, init)
		var e ir.Node = ir.NewUnaryExpr(n.Pos(), ir.OIDATA, arg)
		e.SetType(n.Type().PtrTo())
		e = ir.NewStarExpr(n.Pos(), e)
		e.SetType(n.Type())
		return e
	}

	walkCall1(n, init)
	return n
}

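// walkCall1 walks the function and argument expressions of a call, moving
// any argument whose evaluation may itself involve a function call into a
// temporary first.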
func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
	if n.Walked() {
		return
	}
	n.SetWalked(true)

	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}

	args := n.Args
	params := n.X.Type().Params()

	n.X = walkExpr(n.X, init)
	walkExprList(args, init)

	for i, arg := range args {
		// Validate that argument and parameter types match.
		param := params.Field(i)
		if !types.Identical(arg.Type(), param.Type) {
			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
		}

		// For any argument whose evaluation might require a function call,
		// store that argument into a temporary variable, to prevent such
		// calls from clobbering arguments already evaluated.
		if mayCall(arg) {
			// Assign arg to a temporary.
			tmp := typecheck.Temp(param.Type)
			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
			// Replace arg with the temporary.
			args[i] = tmp
		}
	}

	n.Args = args
}

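// walkDivMod walks an ODIV or OMOD node.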
func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// Rewrite complex division into a runtime call.
	et := n.X.Type().Kind()

	if types.IsComplex[et] && n.Op() == ir.ODIV {
		t := n.Type()
		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
		return typecheck.Conv(call, t)
	}

	// Nothing to do for float divisions.
	if types.IsFloat[et] {
		return n
	}

	// Rewrite 64-bit div and mod on 32-bit architectures into runtime
	// calls, unless the divisor is a constant that the SSA backend can
	// handle directly.
	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
		if n.Y.Op() == ir.OLITERAL {
			// Leave div/mod by constant powers of 2 or small 16-bit constants.
			// The SSA backend will handle those.
			switch et {
			case types.TINT64:
				c := ir.Int64Val(n.Y)
				if c < 0 {
					c = -c
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			case types.TUINT64:
				c := ir.Uint64Val(n.Y)
				if c < 1<<16 {
					return n
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			}
		}
		var fn string
		if et == types.TINT64 {
			fn = "int64"
		} else {
			fn = "uint64"
		}
		if n.Op() == ir.ODIV {
			fn += "div"
		} else {
			fn += "mod"
		}
		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
	}
	return n
}

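// walkDot walks an ODOT or ODOTPTR node.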
func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
	usefield(n)
	n.X = walkExpr(n.X, init)
	return n
}

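// walkDotType walks an ODOTTYPE or ODOTTYPE2 node.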
func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	// Set up the itab address for the back end.
	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
		n.Itab = reflectdata.ITabAddr(n.Type(), n.X.Type())
	}
	return n
}

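// walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.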
func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.T = walkExpr(n.T, init)
	return n
}

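// walkIndex walks an OINDEX node.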
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// Save the original index for bounds-check elision;
	// walking may rewrite it (e.g. an ODIV/OMOD).
	r := n.Index

	n.Index = walkExpr(n.Index, init)

	// If the range of the index type cannot exceed the static array
	// bound, disable the bounds check.
	if n.Bounded() {
		return n
	}
	t := n.X.Type()
	if t != nil && t.IsPtr() {
		t = t.Elem()
	}
	if t.IsArray() {
		n.SetBounded(bounded(r, t.NumElem()))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
		if ir.IsSmallIntConst(n.Index) && !n.Bounded() {
			base.Errorf("index out of bounds")
		}
	} else if ir.IsConst(n.X, constant.String) {
		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
		if ir.IsSmallIntConst(n.Index) && !n.Bounded() {
			base.Errorf("index out of bounds")
		}
	}

	if ir.IsConst(n.Index, constant.Int) {
		if v := n.Index.Val(); constant.Sign(v) < 0 || ir.ConstOverflow(v, types.Types[types.TINT]) {
			base.Errorf("index out of bounds")
		}
	}
	return n
}

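// mapKeyArg returns an expression for key that is suitable to be passed
// as the key argument to the runtime map access/assign functions selected
// by fast (see mapfast).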
func mapKeyArg(fast int, n, key ir.Node) ir.Node {
	switch fast {
	case mapslow:
		// The standard version takes the key by reference.
		// orderState.expr made sure the key is addressable.
		return typecheck.NodAddr(key)
	case mapfast32ptr:
		// The pointer key is passed by value, reinterpreted as uint32.
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
	case mapfast64ptr:
		// The pointer key is passed by value, reinterpreted as uint64.
		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
	default:
		// Fast versions take the key by value.
		return key
	}
}

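// walkIndexMap walks an OINDEXMAP node.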
func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	// Replace m[k] with *map{access1,assign}(maptype, m, &k).
	n.X = walkExpr(n.X, init)
	n.Index = walkExpr(n.Index, init)
	map_ := n.X
	key := n.Index
	t := map_.Type()
	var call *ir.CallExpr
	if n.Assigned {
		// This m[k] expression is on the left-hand side of an assignment.
		fast := mapfast(t)
		if fast == mapslow {
			// The standard version takes the key by reference.
			// orderState.expr made sure the key is addressable.
			key = typecheck.NodAddr(key)
		}
		call = mkcall1(mapfn(mapassign[fast], t, false), nil, init, reflectdata.TypePtr(t), map_, key)
	} else {
		// m[k] is not the target of an assignment.
		fast := mapfast(t)
		key = mapKeyArg(fast, n, key)
		if w := t.Elem().Size(); w <= zeroValSize {
			call = mkcall1(mapfn(mapaccess1[fast], t, false), types.NewPtr(t.Elem()), init, reflectdata.TypePtr(t), map_, key)
		} else {
			z := reflectdata.ZeroAddr(w)
			call = mkcall1(mapfn("mapaccess1_fat", t, true), types.NewPtr(t.Elem()), init, reflectdata.TypePtr(t), map_, key, z)
		}
	}
	call.SetType(types.NewPtr(t.Elem()))
	call.MarkNonNil()
	star := ir.NewStarExpr(base.Pos, call)
	star.SetType(t.Elem())
	star.SetTypecheck(1)
	return star
}

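// walkLogical walks an OANDAND or OOROR node.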
func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// Side effects from n.Y cannot go on init: they must only run when
	// n.Y is actually evaluated (short-circuit evaluation), so collect
	// them separately and attach them to n.Y.
	var ll ir.Nodes

	n.Y = walkExpr(n.Y, &ll)
	n.Y = ir.InitExpr(ll, n.Y)
	return n
}

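// walkSend walks an OSEND node.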
func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
	n1 := n.Value
	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
	n1 = walkExpr(n1, init)
	n1 = typecheck.NodAddr(n1)
	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
}

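// walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.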
func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Low = walkExpr(n.Low, init)
	if n.Low != nil && ir.IsZero(n.Low) {
		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
		n.Low = nil
	}
	n.High = walkExpr(n.High, init)
	n.Max = walkExpr(n.Max, init)

	if n.Op().IsSlice3() {
		if n.Max != nil && n.Max.Op() == ir.OCAP && ir.SameSafeExpr(n.X, n.Max.(*ir.UnaryExpr).X) {
			// Reduce x[i:j:cap(x)] to x[i:j].
			if n.Op() == ir.OSLICE3 {
				n.SetOp(ir.OSLICE)
			} else {
				n.SetOp(ir.OSLICEARR)
			}
			return reduceSlice(n)
		}
		return n
	}
	return reduceSlice(n)
}

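// walkSliceHeader walks an OSLICEHEADER node.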
func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	n.Cap = walkExpr(n.Cap, init)
	return n
}

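// reduceSlice drops slice bounds that are redundant and removes the slice
// operation entirely when it is a no-op.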
func reduceSlice(n *ir.SliceExpr) ir.Node {
	if n.High != nil && n.High.Op() == ir.OLEN && ir.SameSafeExpr(n.X, n.High.(*ir.UnaryExpr).X) {
		// Reduce x[i:len(x)] to x[i:].
		n.High = nil
	}
	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
		// Reduce x[:] to x.
		if base.Debug.Slice > 0 {
			base.Warn("slice: omit slice operation")
		}
		return n.X
	}
	return n
}

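// bounded reports whether the integer expression n is provably in the
// range [0, max).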
func bounded(n ir.Node, max int64) bool {
	if n.Type() == nil || !n.Type().IsInteger() {
		return false
	}

	sign := n.Type().IsSigned()
	bits := int32(8 * n.Type().Size())

	if ir.IsSmallIntConst(n) {
		v := ir.Int64Val(n)
		return 0 <= v && v < max
	}

	switch n.Op() {
	case ir.OAND, ir.OANDNOT:
		n := n.(*ir.BinaryExpr)
		v := int64(-1)
		switch {
		case ir.IsSmallIntConst(n.X):
			v = ir.Int64Val(n.X)
		case ir.IsSmallIntConst(n.Y):
			v = ir.Int64Val(n.Y)
			if n.Op() == ir.OANDNOT {
				v = ^v
				if !sign {
					v &= 1<<uint(bits) - 1
				}
			}
		}
		if 0 <= v && v < max {
			return true
		}

	case ir.OMOD:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if 0 <= v && v <= max {
				return true
			}
		}

	case ir.ODIV:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ir.ORSH:
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}

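// usemethod checks calls for uses of reflect.Type.Method and MethodByName
// and, when found, marks the current function so that its method metadata
// is kept alive.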
func usemethod(n *ir.CallExpr) {
	// Don't mark the reflect package's own Method/MethodByName
	// implementations; only their callers need to be marked.
	if base.Ctxt.Pkgpath == "reflect" {
		switch ir.CurFunc.Nname.Sym().Name {
		case "(*rtype).Method", "(*rtype).MethodByName", "(*interfaceType).Method", "(*interfaceType).MethodByName":
			return
		}
	}

	dot, ok := n.X.(*ir.SelectorExpr)
	if !ok {
		return
	}

	// Looking for either direct method calls or interface method calls of:
	//	reflect.Type.Method        -> func(int) reflect.Method
	//	reflect.Type.MethodByName  -> func(string) (reflect.Method, bool)
	var pKind types.Kind

	switch dot.Sel.Name {
	case "Method":
		pKind = types.TINT
	case "MethodByName":
		pKind = types.TSTRING
	default:
		return
	}

	t := dot.Selection.Type
	if t.NumParams() != 1 || t.Params().Field(0).Type.Kind() != pKind {
		return
	}
	switch t.NumResults() {
	case 1:
		// ok
	case 2:
		if t.Results().Field(1).Type.Kind() != types.TBOOL {
			return
		}
	default:
		return
	}

	// Check that the first result type is "reflect.Method"; the symbol
	// name and package are checked separately.
	if s := t.Results().Field(0).Type.Sym(); s != nil && s.Name == "Method" && types.IsReflectPkg(s.Pkg) {
		ir.CurFunc.SetReflectMethod(true)
		// The LSym is initialized at this point, so set the attribute on it too.
		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
	}
}

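// usefield records the use of a tracked struct field (one annotated with
// go:"track") in the current function, for the fieldtrack experiment.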
func usefield(n *ir.SelectorExpr) {
	if !buildcfg.Experiment.FieldTrack {
		return
	}

	switch n.Op() {
	default:
		base.Fatalf("usefield %v", n.Op())

	case ir.ODOT, ir.ODOTPTR:
		break
	}

	field := n.Selection
	if field == nil {
		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
	}
	if field.Sym != n.Sel {
		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
	}
	if !strings.Contains(field.Note, "go:\"track\"") {
		return
	}

	outer := n.X.Type()
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym() == nil {
		base.Errorf("tracked field must be in named struct type")
	}
	if !types.IsExported(field.Sym.Name) {
		base.Errorf("tracked field must be exported (upper case)")
	}

	sym := reflectdata.TrackSym(outer, field)
	if ir.CurFunc.FieldTrack == nil {
		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
	}
	ir.CurFunc.FieldTrack[sym] = struct{}{}
}