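// Garbage collector liveness bitmap generation.
//
// The liveness analysis in this file computes, for each GC safe point
// in a function, the set of stack variables that may hold live
// pointers. The results are encoded as bitmaps consumed by the runtime
// garbage collector and stack scanner.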
package liveness

import (
	"crypto/sha1"
	"fmt"
	"os"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/bitvec"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/typebits"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
	"cmd/internal/src"
)
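
// OpVarDef is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// The analysis can see initialization of single-word variables quite
// precisely, so OpVarDef is only strictly needed for multi-word
// variables satisfying isfat(n.Type()). Treating such a variable as
// dead immediately before its OpVarDef keeps a partially initialized
// value from being reported live under a stale stack map, which could
// otherwise cause the garbage collector to follow junk pointers.
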
// A blockEffects summarizes the liveness effects on an SSA block.
type blockEffects struct {
	// Computed during prologue from the block's values alone:
	//
	//	uevar: upward exposed variables (read before being written in this block)
	//	varkill: variables written in this block
	uevar   bitvec.BitVec
	varkill bitvec.BitVec

	// Computed during solve using control flow information:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	livein  bitvec.BitVec
	liveout bitvec.BitVec
}

// A liveness holds the global state of the liveness computation for a
// single function.
type liveness struct {
	fn         *ir.Func
	f          *ssa.Func
	vars       []*ir.Name         // tracked variables
	idx        map[*ir.Name]int32 // index of each tracked variable in vars
	stkptrsize int64

	be []blockEffects // indexed by block ID

	// allUnsafe indicates that all points in this function are
	// unsafe-points.
	allUnsafe bool
	// unsafePoints bit i is set if Value ID i is an unsafe-point
	// (preemption is not allowed). Only valid if !allUnsafe.
	unsafePoints bitvec.BitVec

	// An array with a bit vector for each safe point in the
	// current Block during liveness.epilogue. Indexed in Value
	// order for that block. Additionally, for the entry block
	// livevars[0] is the entry bitmap. liveness.compact moves
	// these to stackMaps.
	livevars []bitvec.BitVec

	// livenessMap maps from safe points (i.e., CALLs) to their
	// liveness map indexes.
	livenessMap Map
	stackMapSet bvecSet
	stackMaps   []bitvec.BitVec

	cache progeffectscache

	// partLiveArgs includes input arguments (PPARAM) that may be
	// partially live: live because of a register copy only, not
	// because of their stack slot.
	partLiveArgs map[*ir.Name]bool

	doClobber     bool // Whether to clobber dead stack slots in this function.
	noClobberArgs bool // Do not clobber function arguments.
}

// Map maps from *ssa.Value to LivenessIndex.
type Map struct {
	Vals map[ssa.ID]objw.LivenessIndex
	// The set of live, pointer-containing variables at the
	// deferreturn call (only meaningful when open-coded defers
	// are in use).
	DeferReturn objw.LivenessIndex
}

func (m *Map) reset() {
	if m.Vals == nil {
		m.Vals = make(map[ssa.ID]objw.LivenessIndex)
	} else {
		for k := range m.Vals {
			delete(m.Vals, k)
		}
	}
	m.DeferReturn = objw.LivenessDontCare
}

func (m *Map) set(v *ssa.Value, i objw.LivenessIndex) {
	m.Vals[v.ID] = i
}

func (m Map) Get(v *ssa.Value) objw.LivenessIndex {
	// If v isn't in the map, then it's a "don't care" and not an
	// unsafe-point.
	if idx, ok := m.Vals[v.ID]; ok {
		return idx
	}
	return objw.LivenessIndex{StackMapIndex: objw.StackMapDontCare, IsUnsafePoint: false}
}

type progeffectscache struct {
	retuevar    []int32
	tailuevar   []int32
	initialized bool
}

// shouldTrack reports whether the liveness analysis
// should track the variable n.
// We don't care about variables without pointers,
// nor about non-local variables,
// nor about locals that have escaped to the heap: their pointers
// are accounted for elsewhere.
func shouldTrack(n *ir.Name) bool {
	return (n.Class == ir.PAUTO && n.Esc() != ir.EscHeap || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
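
// For example, given
//
//	func f(p *int) (q *int) {
//		x := new(int) // stays on the stack if it does not escape
//		n := 42
//		_ = n
//		...
//	}
//
// p, q, and (if non-escaping) x are tracked; n is not, since it
// contains no pointers.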

// getvariables returns the list of variables that the liveness
// analysis should track, along with a map from variable to its index
// in that list.
func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
	var vars []*ir.Name
	for _, n := range fn.Dcl {
		if shouldTrack(n) {
			vars = append(vars, n)
		}
	}
	idx := make(map[*ir.Name]int32, len(vars))
	for i, n := range vars {
		idx[n] = int32(i)
	}
	return vars, idx
}

func (lv *liveness) initcache() {
	if lv.cache.initialized {
		base.Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case ir.PPARAM:
			// A tail call (RetJmp) restores the stack pointer and
			// jumps to a new function entirely. For correctness it
			// must read all the parameters and must not overwrite
			// any of them, so every parameter is upward exposed at
			// a RetJmp.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

		case ir.PPARAMOUT:
			// All results are live at every return point.
			// Note that this point is after escaping return values
			// have been copied back from the heap.
			lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
		}
	}
}

// A liveEffect is a set of flags that describe an instruction's
// liveness effects on a variable.
//
// The possible flags are:
//
//	uevar - used by the instruction
//	varkill - killed by the instruction (set)
//
// A kill happens after the use (for an instruction that updates a
// value, for example).
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
)
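
// For example, a block that reads y and then completely overwrites x
// contributes uevar={y} and varkill={x}: y's value is demanded from the
// block's predecessors, while x's incoming value is dead on entry.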

// valueEffects returns the index of a variable in lv.vars and the
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
	n, e := affectedVar(v)
	if e == 0 || n == nil {
		return -1, 0
	}

	// AllocFrame has dropped unused variables from
	// lv.fn.Func.Dcl, but they might still be referenced by
	// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
	// variable" ICEs (issue 19632).
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	if n.Class == ir.PPARAM && !n.Addrtaken() && n.Type().Size() > int64(types.PtrSize) {
		// Only aggregate-typed arguments that are not address-taken can be
		// partially live.
		lv.partLiveArgs[n] = true
	}

	var effect liveEffect
	// Read is a read, obviously.
	//
	// Addr is a read also, as any subsequent holder of the pointer must be able
	// to see all the values (including initialization) written so far.
	// This also prevents a variable from "coming back from the dead" and presenting
	// stale pointers to the garbage collector. See issue 28445.
	if e&(ssa.SymRead|ssa.SymAddr) != 0 {
		effect |= uevar
	}
	if e&ssa.SymWrite != 0 && (!isfat(n.Type()) || v.Op == ssa.OpVarDef) {
		effect |= varkill
	}

	if effect == 0 {
		return -1, 0
	}

	if pos, ok := lv.idx[n]; ok {
		return pos, effect
	}
	return -1, 0
}

// affectedVar returns the *ir.Name node affected by v.
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpArgIntReg:
		// An argument arriving in a register. Treating this as a
		// read of the corresponding named variable keeps the
		// argument live at function entry, which matters for
		// arguments that are only partially live (see
		// partLiveArgs).
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymRead

	case ssa.OpVarLive:
		return v.Aux.(*ir.Name), ssa.SymRead
	case ssa.OpVarDef, ssa.OpVarKill:
		return v.Aux.(*ir.Name), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	switch a := v.Aux.(type) {
	case nil, *obj.LSym:
		// ok, but no node
		return nil, e
	case *ir.Name:
		return a, e
	default:
		base.Fatalf("weird aux: %s", v.LongString())
		return nil, e
	}
}

type livenessFuncCache struct {
	be          []blockEffects
	livenessMap Map
}

// newliveness constructs the liveness state used to hold the global
// state of the liveness computation for a single function.
func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *liveness {
	lv := &liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		idx:        idx,
		stkptrsize: stkptrsize,
	}

	// Significant sources of allocation are kept in the ssa.Cache
	// and reused. Surprisingly, the bit vectors themselves aren't
	// a major source of allocation, but the liveness maps are.
	if lc, _ := f.Cache.Liveness.(*livenessFuncCache); lc == nil {
		// Prep the cache so liveness can fill it later.
		f.Cache.Liveness = new(livenessFuncCache)
	} else {
		if cap(lc.be) >= f.NumBlocks() {
			lv.be = lc.be[:f.NumBlocks()]
		}
		lv.livenessMap = Map{Vals: lc.livenessMap.Vals, DeferReturn: objw.LivenessDontCare}
		lc.livenessMap.Vals = nil
	}
	if lv.be == nil {
		lv.be = make([]blockEffects, f.NumBlocks())
	}

	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bitvec.NewBulk(nvars, nblocks*7)
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.Next()
		be.varkill = bulk.Next()
		be.livein = bulk.Next()
		be.liveout = bulk.Next()
	}
	lv.livenessMap.reset()

	lv.markUnsafePoints()

	lv.partLiveArgs = make(map[*ir.Name]bool)

	lv.enableClobber()

	return lv
}

func (lv *liveness) blockEffects(b *ssa.Block) *blockEffects {
	return &lv.be[b.ID]
}

// pointerMap generates the pointer bitmaps for the arguments (args) and
// the local variables (locals) from the set of live variables liveout.
func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !node.IsOutputParamInRegisters() {
				if node.FrameOffset() < 0 {
					lv.f.Fatalf("Node %v has frameoffset %d\n", node.Sym().Name, node.FrameOffset())
				}
				typebits.Set(node.Type(), node.FrameOffset(), args)
				break
			}
			fallthrough // PPARAMOUT in registers acts memory-allocates like a PAUTO
		case ir.PAUTO:
			typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
		}
	}
}
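
// For example, with 8-byte pointers, a pointer-typed PPARAM at frame
// offset 0 sets bit 0 of args, while a pointer-typed PAUTO whose
// FrameOffset()+stkptrsize works out to 8 sets bit 1 of locals.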

// IsUnsafe indicates that all points in this function are
// unsafe-points.
func IsUnsafe(f *ssa.Func) bool {
	// The runtime was written with the assumption that
	// safe-points only appear at call sites (because that's how
	// it used to be). We could and should improve that, but for
	// now keep the old safe-point rules in the runtime.
	//
	// go:nosplit functions are similar. Since safe points used to
	// be coupled with stack checks, go:nosplit often actually
	// means "no safe points in this function".
	return base.Flag.CompilingRuntime || f.NoSplit
}

// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
func (lv *liveness) markUnsafePoints() {
	if IsUnsafe(lv.f) {
		// No complex analysis necessary.
		lv.allUnsafe = true
		return
	}

	lv.unsafePoints = bitvec.New(int32(lv.f.NumValues()))

	// Mark architecture-specific unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op.UnsafePoint() {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	// Mark write barrier unsafe points.
	for _, wbBlock := range lv.f.WBLoads {
		if wbBlock.Kind == ssa.BlockPlain && len(wbBlock.Values) == 0 {
			// The write barrier block was optimized away
			// but we haven't done dead block elimination.
			// (This can happen in -N mode.)
			continue
		}
		// Check that we have the expected diamond shape.
		if len(wbBlock.Succs) != 2 {
			lv.f.Fatalf("expected branch at write barrier block %v", wbBlock)
		}
		s0, s1 := wbBlock.Succs[0].Block(), wbBlock.Succs[1].Block()
		if s0 == s1 {
			// Both successors are the same block: the write
			// barrier on/off paths were merged, so there is
			// nothing to mark.
			continue
		}
		if s0.Kind != ssa.BlockPlain || s1.Kind != ssa.BlockPlain {
			lv.f.Fatalf("expected successors of write barrier block %v to be plain", wbBlock)
		}
		if s0.Succs[0].Block() != s1.Succs[0].Block() {
			lv.f.Fatalf("expected successors of write barrier block %v to converge", wbBlock)
		}

		// Flow backwards from the control value to find the
		// flag load. We don't know what lowered ops we're
		// looking for, but all current arches produce a
		// single op that does the memory load from the flag
		// address, so we look for that.
		var load *ssa.Value
		v := wbBlock.Controls[0]
		for {
			if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
				load = v
				break
			}
			switch v.Op {
			case ssa.Op386TESTL:
				// 386 lowers Neq32 to (TESTL cond cond),
				if v.Args[0] == v.Args[1] {
					v = v.Args[0]
					continue
				}
			case ssa.Op386MOVLload, ssa.OpARM64MOVWUload, ssa.OpPPC64MOVWZload, ssa.OpWasmI64Load32U:
				// Args[0] is the address of the write
				// barrier control. Ignore Args[1],
				// which is the mem operand.
				// TODO: Just ignore mem operands?
				v = v.Args[0]
				continue
			}
			// Common case: just flow backwards.
			if len(v.Args) != 1 {
				v.Fatalf("write barrier control value has more than one argument: %s", v.LongString())
			}
			v = v.Args[0]
		}

		// Mark everything after the load unsafe.
		found := false
		for _, v := range wbBlock.Values {
			found = found || v == load
			if found {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}

		// Mark the two successor blocks unsafe. These come
		// back together immediately after the direct write in
		// one successor and the last write barrier call in
		// the other, so there's no need to be more precise.
		for _, succ := range wbBlock.Succs {
			for _, v := range succ.Block().Values {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	// Find uintptr -> unsafe.Pointer conversions and flood
	// unsafeness back to a call (which is always a safe point).
	//
	// Looking for the uintptr -> unsafe.Pointer conversion has a
	// few advantages over looking for unsafe.Pointer -> uintptr
	// conversions:
	//
	// 1. We avoid needlessly blocking safe-points for
	// unsafe.Pointer -> uintptr conversions that never go back to
	// a Pointer.
	//
	// 2. We don't have to detect calls to reflect.Value.Pointer,
	// reflect.Value.UnsafeAddr, and reflect.Value.InterfaceData,
	// which are implicit unsafe.Pointer -> uintptr conversions.
	// We can't even reliably detect this if there's an indirect
	// call to one of these methods.
	var flooded bitvec.BitVec
	var flood func(b *ssa.Block, vi int)
	flood = func(b *ssa.Block, vi int) {
		if flooded.N == 0 {
			// Lazily allocate the flooded set on first use.
			flooded = bitvec.New(int32(lv.f.NumBlocks()))
		}
		if flooded.Get(int32(b.ID)) {
			return
		}
		for i := vi - 1; i >= 0; i-- {
			v := b.Values[i]
			if v.Op.IsCall() {
				// Calls are always safe points, and a
				// uintptr must not hold a live pointer
				// across a call, so stop flooding here.
				return
			}
			lv.unsafePoints.Set(int32(v.ID))
		}
		if vi == len(b.Values) {
			// We marked all values in this block, so no
			// need to flood this block again.
			flooded.Set(int32(b.ID))
		}
		for _, pred := range b.Preds {
			flood(pred.Block(), len(pred.Block().Values))
		}
	}
	for _, b := range lv.f.Blocks {
		for i, v := range b.Values {
			if !(v.Op == ssa.OpConvert && v.Type.IsPtrShaped()) {
				continue
			}
			// Flood the unsafe-ness of this backwards
			// until we hit a call.
			flood(b, i+1)
		}
	}
}
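
// For example, in code like
//
//	u := uintptr(unsafe.Pointer(p))
//	u += 8
//	q := unsafe.Pointer(u)
//
// every value from the preceding call (or function entry) through the
// final Convert is marked unsafe, so the garbage collector never
// observes the window in which the pointer exists only as a uintptr.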

// hasStackMap reports whether value v needs a stack map.
//
// This does not necessarily mean the value is a safe-point. In
// particular, call Values can have a stack map in case the callee
// grows the stack, but not themselves be a safe-point.
func (lv *liveness) hasStackMap(v *ssa.Value) bool {
	if !v.Op.IsCall() {
		return false
	}
	// Calls to typedmemclr and typedmemmove are treated specially
	// by the compiler and do not need stack maps at the call site.
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.Typedmemclr || sym.Fn == ir.Syms.Typedmemmove) {
		return false
	}
	return true
}

// prologue initializes the sets for solving the live variables. It
// visits all the instructions in each basic block to summarize the
// block's effects on the tracked variables.
func (lv *liveness) prologue() {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the
		// block effects with each value's effects.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}
	}
}

// solve solves the liveness dataflow equations.
func (lv *liveness) solve() {
	// These temporary bitvectors exist to avoid successive allocations and
	// frees within the loop.
	nvars := int32(len(lv.vars))
	newlivein := bitvec.New(nvars)
	newliveout := bitvec.New(nvars)

	// Walk blocks in postorder ordering. This improves convergence.
	po := lv.f.Postorder()

	// Iterate through the blocks in reverse round-robin fashion. A work
	// queue might be slightly faster. As is, the number of iterations is
	// so low that it hardly seems to be worth the complexity.
	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// panic exit; nothing is live
			default:
				// A variable is live on output from this block
				// if it is live on input to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on input to this block
			// if it is used by this block, or live on output
			// from this block and not set by the code in this
			// block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}
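
// As a small worked example, consider a loop in which block b1 reads v
// (uevar={v}) and its successor b2 writes v (varkill={v}) before
// branching back to b1. The equations converge to livein[b1]={v} and
// liveout[b2]={v}, but livein[b2]={}: v is dead on entry to b2 because
// b2 redefines it before any use.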

// epilogue visits all instructions in each basic block and computes a
// bit vector of live variables at each safe point location.
func (lv *liveness) epilogue() {
	nvars := int32(len(lv.vars))
	liveout := bitvec.New(nvars)
	livedefer := bitvec.New(nvars) // always-live variables

	// If there is a defer (that could recover), then all output
	// parameters are live all the time. In addition, any locals
	// that are pointers to heap-allocated output parameters are
	// also always live (post-deferreturn code needs these
	// pointers to copy values back to the stack).
	// TODO: if the output parameter is heap-allocated, then we
	// don't need to keep the stack copy live?
	if lv.fn.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == ir.PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid. Heap addresses are PAUTOs.
					base.Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Heapaddr != nil {
					// If this variable moved to the heap, then
					// its stack copy is not live.
					continue
				}
				// Note: zeroing is handled by zeroResults in walk.go.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				// This variable will be overwritten early in the function
				// prologue (from the result of a mallocgc) but we need to
				// zero it in case that malloc causes a stack scan.
				n.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
			if n.OpenDeferSlot() {
				// Open-coded defer args slots must be live (because of
				// their use in recovery) and must not be reclaimed until
				// the function returns.
				livedefer.Set(int32(i))
				// It was already marked as Needzero when created.
				if !n.Needzero() {
					base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
				}
			}
		}
	}

	// We must analyze the entry block first. The runtime assumes
	// the function entry map is index 0. Conveniently, layout
	// already ensured that the entry block is first.
	if lv.f.Entry != lv.f.Blocks[0] {
		lv.f.Fatalf("entry block must be first")
	}

	{
		// Reserve an entry for function entry.
		live := bitvec.New(nvars)
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk forward through the basic block instructions,
		// reserving a bit vector for each instruction that
		// needs a stack map.
		for _, v := range b.Values {
			if !lv.hasStackMap(v) {
				continue
			}

			live := bitvec.New(nvars)
			lv.livevars = append(lv.livevars, live)
		}

		// walk backward, construct maps at each safe point
		index := int32(len(lv.livevars) - 1)

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if lv.hasStackMap(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := &lv.livevars[index]
				live.Or(*live, liveout)
				live.Or(*live, livedefer) // only for non-entry safe points
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				base.Fatalf("bad index for entry point: %v", index)
			}

			// Check to make sure only input variables are live.
			for i, n := range lv.vars {
				if !liveout.Get(int32(i)) {
					continue
				}
				if n.Class == ir.PPARAM {
					continue // ok
				}
				base.FatalfAt(n.Pos(), "bad live variable at entry of %v: %L", lv.fn.Nname, n)
			}

			// Record live variables.
			live := &lv.livevars[index]
			live.Or(*live, liveout)
		}

		if lv.doClobber {
			lv.clobber(b)
		}

		// The liveness maps for this block are now complete. Compact them.
		lv.compact(b)
	}

	// If we have an open-coded deferreturn call, make a liveness map for it.
	if lv.fn.OpenCodedDeferDisallowed() {
		lv.livenessMap.DeferReturn = objw.LivenessDontCare
	} else {
		idx, _ := lv.stackMapSet.add(livedefer)
		lv.livenessMap.DeferReturn = objw.LivenessIndex{
			StackMapIndex: idx,
			IsUnsafePoint: false,
		}
	}

	// Done compacting. Throw out the stack map set.
	lv.stackMaps = lv.stackMapSet.extractUnique()
	lv.stackMapSet = bvecSet{}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
		}
	}
}

// compact coalesces identical bitmaps from lv.livevars into the set
// lv.stackMapSet and records each value's map index in lv.livenessMap.
//
// compact clears lv.livevars.
//
// There are actually two lists of bitmaps, one list for the local
// variables and one list for the function arguments. Both lists are
// indexed by the same PCDATA index, so the corresponding pairs must be
// considered together when merging duplicates. The argument bitmaps
// change much less often during function execution than the local
// variable bitmaps, so it is possible that we could introduce a
// separate PCDATA index for arguments and let the two lists be
// compacted independently.
func (lv *liveness) compact(b *ssa.Block) {
	pos := 0
	if b == lv.f.Entry {
		// Handle entry stack map.
		lv.stackMapSet.add(lv.livevars[0])
		pos++
	}
	for _, v := range b.Values {
		hasStackMap := lv.hasStackMap(v)
		isUnsafePoint := lv.allUnsafe || v.Op != ssa.OpClobber && lv.unsafePoints.Get(int32(v.ID))
		idx := objw.LivenessIndex{StackMapIndex: objw.StackMapDontCare, IsUnsafePoint: isUnsafePoint}
		if hasStackMap {
			idx.StackMapIndex, _ = lv.stackMapSet.add(lv.livevars[pos])
			pos++
		}
		if hasStackMap || isUnsafePoint {
			lv.livenessMap.set(v, idx)
		}
	}

	// Reset livevars.
	lv.livevars = lv.livevars[:0]
}
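
// For example, if two calls in a function happen to have identical live
// sets, both are assigned the same StackMapIndex and only one bitmap is
// ultimately emitted.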

func (lv *liveness) enableClobber() {
	// The clobberdead experiment inserts code to clobber pointer slots in all
	// the dead variables (locals and args) at every synchronous safepoint.
	if !base.Flag.ClobberDead {
		return
	}
	if lv.fn.Pragma&ir.CgoUnsafeArgs != 0 {
		// C or assembly code uses the exact frame layout. Don't clobber.
		return
	}
	if len(lv.vars) > 10000 || len(lv.f.Blocks) > 10000 {
		// Be careful to avoid doing too much work.
		// Bail if >10000 variables or >10000 blocks.
		// Otherwise, giant functions make this experiment generate too much code.
		return
	}
	if lv.f.Name == "forkAndExecInChild" {
		// forkAndExecInChild calls vfork on some platforms.
		// The code we add here clobbers parts of the stack in the child.
		// When the parent resumes, it is using the same stack frame. But the
		// child has clobbered stack variables that the parent needs. Boom!
		// In particular, the sys argument gets clobbered.
		return
	}
	if lv.f.Name == "wbBufFlush" ||
		((lv.f.Name == "callReflect" || lv.f.Name == "callMethod") && lv.fn.ABIWrapper()) {
		// runtime.wbBufFlush must not modify its arguments. See the comments
		// in runtime/mwbbuf.go:wbBufFlush.
		//
		// reflect.callReflect and reflect.callMethod are called from special
		// functions makeFuncStub and methodValueCall. The runtime expects
		// that it can find the first argument (ctxt) at 0(SP) in makeFuncStub
		// and methodValueCall's frame (see runtime/traceback.go:getArgInfo).
		// Normally callReflect and callMethod already do not modify the
		// argument, and keep it alive. But the compiler-generated ABI wrappers
		// don't do that. Clobbering the arguments will confuse the runtime.
		lv.noClobberArgs = true
	}
	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
		// Clobber only functions where the hash of the function name matches a pattern.
		// Useful for binary searching for a miscompiled function.
		hstr := ""
		for _, b := range sha1.Sum([]byte(lv.f.Name)) {
			hstr += fmt.Sprintf("%08b", b)
		}
		if !strings.HasSuffix(hstr, h) {
			return
		}
		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.f.Name)
	}
	lv.doClobber = true
}

// clobber inserts code to clobber pointer slots in all the dead
// variables (locals and args) at every synchronous safepoint in b.
func (lv *liveness) clobber(b *ssa.Block) {
	// Copy block's values to a temporary.
	oldSched := append([]*ssa.Value{}, b.Values...)
	b.Values = b.Values[:0]
	idx := 0

	// Clobber pointer slots in all dead variables at entry.
	if b == lv.f.Entry {
		for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
			// Skip argless ops. We need to skip at least
			// the lowered ClosurePtr op, because it
			// really wants to be first. This will also
			// skip ops like InitMem and SP, which are ok.
			b.Values = append(b.Values, oldSched[0])
			oldSched = oldSched[1:]
		}
		clobber(lv, b, lv.livevars[0])
		idx++
	}

	// Copy values into schedule, adding clobbering around safepoints.
	for _, v := range oldSched {
		if !lv.hasStackMap(v) {
			b.Values = append(b.Values, v)
			continue
		}
		clobber(lv, b, lv.livevars[idx])
		b.Values = append(b.Values, v)
		idx++
	}
}

// clobber generates code to clobber pointer slots in all dead variables
// (those not marked in live). Clobbering instructions are added to the end
// of b.Values.
func clobber(lv *liveness, b *ssa.Block, live bitvec.BitVec) {
	for i, n := range lv.vars {
		if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() {
			// Don't clobber stack objects (address-taken). They are
			// tracked dynamically.
			// Also don't clobber slots that are live for defers (see
			// the code setting livedefer in epilogue).
			if lv.noClobberArgs && n.Class == ir.PPARAM {
				continue
			}
			clobberVar(b, n)
		}
	}
}

// clobberVar generates code to trash the pointers in v.
// Clobbering instructions are added to the end of b.Values.
func clobberVar(b *ssa.Block, v *ir.Name) {
	clobberWalk(b, v, 0, v.Type())
}

// b = block to which we append instructions
// v = variable
// offset = offset of (sub-portion of) variable to clobber (in bytes)
// t = type of sub-portion of v.
func clobberWalk(b *ssa.Block, v *ir.Name, offset int64, t *types.Type) {
	if !t.HasPointers() {
		return
	}
	switch t.Kind() {
	case types.TPTR,
		types.TUNSAFEPTR,
		types.TFUNC,
		types.TCHAN,
		types.TMAP:
		clobberPtr(b, v, offset)

	case types.TSTRING:
		// struct { byte *str; int len }
		clobberPtr(b, v, offset)

	case types.TINTER:
		// struct { Itab *tab; void *data }
		// or, when isnilinter(t)==true:
		// struct { Type *type; void *data }
		clobberPtr(b, v, offset)
		clobberPtr(b, v, offset+int64(types.PtrSize))

	case types.TSLICE:
		// struct { byte *array; int len; int cap }
		clobberPtr(b, v, offset)

	case types.TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
		}

	case types.TSTRUCT:
		for _, t1 := range t.Fields().Slice() {
			clobberWalk(b, v, offset+t1.Offset, t1.Type)
		}

	default:
		base.Fatalf("clobberWalk: unexpected type, %v", t)
	}
}
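
// For example, clobbering a dead variable of type struct{ s string; n int }
// poisons only the word at offset 0 (the string's data pointer); the
// string length and the integer field are left untouched.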

// clobberPtr generates a clobber of the pointer at offset offset in v.
// The clobber instruction is added at the end of b.
func clobberPtr(b *ssa.Block, v *ir.Name, offset int64) {
	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
}

// showlive reports the set of live variables at a safe point, for the
// -live flag.
func (lv *liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
	if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
		return
	}
	if lv.fn.Wrapper() || lv.fn.Dupok() {
		// Skip reporting liveness information for compiler-generated wrappers.
		return
	}
	if !(v == nil || v.Op.IsCall()) {
		// Historically we only printed this information at
		// calls. Keep doing so.
		return
	}
	if live.IsEmpty() {
		return
	}

	pos := lv.fn.Nname.Pos()
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
	} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		fn := sym.Fn.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			s += fmt.Sprintf(" %v", n)
		}
	}

	base.WarnfAt(pos, s)
}

// printbvec prints the named bit vector as a comma-separated list of
// the variables whose bits are set.
func (lv *liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
	if live.IsEmpty() {
		return printed
	}

	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)

	comma := ""
	for i, n := range lv.vars {
		if !live.Get(int32(i)) {
			continue
		}
		fmt.Printf("%s%s", comma, n.Sym().Name)
		comma = ","
	}
	return true
}

// printeffect is like printbvec, but for a single variable's effect.
func (lv *liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
	if !x {
		return printed
	}
	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=%s", name, lv.vars[pos].Sym().Name)

	return true
}

// printDebug prints the computed liveness information and inputs, for
// debugging. This format synthesizes the information used during the
// multiple passes into a single presentation.
func (lv *liveness) printDebug() {
	fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))

	for i, b := range lv.f.Blocks {
		if i > 0 {
			fmt.Printf("\n")
		}

		// bb#0 pred=1,2 succ=3
		fmt.Printf("bb#%d pred=", b.ID)
		for j, pred := range b.Preds {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", pred.Block().ID)
		}
		fmt.Printf(" succ=")
		for j, succ := range b.Succs {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", succ.Block().ID)
		}
		fmt.Printf("\n")

		be := lv.blockEffects(b)

		// initial settings
		printed := false
		printed = lv.printbvec(printed, "uevar", be.uevar)
		printed = lv.printbvec(printed, "livein", be.livein)
		if printed {
			fmt.Printf("\n")
		}

		// program listing, with individual effects listed
		if b == lv.f.Entry {
			live := lv.stackMaps[0]
			fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Nname.Pos()))
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		for _, v := range b.Values {
			fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())

			pcdata := lv.livenessMap.Get(v)

			pos, effect := lv.valueEffects(v)
			printed = false
			printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
			printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
			if printed {
				fmt.Printf("\n")
			}

			if pcdata.StackMapValid() {
				fmt.Printf("\tlive=")
				printed = false
				live := lv.stackMaps[pcdata.StackMapIndex]
				for j, n := range lv.vars {
					if !live.Get(int32(j)) {
						continue
					}
					if printed {
						fmt.Printf(",")
					}
					fmt.Printf("%v", n)
					printed = true
				}
				fmt.Printf("\n")
			}

			if pcdata.IsUnsafePoint {
				fmt.Printf("\tunsafe-point\n")
			}
		}

		// bb bitsets
		fmt.Printf("end\n")
		printed = false
		printed = lv.printbvec(printed, "varkill", be.varkill)
		printed = lv.printbvec(printed, "liveout", be.liveout)
		if printed {
			fmt.Printf("\n")
		}
	}

	fmt.Printf("\n")
}

// emit dumps the stack maps to two symbols, one for the argument
// bitmaps and one for the locals bitmaps. In each symbol, the first
// word is the total number of bitmaps and the second word is the
// length of each bitmap in bits; the raw bitmaps follow.
func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) {
	// Size args bitmaps to be just large enough to hold the largest pointer.
	// First, find the largest Xoffset node we care about.
	// (Nodes without pointers aren't in lv.vars; see shouldTrack.)
	var maxArgNode *ir.Name
	for _, n := range lv.vars {
		switch n.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !n.IsOutputParamInRegisters() {
				if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
					maxArgNode = n
				}
			}
		}
	}
	// Next, find the offset of the largest pointer in the largest node.
	var maxArgs int64
	if maxArgNode != nil {
		maxArgs = maxArgNode.FrameOffset() + types.PtrDataSize(maxArgNode.Type())
	}

	// Size locals bitmaps to be stkptrsize sized.
	// We cannot shrink them to only hold the largest pointer,
	// because their size is used to calculate the beginning
	// of the local variables frame.
	// Further discussion in https://golang.org/cl/104175.
	// TODO: consider trimming leading zeros.
	maxLocals := lv.stkptrsize

	// Temporary symbols for encoding bitmaps.
	var argsSymTmp, liveSymTmp obj.LSym

	args := bitvec.New(int32(maxArgs / int64(types.PtrSize)))
	aoff := objw.Uint32(&argsSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	aoff = objw.Uint32(&argsSymTmp, aoff, uint32(args.N))          // number of bits in each bitmap

	locals := bitvec.New(int32(maxLocals / int64(types.PtrSize)))
	loff := objw.Uint32(&liveSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	loff = objw.Uint32(&liveSymTmp, loff, uint32(locals.N))        // number of bits in each bitmap

	for _, live := range lv.stackMaps {
		args.Clear()
		locals.Clear()

		lv.pointerMap(live, lv.vars, args, locals)

		aoff = objw.BitVec(&argsSymTmp, aoff, args)
		loff = objw.BitVec(&liveSymTmp, loff, locals)
	}

	// Give these LSyms content-addressable names,
	// so that they can be de-duplicated.
	// This provides significant binary size savings.
	//
	// These symbols will be added to Ctxt.Data by addGCLocals
	// after parallel compilation is done.
	return base.Ctxt.GCLocalsSym(argsSymTmp.P), base.Ctxt.GCLocalsSym(liveSymTmp.P)
}

// Compute is the entry point for the liveness analysis. It solves for
// the liveness of pointer variables in the function and emits the
// runtime data structures read by the garbage collector.
// It returns a map from GC safe points to their corresponding stack map
// index, and a map that contains all input parameters that may be
// partially live.
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) (Map, map[*ir.Name]bool) {
	// Construct the global liveness state.
	vars, idx := getvariables(curfn)
	lv := newliveness(curfn, f, vars, idx, stkptrsize)

	// Run the dataflow framework.
	lv.prologue()
	lv.solve()
	lv.epilogue()
	if base.Flag.Live > 0 {
		lv.showlive(nil, lv.stackMaps[0])
		for _, b := range f.Blocks {
			for _, val := range b.Values {
				if idx := lv.livenessMap.Get(val); idx.StackMapValid() {
					lv.showlive(val, lv.stackMaps[idx.StackMapIndex])
				}
			}
		}
	}
	if base.Flag.Live >= 2 {
		lv.printDebug()
	}

	// Return the computed state to the function cache for reuse.
	{
		cache := f.Cache.Liveness.(*livenessFuncCache)
		if cap(lv.be) < 2000 { // Threshold from ssa.Cache slices.
			for i := range lv.be {
				lv.be[i] = blockEffects{}
			}
			cache.be = lv.be
		}
		if len(lv.livenessMap.Vals) < 2000 {
			cache.livenessMap = lv.livenessMap
		}
	}

	// Emit the live pointer map data structures
	ls := curfn.LSym
	fninfo := ls.Func()
	fninfo.GCArgs, fninfo.GCLocals = lv.emit()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(objabi.FUNCDATA_ArgsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCArgs

	p = pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(objabi.FUNCDATA_LocalsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCLocals

	if x := lv.emitStackObjects(); x != nil {
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(objabi.FUNCDATA_StackObjects)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = x
	}

	return lv.livenessMap, lv.partLiveArgs
}

// emitStackObjects emits a FUNCDATA symbol describing the address-taken
// stack variables (stack objects), or returns nil if there are none.
func (lv *liveness) emitStackObjects() *obj.LSym {
	var vars []*ir.Name
	for _, n := range lv.fn.Dcl {
		if shouldTrack(n) && n.Addrtaken() && n.Esc() != ir.EscHeap {
			vars = append(vars, n)
		}
	}
	if len(vars) == 0 {
		return nil
	}

	// Sort variables from lowest to highest address.
	sort.Slice(vars, func(i, j int) bool { return vars[i].FrameOffset() < vars[j].FrameOffset() })

	// Populate the stack object data.
	// Format must match runtime/stack.go:stackObjectRecord.
	x := base.Ctxt.Lookup(lv.fn.LSym.Name + ".stkobj")
	x.Set(obj.AttrContentAddressable, true)
	lv.fn.LSym.Func().StackObjects = x
	off := 0
	off = objw.Uintptr(x, off, uint64(len(vars)))
	for _, v := range vars {
		// Frame offsets and sizes are stored as 32-bit values in
		// the stack object record, so check that they fit.
		frameOffset := v.FrameOffset()
		if frameOffset != int64(int32(frameOffset)) {
			base.Fatalf("frame offset too big: %v %d", v, frameOffset)
		}
		off = objw.Uint32(x, off, uint32(frameOffset))

		t := v.Type()
		sz := t.Size()
		if sz != int64(int32(sz)) {
			base.Fatalf("stack object too big: %v of type %v, size %d", v, t, sz)
		}
		lsym, useGCProg, ptrdata := reflectdata.GCSym(t)
		if useGCProg {
			ptrdata = -ptrdata // negative ptrdata signals a GC program rather than a bitmap
		}
		off = objw.Uint32(x, off, uint32(sz))
		off = objw.Uint32(x, off, uint32(ptrdata))
		off = objw.SymPtrOff(x, off, lsym)
	}

	if base.Flag.Live != 0 {
		for _, v := range vars {
			base.WarnfAt(v.Pos(), "stack object %v %v", v, v.Type())
		}
	}

	return x
}

// isfat reports whether a variable of type t needs multiple assignments
// to initialize. For example:
//
//	type T struct{ x, y int }
//	x := T{x: 0, y: 1}
//
// Then we need:
//
//	var t T
//	t.x = 0
//	t.y = 1
//
// to fully initialize t.
func isfat(t *types.Type) bool {
	if t != nil {
		switch t.Kind() {
		case types.TSLICE, types.TSTRING,
			types.TINTER: // maybe remove later
			return true
		case types.TARRAY:
			// Array of 1 element, check if element is fat
			if t.NumElem() == 1 {
				return isfat(t.Elem())
			}
			return true
		case types.TSTRUCT:
			// Struct with 1 field, check if field is fat
			if t.NumFields() == 1 {
				return isfat(t.Field(0).Type)
			}
			return true
		}
	}

	return false
}

// WriteFuncMap writes the pointer bitmaps for bodyless function fn's
// inputs and outputs as the value of symbol <fn>.args_stackmap.
// If fn has outputs, two bitmaps are written: the outputs are not live
// at function entry but are live when the function returns.
func WriteFuncMap(fn *ir.Func, abiInfo *abi.ABIParamResultInfo) {
	if ir.FuncName(fn) == "_" || fn.Sym().Linkname != "" {
		return
	}
	nptr := int(abiInfo.ArgWidth() / int64(types.PtrSize))
	bv := bitvec.New(int32(nptr) * 2)

	for _, p := range abiInfo.InParams() {
		typebits.Set(p.Type, p.FrameOffset(abiInfo), bv)
	}

	nbitmap := 1
	if fn.Type().NumResults() > 0 {
		nbitmap = 2
	}
	lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
	off := objw.Uint32(lsym, 0, uint32(nbitmap))
	off = objw.Uint32(lsym, off, uint32(bv.N))
	off = objw.BitVec(lsym, off, bv)

	if fn.Type().NumResults() > 0 {
		// Add the stack-assigned results to the second bitmap.
		for _, p := range abiInfo.OutParams() {
			if len(p.Registers) == 0 {
				typebits.Set(p.Type, p.FrameOffset(abiInfo), bv)
			}
		}
		off = objw.BitVec(lsym, off, bv)
	}

	objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
}
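
// The symbol layout is thus: a uint32 bitmap count (1 or 2), a uint32
// bit length, then the bitmaps themselves. The first bitmap covers only
// the input parameters; the second, when present, additionally marks
// the stack-assigned results.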