package ssagen

import (
	"internal/buildcfg"
	"internal/race"
	"math/rand"
	"sort"
	"sync"
	"time"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
	"cmd/internal/src"
)

// cmpstackvarlt reports whether the stack variable a sorts before b.
//
// Non-autos (parameters and results) sort before autos, ordered by their
// ABI-defined frame offsets. Within autos, used variables sort before unused
// ones, pointer-typed before pointerless, variables that need zeroing before
// those that don't, then by decreasing size, with the symbol name as the
// final tie-breaker. Because autos are laid out at decreasing addresses on
// the stack, this keeps pointer-containing and zeroed memory grouped
// together.
func cmpstackvarlt(a, b *ir.Name) bool {
	if needAlloc(a) != needAlloc(b) {
		return needAlloc(b)
	}

	if !needAlloc(a) {
		return a.FrameOffset() < b.FrameOffset()
	}

	if a.Used() != b.Used() {
		return a.Used()
	}

	ap := a.Type().HasPointers()
	bp := b.Type().HasPointers()
	if ap != bp {
		return ap
	}

	ap = a.Needzero()
	bp = b.Needzero()
	if ap != bp {
		return ap
	}

	if a.Type().Size() != b.Type().Size() {
		return a.Type().Size() > b.Type().Size()
	}

	return a.Sym().Name < b.Sym().Name
}

// byStackVar implements sort.Interface for []*ir.Name using cmpstackvarlt.
type byStackVar []*ir.Name

func (s byStackVar) Len() int           { return len(s) }
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
func (s byStackVar) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

// needAlloc reports whether n is within the current frame, for which we need
// to allocate space. In particular, it excludes arguments and results, which
// are in the caller's frame.
func needAlloc(n *ir.Name) bool {
	if n.Op() != ir.ONAME {
		base.FatalfAt(n.Pos(), "%v has unexpected Op %v", n, n.Op())
	}

	switch n.Class {
	case ir.PAUTO:
		return true
	case ir.PPARAM:
		return false
	case ir.PPARAMOUT:
		return n.IsOutputParamInRegisters()

	default:
		base.FatalfAt(n.Pos(), "%v has unexpected Class %v", n, n.Class)
		return false
	}
}

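// AllocFrame lays out the stack frame for f: it marks which autos (and
// in-register results) are actually used, trims the unused ones from fn.Dcl,
// assigns negative frame offsets to the rest, and records the total frame
// size and the size of its pointer-containing prefix in s.stksize and
// s.stkptrsize.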
func (s *ssafn) AllocFrame(f *ssa.Func) {
	s.stksize = 0
	s.stkptrsize = 0
	fn := s.curfn

	// Mark the PAUTOs unused; the passes below mark the ones that are used.
	for _, ln := range fn.Dcl {
		if needAlloc(ln) {
			ln.SetUsed(false)
		}
	}

	for _, l := range f.RegAlloc {
		if ls, ok := l.(ssa.LocalSlot); ok {
			ls.N.SetUsed(true)
		}
	}

	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if n, ok := v.Aux.(*ir.Name); ok {
				switch n.Class {
				case ir.PPARAMOUT:
					if n.IsOutputParamInRegisters() && v.Op == ssa.OpVarDef {
						// A VarDef of a result that lives in registers is not
						// a real use of its stack slot; keep looking for real uses.
						continue
					}
					fallthrough
				case ir.PPARAM, ir.PAUTO:
					n.SetUsed(true)
				}
			}
		}
	}

	// Use sort.Stable instead of sort.Sort so that the stack layout (and
	// therefore compiler output) is less sensitive to frontend changes that
	// introduce or remove unused variables.
	sort.Stable(byStackVar(fn.Dcl))

	// Reassign stack offsets of the locals that are used.
	lastHasPtr := false
	for i, n := range fn.Dcl {
		if n.Op() != ir.ONAME || n.Class != ir.PAUTO && !(n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters()) {
			// Only autos and in-register results need stack slots assigned here.
			continue
		}
		if !n.Used() {
			fn.Dcl = fn.Dcl[:i]
			break
		}

		types.CalcSize(n.Type())
		w := n.Type().Size()
		if w >= types.MaxWidth || w < 0 {
			base.Fatalf("bad width")
		}
		if w == 0 && lastHasPtr {
			// Pad between a pointer-containing object and a zero-sized object.
			// This prevents a pointer to the zero-sized object from being
			// interpreted as a pointer to the pointer-containing object,
			// which would cause it to be scanned when it shouldn't be.
			w = 1
		}
		s.stksize += w
		s.stksize = types.Rnd(s.stksize, n.Type().Alignment())
		if n.Type().HasPointers() {
			s.stkptrsize = s.stksize
			lastHasPtr = true
		} else {
			lastHasPtr = false
		}
		n.SetFrameOffset(-s.stksize)
	}

	s.stksize = types.Rnd(s.stksize, int64(types.RegSize))
	s.stkptrsize = types.Rnd(s.stkptrsize, int64(types.RegSize))
}
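// maxStackSize bounds the locals, arguments, and callee space of a single
// stack frame; frames that reach it are recorded in largeStackFrames and
// reported later by CheckLargeStacks.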
const maxStackSize = 1 << 30

// Compile builds an SSA backend function, uses it to generate a plist, and
// flushes that plist to machine code. worker indicates which of the backend
// workers is doing the processing.
func Compile(fn *ir.Func, worker int) {
	f := buildssa(fn, worker)
	// Check the frame and argument sizes before generating code.
	if f.Frontend().(*ssafn).stksize >= maxStackSize || f.OwnAux.ArgWidth() >= maxStackSize {
		largeStackFramesMu.Lock()
		largeStackFrames = append(largeStackFrames, largeStack{locals: f.Frontend().(*ssafn).stksize, args: f.OwnAux.ArgWidth(), pos: fn.Pos()})
		largeStackFramesMu.Unlock()
		return
	}
	pp := objw.NewProgs(fn, worker)
	defer pp.Free()
	genssa(f, pp)

	// Check the frame size again. The check above covered only the space
	// needed for local variables; after genssa, pp.Text.To.Offset also
	// includes the callee argument region. This must happen before
	// pp.Flush: if any frame is oversized, the assembler may otherwise
	// emit inscrutable complaints about invalid instructions.
	if pp.Text.To.Offset >= maxStackSize {
		largeStackFramesMu.Lock()
		locals := f.Frontend().(*ssafn).stksize
		largeStackFrames = append(largeStackFrames, largeStack{locals: locals, args: f.OwnAux.ArgWidth(), callee: pp.Text.To.Offset - locals, pos: fn.Pos()})
		largeStackFramesMu.Unlock()
		return
	}

	pp.Flush() // assemble, fill in boilerplate, etc.
	// fieldtrack must be called after pp.Flush.
	fieldtrack(pp.Text.From.Sym, fn.FieldTrack)
}
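// When the compiler itself is built with the race detector, parts of the
// backend randomize their work (such as the order in which functions are
// compiled) to help shake out races; seeding math/rand here makes that
// randomization differ between runs.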
func init() {
	if race.Enabled {
		rand.Seed(time.Now().UnixNano())
	}
}

// StackOffset returns the stack location of a LocalSlot relative to the
// stack pointer, suitable for use in a DWARF location entry. This has
// nothing to do with its offset in the user variable.
func StackOffset(slot ssa.LocalSlot) int32 {
	n := slot.N
	var off int64
	switch n.Class {
	case ir.PPARAM, ir.PPARAMOUT:
		if !n.IsOutputParamInRegisters() {
			off = n.FrameOffset() + base.Ctxt.FixedFrameSize()
			break
		}
		fallthrough // in-register results are allocated like autos
	case ir.PAUTO:
		off = n.FrameOffset()
		if base.Ctxt.FixedFrameSize() == 0 {
			off -= int64(types.PtrSize)
		}
		if buildcfg.FramePointerEnabled {
			off -= int64(types.PtrSize)
		}
	}
	return int32(off + slot.Off)
}

// fieldtrack adds R_USEFIELD relocations to fnsym to record any
// struct fields that it used.
func fieldtrack(fnsym *obj.LSym, tracked map[*obj.LSym]struct{}) {
	if fnsym == nil {
		return
	}
	if !buildcfg.Experiment.FieldTrack || len(tracked) == 0 {
		return
	}

	trackSyms := make([]*obj.LSym, 0, len(tracked))
	for sym := range tracked {
		trackSyms = append(trackSyms, sym)
	}
	sort.Slice(trackSyms, func(i, j int) bool { return trackSyms[i].Name < trackSyms[j].Name })
	for _, sym := range trackSyms {
		r := obj.Addrel(fnsym)
		r.Sym = sym
		r.Type = objabi.R_USEFIELD
	}
}

// largeStack records a function whose stack frame is too large (rare).
type largeStack struct {
	locals int64
	args   int64
	callee int64
	pos    src.XPos
}

var (
	largeStackFramesMu sync.Mutex // protects largeStackFrames
	largeStackFrames   []largeStack
)

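// CheckLargeStacks reports an error, in source order, for every function
// recorded in largeStackFrames during compilation.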
func CheckLargeStacks() {
	// Sort by source position so the errors below are reported deterministically.
	sort.Slice(largeStackFrames, func(i, j int) bool {
		return largeStackFrames[i].pos.Before(largeStackFrames[j].pos)
	})
	for _, large := range largeStackFrames {
		if large.callee != 0 {
			base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args + %d MB callee", large.locals>>20, large.args>>20, large.callee>>20)
		} else {
			base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args", large.locals>>20, large.args>>20)
		}
	}
}