5 package escape
6
7 import (
8 "fmt"
9
10 "cmd/compile/internal/base"
11 "cmd/compile/internal/ir"
12 "cmd/compile/internal/logopt"
13 "cmd/compile/internal/typecheck"
14 "cmd/compile/internal/types"
15 )
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
// A batch holds the shared state for escape-analyzing one batch of
// functions together (see Batch).
type batch struct {
	allLocs  []*location // all locations tracked for this batch (populated during analysis)
	closures []closure   // closure captures deferred until every function is walked; see Batch/flowClosure

	// heapLoc is the pseudo-location representing the heap; Batch sets
	// its escapes flag, so anything flowing to it escapes.
	heapLoc location
	// blankLoc is a pseudo-location, presumably for values assigned to
	// blank — its uses are not visible in this chunk; confirm elsewhere.
	blankLoc location
}
94
95
96
// A closure records a closure expression together with the hole its
// value flows into, so the capture decisions can be made after all
// functions in the batch have been walked (see flowClosure).
type closure struct {
	k   hole            // where clo's value flows
	clo *ir.ClosureExpr // the closure expression itself
}
101
102
103
// An escape holds the per-function analysis state, embedding the
// batch-wide state. One is created per function by batch.with.
type escape struct {
	*batch

	curfn *ir.Func // function being analyzed

	labels map[*types.Sym]labelState // labels seen so far; see walkFunc

	// loopDepth starts at 1 (see batch.with) and presumably tracks the
	// current loop nesting depth within curfn — the increment sites are
	// not visible in this chunk; confirm against the statement walker.
	loopDepth int
}
117
118 func Funcs(all []ir.Node) {
119 ir.VisitFuncsBottomUp(all, Batch)
120 }
121
122
123
124 func Batch(fns []*ir.Func, recursive bool) {
125 for _, fn := range fns {
126 if fn.Op() != ir.ODCLFUNC {
127 base.Fatalf("unexpected node: %v", fn)
128 }
129 }
130
131 var b batch
132 b.heapLoc.escapes = true
133
134
135 for _, fn := range fns {
136 if base.Flag.W > 1 {
137 s := fmt.Sprintf("\nbefore escape %v", fn)
138 ir.Dump(s, fn)
139 }
140 b.initFunc(fn)
141 }
142 for _, fn := range fns {
143 if !fn.IsHiddenClosure() {
144 b.walkFunc(fn)
145 }
146 }
147
148
149
150
151
152 for _, closure := range b.closures {
153 b.flowClosure(closure.k, closure.clo)
154 }
155 b.closures = nil
156
157 for _, loc := range b.allLocs {
158 if why := HeapAllocReason(loc.n); why != "" {
159 b.flow(b.heapHole().addr(loc.n, why), loc)
160 }
161 }
162
163 b.walkAll()
164 b.finish(fns)
165 }
166
167 func (b *batch) with(fn *ir.Func) *escape {
168 return &escape{
169 batch: b,
170 curfn: fn,
171 loopDepth: 1,
172 }
173 }
174
175 func (b *batch) initFunc(fn *ir.Func) {
176 e := b.with(fn)
177 if fn.Esc() != escFuncUnknown {
178 base.Fatalf("unexpected node: %v", fn)
179 }
180 fn.SetEsc(escFuncPlanned)
181 if base.Flag.LowerM > 3 {
182 ir.Dump("escAnalyze", fn)
183 }
184
185
186 for _, n := range fn.Dcl {
187 e.newLoc(n, false)
188 }
189
190
191
192 if fn.OClosure == nil {
193 for _, n := range fn.ClosureVars {
194 e.newLoc(n.Canonical(), false)
195 }
196 }
197
198
199 for i, f := range fn.Type().Results().FieldSlice() {
200 e.oldLoc(f.Nname.(*ir.Name)).resultIndex = 1 + i
201 }
202 }
203
204 func (b *batch) walkFunc(fn *ir.Func) {
205 e := b.with(fn)
206 fn.SetEsc(escFuncStarted)
207
208
209 ir.Visit(fn, func(n ir.Node) {
210 switch n.Op() {
211 case ir.OLABEL:
212 n := n.(*ir.LabelStmt)
213 if e.labels == nil {
214 e.labels = make(map[*types.Sym]labelState)
215 }
216 e.labels[n.Label] = nonlooping
217
218 case ir.OGOTO:
219
220
221 n := n.(*ir.BranchStmt)
222 if e.labels[n.Label] == nonlooping {
223 e.labels[n.Label] = looping
224 }
225 }
226 })
227
228 e.block(fn.Body)
229
230 if len(e.labels) != 0 {
231 base.FatalfAt(fn.Pos(), "leftover labels after walkFunc")
232 }
233 }
234
235 func (b *batch) flowClosure(k hole, clo *ir.ClosureExpr) {
236 for _, cv := range clo.Func.ClosureVars {
237 n := cv.Canonical()
238 loc := b.oldLoc(cv)
239 if !loc.captured {
240 base.FatalfAt(cv.Pos(), "closure variable never captured: %v", cv)
241 }
242
243
244 n.SetByval(!loc.addrtaken && !loc.reassigned && n.Type().Size() <= 128)
245 if !n.Byval() {
246 n.SetAddrtaken(true)
247 if n.Sym().Name == typecheck.LocalDictName {
248 base.FatalfAt(n.Pos(), "dictionary variable not captured by value")
249 }
250 }
251
252 if base.Flag.LowerM > 1 {
253 how := "ref"
254 if n.Byval() {
255 how = "value"
256 }
257 base.WarnfAt(n.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", n.Curfn, how, n, loc.addrtaken, loc.reassigned, n.Type().Size())
258 }
259
260
261 k := k
262 if !cv.Byval() {
263 k = k.addr(cv, "reference")
264 }
265 b.flow(k.note(cv, "captured by a closure"), loc)
266 }
267 }
268
// finish records the analysis results: it tags every function's
// receiver/parameters with escape notes, sets each node's Esc state,
// emits -m diagnostics, and clears per-node analysis state.
func (b *batch) finish(fns []*ir.Func) {
	// Record parameter tags.
	for _, fn := range fns {
		fn.SetEsc(escFuncTagged)

		// Number receiver and parameters sequentially so unnamed ones
		// can still be identified in diagnostics (see paramTag's name).
		narg := 0
		for _, fs := range &types.RecvsParams {
			for _, f := range fs(fn.Type()).Fields().Slice() {
				narg++
				f.Note = b.paramTag(fn, narg, f)
			}
		}
	}

	for _, loc := range b.allLocs {
		n := loc.n
		if n == nil {
			continue
		}
		if n.Op() == ir.ONAME {
			n := n.(*ir.Name)
			// Clear analysis state stashed in Opt (set outside this
			// chunk — presumably when the location was created).
			n.Opt = nil
		}

		// Suppress "escapes to heap"-style diagnostics for closures
		// generated as go/defer wrappers; they are compiler-created
		// and not interesting to users.
		goDeferWrapper := n.Op() == ir.OCLOSURE && n.(*ir.ClosureExpr).Func.Wrapper()

		if n.Op() == ir.OCONVIDATA && n.(*ir.ConvExpr).NonEscaping {
			// The conversion was marked NonEscaping by its creator
			// (outside this chunk), so override whatever the flow
			// analysis concluded for its data-word allocation.
			// NOTE(review): confirm the invariant at the setter.
			loc.escapes = false
		}

		if loc.escapes {
			if n.Op() == ir.ONAME {
				// Heap-escaping named variables are forbidden when
				// compiling the runtime itself.
				if base.Flag.CompilingRuntime {
					base.ErrorfAt(n.Pos(), "%v escapes to heap, not allowed in runtime", n)
				}
				if base.Flag.LowerM != 0 {
					base.WarnfAt(n.Pos(), "moved to heap: %v", n)
				}
			} else {
				if base.Flag.LowerM != 0 && !goDeferWrapper {
					base.WarnfAt(n.Pos(), "%v escapes to heap", n)
				}
				if logopt.Enabled() {
					// e_curfn is always nil here, so the logged entry
					// carries no enclosing-function name.
					var e_curfn *ir.Func
					logopt.LogOpt(n.Pos(), "escape", "escape", ir.FuncName(e_curfn))
				}
			}
			n.SetEsc(ir.EscHeap)
		} else {
			if base.Flag.LowerM != 0 && n.Op() != ir.ONAME && !goDeferWrapper {
				base.WarnfAt(n.Pos(), "%v does not escape", n)
			}
			n.SetEsc(ir.EscNone)
			if loc.transient {
				// Propagate transience to the node kinds that can
				// record it; consumers are outside this chunk.
				switch n.Op() {
				case ir.OCLOSURE:
					n := n.(*ir.ClosureExpr)
					n.SetTransient(true)
				case ir.OMETHVALUE:
					n := n.(*ir.SelectorExpr)
					n.SetTransient(true)
				case ir.OSLICELIT:
					n := n.(*ir.CompLitExpr)
					n.SetTransient(true)
				}
			}
		}
	}
}
347
348
349
350
351
352
353 func (e *escape) inMutualBatch(fn *ir.Name) bool {
354 if fn.Defn != nil && fn.Defn.Esc() < escFuncTagged {
355 if fn.Defn.Esc() == escFuncUnknown {
356 base.Fatalf("graph inconsistency: %v", fn)
357 }
358 return true
359 }
360 return false
361 }
362
// Per-function analysis phases, stored via ir.Func.SetEsc and advanced
// in order: initFunc moves a function from unknown to planned, walkFunc
// to started, and finish to tagged.
const (
	escFuncUnknown = 0 + iota
	escFuncPlanned
	escFuncStarted
	escFuncTagged
)
369
370
// A labelState records whether a label may be the head of an
// unstructured loop; see walkFunc's pre-pass.
type labelState int

const (
	// looping: some goto appearing after the label targets it
	// (a backwards jump).
	looping labelState = 1 + iota
	// nonlooping: no backwards goto to this label has been seen.
	nonlooping
)
377
// paramTag returns the escape note to record for f, the narg'th
// receiver/parameter of fn, emitting -m diagnostics along the way.
// As a side effect it may set uintptr-related pragmas on fn.
func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
	// name labels f in diagnostics: its symbol name if it has one,
	// otherwise its position (e.g. "arg#3").
	name := func() string {
		if f.Sym != nil {
			return f.Sym.Name
		}
		return fmt.Sprintf("arg#%d", narg)
	}

	// Diagnostics only under -m, and not for wrapper/dupok functions,
	// whose output would duplicate that of the original function.
	diagnose := base.Flag.LowerM != 0 && !(fn.Wrapper() || fn.Dupok())

	if len(fn.Body) == 0 {
		// No body visible (assembly or externally implemented), so be
		// conservative: uintptr arguments are assumed to be unsafe
		// pointers that must stay alive across the call.
		fn.Pragma |= ir.UintptrKeepAlive

		if f.Type.IsUintptr() {
			if diagnose {
				base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name())
			}
			return ""
		}

		// Pointer-free parameters carry no escape information.
		if !f.Type.HasPointers() {
			return ""
		}

		var esc leaks

		// Assume the parameter leaks to the heap unless the function is
		// annotated //go:noescape.
		if fn.Pragma&ir.Noescape != 0 {
			if diagnose && f.Sym != nil {
				base.WarnfAt(f.Pos, "%v does not escape", name())
			}
		} else {
			if diagnose && f.Sym != nil {
				base.WarnfAt(f.Pos, "leaking param: %v", name())
			}
			esc.AddHeap(0)
		}

		return esc.Encode()
	}

	if fn.Pragma&ir.UintptrEscapes != 0 {
		// //go:uintptrescapes: uintptr arguments (including a trailing
		// ...uintptr) are treated as escaping and kept alive.
		fn.Pragma |= ir.UintptrKeepAlive

		if f.Type.IsUintptr() {
			if diagnose {
				base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
			}
			return ""
		}
		if f.IsDDD() && f.Type.Elem().IsUintptr() {
			// Final argument is ...uintptr.
			if diagnose {
				base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name())
			}
			return ""
		}
	}

	// Pointer-free parameters carry no escape information.
	if !f.Type.HasPointers() {
		return ""
	}

	// Unnamed/blank parameters are never referenced by the body, so the
	// zero-value leaks (nothing leaks) is the correct tag.
	if f.Sym == nil || f.Sym.IsBlank() {
		var esc leaks
		return esc.Encode()
	}

	n := f.Nname.(*ir.Name)
	loc := b.oldLoc(n)
	esc := loc.paramEsc
	esc.Optimize()

	if diagnose && !loc.escapes {
		// Describe how (if at all) the parameter leaks.
		if esc.Empty() {
			base.WarnfAt(f.Pos, "%v does not escape", name())
		}
		if x := esc.Heap(); x >= 0 {
			if x == 0 {
				base.WarnfAt(f.Pos, "leaking param: %v", name())
			} else {
				// x > 0: something reachable through the parameter
				// leaks, not the parameter itself.
				base.WarnfAt(f.Pos, "leaking param content: %v", name())
			}
		}
		for i := 0; i < numEscResults; i++ {
			if x := esc.Result(i); x >= 0 {
				res := fn.Type().Results().Field(i).Sym
				base.WarnfAt(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
			}
		}
	}

	return esc.Encode()
}
484