package wasm

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/wasm"
	"internal/buildcfg"
)

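// Init registers the wasm-specific code generation hooks with the SSA back end.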
func Init(arch *ssagen.ArchInfo) {
	arch.LinkArch = &wasm.Linkwasm
	arch.REGSP = wasm.REG_SP
	arch.MAXWIDTH = 1 << 50

	arch.ZeroRange = zeroRange
	arch.Ginsnop = ginsnop

	arch.SSAMarkMoves = ssaMarkMoves
	arch.SSAGenValue = ssaGenValue
	arch.SSAGenBlock = ssaGenBlock
}

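// zeroRange zeroes cnt bytes of the stack frame starting at offset off from SP, 8 bytes at a time.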
func zeroRange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
	if cnt == 0 {
		return p
	}
	if cnt%8 != 0 {
		base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
	}

	for i := int64(0); i < cnt; i += 8 {
		p = pp.Append(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
		p = pp.Append(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
		p = pp.Append(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
	}

	return p
}

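// ginsnop emits a no-op instruction.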
func ginsnop(pp *objw.Progs) *obj.Prog {
	return pp.Prog(wasm.ANop)
}

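// ssaMarkMoves has nothing to do on wasm.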
func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {
}

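// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block laid out immediately after b.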
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if next != b.Succs[0].Block() {
			s.Br(obj.AJMP, b.Succs[0].Block())
		}

	case ssa.BlockIf:
		switch next {
		case b.Succs[0].Block():
			// if false, jump to b.Succs[1]
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AI32Eqz)
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[1].Block())
			s.Prog(wasm.AEnd)
		case b.Succs[1].Block():
			// if true, jump to b.Succs[0]
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
		default:
			// if true, jump to b.Succs[0], else jump to b.Succs[1]
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
			s.Br(obj.AJMP, b.Succs[1].Block())
		}

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockExit, ssa.BlockRetJmp:
		// nothing to do

	case ssa.BlockDefer:
		// deferproc returns 0 in RET0 if execution should continue,
		// nonzero if control should jump to the deferreturn call.
		p := s.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		s.Br(obj.AJMP, b.Succs[1].Block())
		s.Prog(wasm.AEnd)
		if next != b.Succs[0].Block() {
			s.Br(obj.AJMP, b.Succs[0].Block())
		}

	default:
		panic("unexpected block")
	}

	// Entry point for the next block.
	s.Prog(wasm.ARESUMEPOINT)

	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}

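// ssaGenValue emits the wasm code for SSA value v.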
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall, ssa.OpWasmLoweredTailCall:
		s.PrepareCall(v)
		if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
			// The runtime needs to inject jumps to
			// deferreturn calls using the address in
			// _func.deferreturn. Hence, the call to
			// deferreturn must itself be a resumption
			// point so it gets a target PC.
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn != nil {
			sym := call.Fn
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
			p.Pos = v.Pos
			if v.Op == ssa.OpWasmLoweredTailCall {
				p.As = obj.ARET
			}
		} else {
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
			p.Pos = v.Pos
		}

	case ssa.OpWasmLoweredMove:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmMove}

	case ssa.OpWasmLoweredZero:
		getValue32(s, v.Args[0])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmZero}

	case ssa.OpWasmLoweredNilCheck:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.SigPanic}
		s.Prog(wasm.AEnd)
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		p := s.Prog(storeOp(v.Type))
		ssagen.AddrAuto(&p.To, v)

	case ssa.OpClobber, ssa.OpClobberReg:
		// Clobber operations are not implemented on wasm; emit nothing.

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			s.OnWasmStackSkipped++
			// If a Value is marked OnWasmStack, we don't generate the value and store it to a register now.
			// Instead, we delay the generation to when the value is used and then directly generate it on the WebAssembly stack.
			return
		}
		ssaGenValueOnStack(s, v, true)
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}

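// ssaGenValueOnStack generates the code for v and leaves its result on the WebAssembly stack.
// If extend is true, 32-bit comparison results are zero-extended to 64 bits.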
func ssaGenValueOnStack(s *ssagen.State, v *ssa.Value, extend bool) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// caller PC is stored 8 bytes below first parameter
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		p := s.Prog(wasm.AGet)
		// caller SP is the address of the first parameter
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		if v.Aux == nil {
			getValue64(s, v.Args[0])
			i64Const(s, v.AuxInt)
			s.Prog(wasm.AI64Add)
			break
		}
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			p.From.Reg = v.Args[0].Reg()
			ssagen.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredConvert:
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue32(s, v.Args[2])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF32Const:
		f32Const(s, v.AuxFloat())

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U)
		}

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
		ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
		ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U)
		}

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmI64Rotl,
		ssa.OpWasmF32Add, ssa.OpWasmF32Sub, ssa.OpWasmF32Mul, ssa.OpWasmF32Div, ssa.OpWasmF32Copysign,
		ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div, ssa.OpWasmF64Copysign:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI32Rotl:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		s.Prog(wasm.AI32Rotl)
		s.Prog(wasm.AI64ExtendI32U)

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// Division of int64 needs the helper function wasmDiv to handle the MinInt64 / -1 case.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSatF32S, ssa.OpWasmI64TruncSatF64S:
		getValue64(s, v.Args[0])
		if buildcfg.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			if v.Op == ssa.OpWasmI64TruncSatF32S {
				s.Prog(wasm.AF64PromoteF32)
			}
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncS}
		}

	case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
		getValue64(s, v.Args[0])
		if buildcfg.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			if v.Op == ssa.OpWasmI64TruncSatF32U {
				s.Prog(wasm.AF64PromoteF32)
			}
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncU}
		}

	case ssa.OpWasmF32DemoteF64:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF64PromoteF32:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF32ConvertI64S, ssa.OpWasmF32ConvertI64U,
		ssa.OpWasmF64ConvertI64S, ssa.OpWasmF64ConvertI64U,
		ssa.OpWasmI64Extend8S, ssa.OpWasmI64Extend16S, ssa.OpWasmI64Extend32S,
		ssa.OpWasmF32Neg, ssa.OpWasmF32Sqrt, ssa.OpWasmF32Trunc, ssa.OpWasmF32Ceil, ssa.OpWasmF32Floor, ssa.OpWasmF32Nearest, ssa.OpWasmF32Abs,
		ssa.OpWasmF64Neg, ssa.OpWasmF64Sqrt, ssa.OpWasmF64Trunc, ssa.OpWasmF64Ceil, ssa.OpWasmF64Floor, ssa.OpWasmF64Nearest, ssa.OpWasmF64Abs,
		ssa.OpWasmI64Ctz, ssa.OpWasmI64Clz, ssa.OpWasmI64Popcnt:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		p := s.Prog(loadOp(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)
	}
}

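// isCmp reports whether v is a comparison, which leaves a 32-bit result on the wasm stack.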
func isCmp(v *ssa.Value) bool {
	switch v.Op {
	case ssa.OpWasmI64Eqz, ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
		ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
		ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		return true
	default:
		return false
	}
}

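// getValue32 pushes v onto the WebAssembly operand stack as a 32-bit value.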
func getValue32(s *ssagen.State, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v, false)
		if !isCmp(v) {
			s.Prog(wasm.AI32WrapI64)
		}
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg != wasm.REG_SP {
		s.Prog(wasm.AI32WrapI64)
	}
}

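// getValue64 pushes v onto the WebAssembly operand stack as a 64-bit value.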
func getValue64(s *ssagen.State, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v, true)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg == wasm.REG_SP {
		s.Prog(wasm.AI64ExtendI32U)
	}
}

func i32Const(s *ssagen.State, val int32) {
	p := s.Prog(wasm.AI32Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
}

func i64Const(s *ssagen.State, val int64) {
	p := s.Prog(wasm.AI64Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
}

func f32Const(s *ssagen.State, val float64) {
	p := s.Prog(wasm.AF32Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

func f64Const(s *ssagen.State, val float64) {
	p := s.Prog(wasm.AF64Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

func getReg(s *ssagen.State, reg int16) {
	p := s.Prog(wasm.AGet)
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

func setReg(s *ssagen.State, reg int16) {
	p := s.Prog(wasm.ASet)
	p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

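// loadOp returns the wasm load instruction for a value of type t.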
func loadOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Load
		case 8:
			return wasm.AF64Load
		default:
			panic("bad load type")
		}
	}

	switch t.Size() {
	case 1:
		if t.IsSigned() {
			return wasm.AI64Load8S
		}
		return wasm.AI64Load8U
	case 2:
		if t.IsSigned() {
			return wasm.AI64Load16S
		}
		return wasm.AI64Load16U
	case 4:
		if t.IsSigned() {
			return wasm.AI64Load32S
		}
		return wasm.AI64Load32U
	case 8:
		return wasm.AI64Load
	default:
		panic("bad load type")
	}
}

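// storeOp returns the wasm store instruction for a value of type t.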
func storeOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Store
		case 8:
			return wasm.AF64Store
		default:
			panic("bad store type")
		}
	}

	switch t.Size() {
	case 1:
		return wasm.AI64Store8
	case 2:
		return wasm.AI64Store16
	case 4:
		return wasm.AI64Store32
	case 8:
		return wasm.AI64Store
	default:
		panic("bad store type")
	}
}