package wasm

import (
	"bytes"
	"cmd/internal/obj"
	"cmd/internal/objabi"
	"cmd/internal/sys"
	"encoding/binary"
	"fmt"
	"io"
	"math"
)

var Register = map[string]int16{
	"SP":    REG_SP,
	"CTXT":  REG_CTXT,
	"g":     REG_g,
	"RET0":  REG_RET0,
	"RET1":  REG_RET1,
	"RET2":  REG_RET2,
	"RET3":  REG_RET3,
	"PAUSE": REG_PAUSE,

	"R0":  REG_R0,
	"R1":  REG_R1,
	"R2":  REG_R2,
	"R3":  REG_R3,
	"R4":  REG_R4,
	"R5":  REG_R5,
	"R6":  REG_R6,
	"R7":  REG_R7,
	"R8":  REG_R8,
	"R9":  REG_R9,
	"R10": REG_R10,
	"R11": REG_R11,
	"R12": REG_R12,
	"R13": REG_R13,
	"R14": REG_R14,
	"R15": REG_R15,

	"F0":  REG_F0,
	"F1":  REG_F1,
	"F2":  REG_F2,
	"F3":  REG_F3,
	"F4":  REG_F4,
	"F5":  REG_F5,
	"F6":  REG_F6,
	"F7":  REG_F7,
	"F8":  REG_F8,
	"F9":  REG_F9,
	"F10": REG_F10,
	"F11": REG_F11,
	"F12": REG_F12,
	"F13": REG_F13,
	"F14": REG_F14,
	"F15": REG_F15,

	"F16": REG_F16,
	"F17": REG_F17,
	"F18": REG_F18,
	"F19": REG_F19,
	"F20": REG_F20,
	"F21": REG_F21,
	"F22": REG_F22,
	"F23": REG_F23,
	"F24": REG_F24,
	"F25": REG_F25,
	"F26": REG_F26,
	"F27": REG_F27,
	"F28": REG_F28,
	"F29": REG_F29,
	"F30": REG_F30,
	"F31": REG_F31,

	"PC_B": REG_PC_B,
}

var registerNames []string

func init() {
	obj.RegisterRegister(MINREG, MAXREG, rconv)
	obj.RegisterOpcode(obj.ABaseWasm, Anames)

	registerNames = make([]string, MAXREG-MINREG)
	for name, reg := range Register {
		registerNames[reg-MINREG] = name
	}
}

func rconv(r int) string {
	return registerNames[r-MINREG]
}

var unaryDst = map[obj.As]bool{
	ASet:          true,
	ATee:          true,
	ACall:         true,
	ACallIndirect: true,
	ACallImport:   true,
	ABr:           true,
	ABrIf:         true,
	ABrTable:      true,
	AI32Store:     true,
	AI64Store:     true,
	AF32Store:     true,
	AF64Store:     true,
	AI32Store8:    true,
	AI32Store16:   true,
	AI64Store8:    true,
	AI64Store16:   true,
	AI64Store32:   true,
	ACALLNORESUME: true,
}

var Linkwasm = obj.LinkArch{
	Arch:       sys.ArchWasm,
	Init:       instinit,
	Preprocess: preprocess,
	Assemble:   assemble,
	UnaryDst:   unaryDst,
}

var (
	morestack       *obj.LSym
	morestackNoCtxt *obj.LSym
	gcWriteBarrier  *obj.LSym
	sigpanic        *obj.LSym
)

const (
	// WasmImport marks a Call to a host function imported by the
	// WebAssembly module; such calls are relocated as R_WASMIMPORT.
	WasmImport = 1 << 0
)

func instinit(ctxt *obj.Link) {
	morestack = ctxt.Lookup("runtime.morestack")
	morestackNoCtxt = ctxt.Lookup("runtime.morestack_noctxt")
	gcWriteBarrier = ctxt.LookupABI("runtime.gcWriteBarrier", obj.ABIInternal)
	sigpanic = ctxt.LookupABI("runtime.sigpanic", obj.ABIInternal)
}

func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
	appendp := func(p *obj.Prog, as obj.As, args ...obj.Addr) *obj.Prog {
		if p.As != obj.ANOP {
			p2 := obj.Appendp(p, newprog)
			p2.Pc = p.Pc
			p = p2
		}
		p.As = as
		switch len(args) {
		case 0:
			p.From = obj.Addr{}
			p.To = obj.Addr{}
		case 1:
			if unaryDst[as] {
				p.From = obj.Addr{}
				p.To = args[0]
			} else {
				p.From = args[0]
				p.To = obj.Addr{}
			}
		case 2:
			p.From = args[0]
			p.To = args[1]
		default:
			panic("bad args")
		}
		return p
	}

	framesize := s.Func().Text.To.Offset
	if framesize < 0 {
		panic("bad framesize")
	}
	s.Func().Args = s.Func().Text.To.Val.(int32)
	s.Func().Locals = int32(framesize)

	if s.Func().Text.From.Sym.Wrapper() {
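		// This function is a method wrapper. If a panic is being
		// unwound and its argp points at this frame's arguments (FP),
		// repoint argp at the bottom of the wrapper's frame so the
		// panic appears to come from the wrapped method. Roughly:
		//
		//   if g._panic != nil && g._panic.argp == FP {
		//       g._panic.argp = bottom-of-frame
		//   }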
		gpanic := obj.Addr{
			Type:   obj.TYPE_MEM,
			Reg:    REGG,
			Offset: 4 * 8,
		}

		panicargp := obj.Addr{
			Type:   obj.TYPE_MEM,
			Reg:    REG_R0,
			Offset: 0,
		}

		p := s.Func().Text
		p = appendp(p, AMOVD, gpanic, regAddr(REG_R0))

		p = appendp(p, AGet, regAddr(REG_R0))
		p = appendp(p, AI64Eqz)
		p = appendp(p, ANot)
		p = appendp(p, AIf)

		p = appendp(p, AGet, regAddr(REG_SP))
		p = appendp(p, AI64ExtendI32U)
		p = appendp(p, AI64Const, constAddr(framesize+8))
		p = appendp(p, AI64Add)
		p = appendp(p, AI64Load, panicargp)

		p = appendp(p, AI64Eq)
		p = appendp(p, AIf)
		p = appendp(p, AMOVD, regAddr(REG_SP), panicargp)
		p = appendp(p, AEnd)

		p = appendp(p, AEnd)
	}

	if framesize > 0 {
		p := s.Func().Text
		p = appendp(p, AGet, regAddr(REG_SP))
		p = appendp(p, AI32Const, constAddr(framesize))
		p = appendp(p, AI32Sub)
		p = appendp(p, ASet, regAddr(REG_SP))
		p.Spadj = int32(framesize)
	}

	needMoreStack := !s.Func().Text.From.Sym.NoSplit()

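	// If the maymorestack debug hook is set, insert a call to it.
	// The call needs a small temporary frame to save REGCTXT, and it
	// is placed before the regular morestack check below.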
	var pMorestack = s.Func().Text
	if needMoreStack && ctxt.Flag_maymorestack != "" {
		p := pMorestack

		// Save REGCTXT on the stack in a small temporary frame.
		const tempFrame = 8
		p = appendp(p, AGet, regAddr(REG_SP))
		p = appendp(p, AI32Const, constAddr(tempFrame))
		p = appendp(p, AI32Sub)
		p = appendp(p, ASet, regAddr(REG_SP))
		p.Spadj = tempFrame
		ctxtp := obj.Addr{
			Type:   obj.TYPE_MEM,
			Reg:    REG_SP,
			Offset: 0,
		}
		p = appendp(p, AMOVD, regAddr(REGCTXT), ctxtp)

		// Call the maymorestack hook with ACALLNORESUME, so no resume
		// point is inserted after the call.
		p = appendp(p, ACALLNORESUME, constAddr(0))

		sym := ctxt.LookupABI(ctxt.Flag_maymorestack, s.ABI())
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}

		// Restore REGCTXT and SP.
		p = appendp(p, AMOVD, ctxtp, regAddr(REGCTXT))
		p = appendp(p, AGet, regAddr(REG_SP))
		p = appendp(p, AI32Const, constAddr(tempFrame))
		p = appendp(p, AI32Add)
		p = appendp(p, ASet, regAddr(REG_SP))
		p.Spadj = -tempFrame

		// Add an explicit resume point after the maymorestack call;
		// the regular morestack check is inserted at this point.
		pMorestack = appendp(p, ARESUMEPOINT)
	}

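	// Scan the function body: assign a PC to every Prog, rewrite
	// explicit RESUMEPOINTs into block Ends, insert a resume point
	// after every top-level CALL, and build the BrTable index list
	// used to jump to the resume point selected by PC_B.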
	numResumePoints := 0
	explicitBlockDepth := 0
	pc := int64(0)
	var tableIdxs []uint64
	tablePC := int64(0)
	base := ctxt.PosTable.Pos(s.Func().Text.Pos).Base()
	for p := s.Func().Text; p != nil; p = p.Link {
		prevBase := base
		base = ctxt.PosTable.Pos(p.Pos).Base()
		switch p.As {
		case ABlock, ALoop, AIf:
			explicitBlockDepth++

		case AEnd:
			if explicitBlockDepth == 0 {
				panic("End without block")
			}
			explicitBlockDepth--

		case ARESUMEPOINT:
			if explicitBlockDepth != 0 {
				panic("RESUME can only be used on toplevel")
			}
			p.As = AEnd
			for tablePC <= pc {
				tableIdxs = append(tableIdxs, uint64(numResumePoints))
				tablePC++
			}
			numResumePoints++
			pc++

		case obj.ACALL:
			if explicitBlockDepth != 0 {
				panic("CALL can only be used on toplevel, try CALLNORESUME instead")
			}
			appendp(p, ARESUMEPOINT)
		}

		p.Pc = pc

		// Increase pc whenever some pc-value table needs a new entry,
		// e.g. for SP adjustments or a change of position base.
		if p.As == ACALLNORESUME || p.As == obj.ANOP || p.As == ANop || p.Spadj != 0 || base != prevBase {
			pc++
			if p.To.Sym == sigpanic {
				// The panic stack trace expects the PC at the call of sigpanic,
				// not the next one. However, runtime.Caller subtracts 1 from the
				// PC. To make both PC and PC-1 work (have the same line number),
				// we advance the PC by 2 at sigpanic.
				pc++
			}
		}
	}
	tableIdxs = append(tableIdxs, uint64(numResumePoints))
	s.Size = pc + 1

	if needMoreStack {
		p := pMorestack

		if framesize <= objabi.StackSmall {
			// small stack: SP <= stackguard
			p = appendp(p, AGet, regAddr(REG_SP))
			p = appendp(p, AGet, regAddr(REGG))
			p = appendp(p, AI32WrapI64)
			p = appendp(p, AI32Load, constAddr(2*int64(ctxt.Arch.PtrSize))) // g.stackguard0
			p = appendp(p, AI32LeU)
		} else {
			// large stack: SP-framesize <= stackguard-StackSmall
			//              SP <= stackguard+(framesize-StackSmall)
			p = appendp(p, AGet, regAddr(REG_SP))
			p = appendp(p, AGet, regAddr(REGG))
			p = appendp(p, AI32WrapI64)
			p = appendp(p, AI32Load, constAddr(2*int64(ctxt.Arch.PtrSize))) // g.stackguard0
			p = appendp(p, AI32Const, constAddr(int64(framesize)-objabi.StackSmall))
			p = appendp(p, AI32Add)
			p = appendp(p, AI32LeU)
		}

		p = appendp(p, AIf)
		// This CALL has no resume point of its own, so after growing
		// the stack, morestack resumes at the preceding resume point
		// (typically the function entry) and the check runs again.
		p = appendp(p, obj.ACALL, constAddr(0))
		if s.Func().Text.From.Sym.NeedCtxt() {
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: morestack}
		} else {
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: morestackNoCtxt}
		}
		p = appendp(p, AEnd)
	}

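	// Record the branches targeting the entry point loop and the
	// unwind exit; their targets are filled in later.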
	var entryPointLoopBranches []*obj.Prog
	var unwindExitBranches []*obj.Prog
	currentDepth := 0
	for p := s.Func().Text; p != nil; p = p.Link {
		switch p.As {
		case ABlock, ALoop, AIf:
			currentDepth++
		case AEnd:
			currentDepth--
		}

		switch p.As {
		case obj.AJMP:
			jmp := *p
			p.As = obj.ANOP

			if jmp.To.Type == obj.TYPE_BRANCH {
				// Jump to a basic block: write the target PC to PC_B
				// and branch back to the entry point loop.
				p = appendp(p, AI32Const, constAddr(jmp.To.Val.(*obj.Prog).Pc))
				p = appendp(p, ASet, regAddr(REG_PC_B))
				p = appendp(p, ABr)
				entryPointLoopBranches = append(entryPointLoopBranches, p)
				break
			}

			// low-level WebAssembly call to function
			switch jmp.To.Type {
			case obj.TYPE_MEM:
				if !notUsePC_B[jmp.To.Sym.Name] {
					// Set PC_B parameter to function entry.
					p = appendp(p, AI32Const, constAddr(0))
				}
				p = appendp(p, ACall, jmp.To)

			case obj.TYPE_NONE:
				// Jump to the function value on the operand stack:
				// extract PC_F (bits 16..31) for the indirect call.
				p = appendp(p, AI32WrapI64)
				p = appendp(p, AI32Const, constAddr(16))
				p = appendp(p, AI32ShrU)

				// Set PC_B parameter to function entry. Use REG_PC_B
				// as a scratch register to push the zero below PC_F.
				p = appendp(p, ASet, regAddr(REG_PC_B))
				p = appendp(p, AI32Const, constAddr(0))
				p = appendp(p, AGet, regAddr(REG_PC_B))

				p = appendp(p, ACallIndirect)

			default:
				panic("bad target for JMP")
			}

			p = appendp(p, AReturn)

		case obj.ACALL, ACALLNORESUME:
			call := *p
			p.As = obj.ANOP

			pcAfterCall := call.Link.Pc
			if call.To.Sym == sigpanic {
				pcAfterCall-- // sigpanic expects to be called without advancing the PC
			}

			// SP -= 8
			p = appendp(p, AGet, regAddr(REG_SP))
			p = appendp(p, AI32Const, constAddr(8))
			p = appendp(p, AI32Sub)
			p = appendp(p, ASet, regAddr(REG_SP))

			// write return address to Go stack
			p = appendp(p, AGet, regAddr(REG_SP))
			p = appendp(p, AI64Const, obj.Addr{
				Type:   obj.TYPE_ADDR,
				Name:   obj.NAME_EXTERN,
				Sym:    s,           // PC_F
				Offset: pcAfterCall, // PC_B
			})
			p = appendp(p, AI64Store, constAddr(0))

			// low-level WebAssembly call to function
			switch call.To.Type {
			case obj.TYPE_MEM:
				if !notUsePC_B[call.To.Sym.Name] {
					// Set PC_B parameter to function entry.
					p = appendp(p, AI32Const, constAddr(0))
				}
				p = appendp(p, ACall, call.To)

			case obj.TYPE_NONE:
				// Call the function value on the operand stack:
				// extract PC_F (bits 16..31) for the indirect call.
				p = appendp(p, AI32WrapI64)
				p = appendp(p, AI32Const, constAddr(16))
				p = appendp(p, AI32ShrU)

				// Set PC_B parameter to function entry. Use REG_PC_B
				// as a scratch register to push the zero below PC_F.
				p = appendp(p, ASet, regAddr(REG_PC_B))
				p = appendp(p, AI32Const, constAddr(0))
				p = appendp(p, AGet, regAddr(REG_PC_B))

				p = appendp(p, ACallIndirect)

			default:
				panic("bad target for CALL")
			}

			// gcWriteBarrier never unwinds the stack, so there is no unwind flag to check.
			if call.To.Sym == gcWriteBarrier {
				break
			}

			// The callee returns whether the WebAssembly stack needs to be unwound.
			if call.As == ACALLNORESUME && call.To.Sym != sigpanic {
				// There is no resume point, so an unwind here is fatal.
				p = appendp(p, AIf)
				p = appendp(p, obj.AUNDEF)
				p = appendp(p, AEnd)
			} else {
				// Unwinding: branch to the unwind exit, which returns 1.
				p = appendp(p, ABrIf)
				unwindExitBranches = append(unwindExitBranches, p)
			}

		case obj.ARET, ARETUNWIND:
			ret := *p
			p.As = obj.ANOP

			if framesize > 0 {
				// restore SP to caller's SP
				p = appendp(p, AGet, regAddr(REG_SP))
				p = appendp(p, AI32Const, constAddr(framesize))
				p = appendp(p, AI32Add)
				p = appendp(p, ASet, regAddr(REG_SP))
			}

			if ret.To.Type == obj.TYPE_MEM {
				// Set PC_B parameter to function entry.
				p = appendp(p, AI32Const, constAddr(0))

				// low-level WebAssembly call to function
				p = appendp(p, ACall, ret.To)
				p = appendp(p, AReturn)
				break
			}

			// SP += 8, dropping the return address
			p = appendp(p, AGet, regAddr(REG_SP))
			p = appendp(p, AI32Const, constAddr(8))
			p = appendp(p, AI32Add)
			p = appendp(p, ASet, regAddr(REG_SP))

			if ret.As == ARETUNWIND {
				// function needs to unwind the WebAssembly stack, return 1
				p = appendp(p, AI32Const, constAddr(1))
				p = appendp(p, AReturn)
				break
			}

			// not unwinding the WebAssembly stack, return 0
			p = appendp(p, AI32Const, constAddr(0))
			p = appendp(p, AReturn)
		}
	}

	for p := s.Func().Text; p != nil; p = p.Link {
		switch p.From.Name {
		case obj.NAME_AUTO:
			p.From.Offset += int64(framesize)
		case obj.NAME_PARAM:
			p.From.Reg = REG_SP
			p.From.Offset += int64(framesize) + 8
		}

		switch p.To.Name {
		case obj.NAME_AUTO:
			p.To.Offset += int64(framesize)
		case obj.NAME_PARAM:
			p.To.Reg = REG_SP
			p.To.Offset += int64(framesize) + 8
		}

		switch p.As {
		case AGet:
			if p.From.Type == obj.TYPE_ADDR {
				get := *p
				p.As = obj.ANOP

				switch get.From.Name {
				case obj.NAME_EXTERN:
					p = appendp(p, AI64Const, get.From)
				case obj.NAME_AUTO, obj.NAME_PARAM:
					p = appendp(p, AGet, regAddr(get.From.Reg))
					if get.From.Reg == REG_SP {
						p = appendp(p, AI64ExtendI32U)
					}
					if get.From.Offset != 0 {
						p = appendp(p, AI64Const, constAddr(get.From.Offset))
						p = appendp(p, AI64Add)
					}
				default:
					panic("bad Get: invalid name")
				}
			}

		case AI32Load, AI64Load, AF32Load, AF64Load, AI32Load8S, AI32Load8U, AI32Load16S, AI32Load16U, AI64Load8S, AI64Load8U, AI64Load16S, AI64Load16U, AI64Load32S, AI64Load32U:
			if p.From.Type == obj.TYPE_MEM {
				as := p.As
				from := p.From

				p.As = AGet
				p.From = regAddr(from.Reg)

				if from.Reg != REG_SP {
					p = appendp(p, AI32WrapI64)
				}

				p = appendp(p, as, constAddr(from.Offset))
			}

		case AMOVB, AMOVH, AMOVW, AMOVD:
			mov := *p
			p.As = obj.ANOP

			var loadAs obj.As
			var storeAs obj.As
			switch mov.As {
			case AMOVB:
				loadAs = AI64Load8U
				storeAs = AI64Store8
			case AMOVH:
				loadAs = AI64Load16U
				storeAs = AI64Store16
			case AMOVW:
				loadAs = AI64Load32U
				storeAs = AI64Store32
			case AMOVD:
				loadAs = AI64Load
				storeAs = AI64Store
			}

			appendValue := func() {
				switch mov.From.Type {
				case obj.TYPE_CONST:
					p = appendp(p, AI64Const, constAddr(mov.From.Offset))

				case obj.TYPE_ADDR:
					switch mov.From.Name {
					case obj.NAME_NONE, obj.NAME_PARAM, obj.NAME_AUTO:
						p = appendp(p, AGet, regAddr(mov.From.Reg))
						if mov.From.Reg == REG_SP {
							p = appendp(p, AI64ExtendI32U)
						}
						p = appendp(p, AI64Const, constAddr(mov.From.Offset))
						p = appendp(p, AI64Add)
					case obj.NAME_EXTERN:
						p = appendp(p, AI64Const, mov.From)
					default:
						panic("bad name for MOV")
					}

				case obj.TYPE_REG:
					p = appendp(p, AGet, mov.From)
					if mov.From.Reg == REG_SP {
						p = appendp(p, AI64ExtendI32U)
					}

				case obj.TYPE_MEM:
					p = appendp(p, AGet, regAddr(mov.From.Reg))
					if mov.From.Reg != REG_SP {
						p = appendp(p, AI32WrapI64)
					}
					p = appendp(p, loadAs, constAddr(mov.From.Offset))

				default:
					panic("bad MOV type")
				}
			}

			switch mov.To.Type {
			case obj.TYPE_REG:
				appendValue()
				if mov.To.Reg == REG_SP {
					p = appendp(p, AI32WrapI64)
				}
				p = appendp(p, ASet, mov.To)

			case obj.TYPE_MEM:
				switch mov.To.Name {
				case obj.NAME_NONE, obj.NAME_PARAM:
					p = appendp(p, AGet, regAddr(mov.To.Reg))
					if mov.To.Reg != REG_SP {
						p = appendp(p, AI32WrapI64)
					}
				case obj.NAME_EXTERN:
					p = appendp(p, AI32Const, obj.Addr{Type: obj.TYPE_ADDR, Name: obj.NAME_EXTERN, Sym: mov.To.Sym})
				default:
					panic("bad MOV name")
				}
				appendValue()
				p = appendp(p, storeAs, constAddr(mov.To.Offset))

			default:
				panic("bad MOV type")
			}

		case ACallImport:
			p.As = obj.ANOP
			p = appendp(p, AGet, regAddr(REG_SP))
			p = appendp(p, ACall, obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: s})
			p.Mark = WasmImport
		}
	}

	{
		p := s.Func().Text
		if len(unwindExitBranches) > 0 {
			p = appendp(p, ABlock) // unwindExit, used to return 1 when unwinding the stack
			for _, b := range unwindExitBranches {
				b.To = obj.Addr{Type: obj.TYPE_BRANCH, Val: p}
			}
		}
		if len(entryPointLoopBranches) > 0 {
			p = appendp(p, ALoop) // entryPointLoop, used to jump between basic blocks
			for _, b := range entryPointLoopBranches {
				b.To = obj.Addr{Type: obj.TYPE_BRANCH, Val: p}
			}
		}
		if numResumePoints > 0 {
			// Add Block instructions for the resume points and a BrTable
			// to jump to the resume point selected by PC_B.
			for i := 0; i < numResumePoints+1; i++ {
				p = appendp(p, ABlock)
			}
			p = appendp(p, AGet, regAddr(REG_PC_B))
			p = appendp(p, ABrTable, obj.Addr{Val: tableIdxs})
			p = appendp(p, AEnd)
		}
		for p.Link != nil {
			p = p.Link
		}
		if len(entryPointLoopBranches) > 0 {
			p = appendp(p, AEnd) // end of entryPointLoop
		}
		p = appendp(p, obj.AUNDEF)
		if len(unwindExitBranches) > 0 {
			p = appendp(p, AEnd)                    // end of unwindExit
			p = appendp(p, AI32Const, constAddr(1)) // return 1 when unwinding the stack
		}
	}

	currentDepth = 0
	blockDepths := make(map[*obj.Prog]int)
	for p := s.Func().Text; p != nil; p = p.Link {
		switch p.As {
		case ABlock, ALoop, AIf:
			currentDepth++
			blockDepths[p] = currentDepth
		case AEnd:
			currentDepth--
		}

		switch p.As {
		case ABr, ABrIf:
			if p.To.Type == obj.TYPE_BRANCH {
				blockDepth, ok := blockDepths[p.To.Val.(*obj.Prog)]
				if !ok {
					panic("label not at block")
				}
				p.To = constAddr(int64(currentDepth - blockDepth))
			}
		}
	}
}

func constAddr(value int64) obj.Addr {
	return obj.Addr{Type: obj.TYPE_CONST, Offset: value}
}

func regAddr(reg int16) obj.Addr {
	return obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

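// Most Go functions expect a single PC_B parameter in their WebAssembly
// signature. notUsePC_B lists the functions that use a different, low-level
// calling convention and therefore must not be passed a PC_B argument.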
var notUsePC_B = map[string]bool{
	"_rt0_wasm_js":           true,
	"wasm_export_run":        true,
	"wasm_export_resume":     true,
	"wasm_export_getsp":      true,
	"wasm_pc_f_loop":         true,
	"runtime.wasmMove":       true,
	"runtime.wasmZero":       true,
	"runtime.wasmDiv":        true,
	"runtime.wasmTruncS":     true,
	"runtime.wasmTruncU":     true,
	"runtime.gcWriteBarrier": true,
	"cmpbody":                true,
	"memeqbody":              true,
	"memcmp":                 true,
	"memchr":                 true,
}

func assemble(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
	type regVar struct {
		global bool
		index  uint64
	}

	type varDecl struct {
		count uint64
		typ   valueType
	}

	hasLocalSP := false
	regVars := [MAXREG - MINREG]*regVar{
		REG_SP - MINREG:    {true, 0},
		REG_CTXT - MINREG:  {true, 1},
		REG_g - MINREG:     {true, 2},
		REG_RET0 - MINREG:  {true, 3},
		REG_RET1 - MINREG:  {true, 4},
		REG_RET2 - MINREG:  {true, 5},
		REG_RET3 - MINREG:  {true, 6},
		REG_PAUSE - MINREG: {true, 7},
	}
	var varDecls []*varDecl
	useAssemblyRegMap := func() {
		for i := int16(0); i < 16; i++ {
			regVars[REG_R0+i-MINREG] = &regVar{false, uint64(i)}
		}
	}

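	// The function body starts with the declaration of locals. A few
	// runtime assembly functions use a fixed register-to-local mapping;
	// everything else takes PC_B as parameter 0 and caches SP in local 1.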
	switch s.Name {
	case "_rt0_wasm_js", "wasm_export_run", "wasm_export_resume", "wasm_export_getsp", "wasm_pc_f_loop",
		"runtime.wasmMove", "runtime.wasmZero", "runtime.wasmDiv", "runtime.wasmTruncS", "runtime.wasmTruncU", "memeqbody":
		varDecls = []*varDecl{}
		useAssemblyRegMap()
	case "memchr", "memcmp":
		varDecls = []*varDecl{{count: 2, typ: i32}}
		useAssemblyRegMap()
	case "cmpbody":
		varDecls = []*varDecl{{count: 2, typ: i64}}
		useAssemblyRegMap()
	case "runtime.gcWriteBarrier":
		varDecls = []*varDecl{{count: 4, typ: i64}}
		useAssemblyRegMap()
	default:
		// Normal calling convention: PC_B is the WebAssembly parameter,
		// local 1 caches SP.
		regVars[REG_PC_B-MINREG] = &regVar{false, 0}
		hasLocalSP = true

		var regUsed [MAXREG - MINREG]bool
		for p := s.Func().Text; p != nil; p = p.Link {
			if p.From.Reg != 0 {
				regUsed[p.From.Reg-MINREG] = true
			}
			if p.To.Reg != 0 {
				regUsed[p.To.Reg-MINREG] = true
			}
		}

		regs := []int16{REG_SP}
		for reg := int16(REG_R0); reg <= REG_F31; reg++ {
			if regUsed[reg-MINREG] {
				regs = append(regs, reg)
			}
		}

		var lastDecl *varDecl
		for i, reg := range regs {
			t := regType(reg)
			if lastDecl == nil || lastDecl.typ != t {
				lastDecl = &varDecl{
					count: 0,
					typ:   t,
				}
				varDecls = append(varDecls, lastDecl)
			}
			lastDecl.count++
			if reg != REG_SP {
				regVars[reg-MINREG] = &regVar{false, 1 + uint64(i)}
			}
		}
	}

	w := new(bytes.Buffer)

	writeUleb128(w, uint64(len(varDecls)))
	for _, decl := range varDecls {
		writeUleb128(w, decl.count)
		w.WriteByte(byte(decl.typ))
	}

	if hasLocalSP {
		// Copy SP from its global variable into a local variable.
		// Accessing a local variable is more efficient.
		updateLocalSP(w)
	}

	for p := s.Func().Text; p != nil; p = p.Link {
		switch p.As {
		case AGet:
			if p.From.Type != obj.TYPE_REG {
				panic("bad Get: argument is not a register")
			}
			reg := p.From.Reg
			v := regVars[reg-MINREG]
			if v == nil {
				panic("bad Get: invalid register")
			}
			if reg == REG_SP && hasLocalSP {
				writeOpcode(w, ALocalGet)
				writeUleb128(w, 1) // local SP
				continue
			}
			if v.global {
				writeOpcode(w, AGlobalGet)
			} else {
				writeOpcode(w, ALocalGet)
			}
			writeUleb128(w, v.index)
			continue

		case ASet:
			if p.To.Type != obj.TYPE_REG {
				panic("bad Set: argument is not a register")
			}
			reg := p.To.Reg
			v := regVars[reg-MINREG]
			if v == nil {
				panic("bad Set: invalid register")
			}
			if reg == REG_SP && hasLocalSP {
				writeOpcode(w, ALocalTee)
				writeUleb128(w, 1) // local SP
			}
			if v.global {
				writeOpcode(w, AGlobalSet)
			} else {
				if p.Link.As == AGet && p.Link.From.Reg == reg {
					writeOpcode(w, ALocalTee)
					p = p.Link
				} else {
					writeOpcode(w, ALocalSet)
				}
			}
			writeUleb128(w, v.index)
			continue

		case ATee:
			if p.To.Type != obj.TYPE_REG {
				panic("bad Tee: argument is not a register")
			}
			reg := p.To.Reg
			v := regVars[reg-MINREG]
			if v == nil {
				panic("bad Tee: invalid register")
			}
			writeOpcode(w, ALocalTee)
			writeUleb128(w, v.index)
			continue

		case ANot:
			writeOpcode(w, AI32Eqz)
			continue

		case obj.AUNDEF:
			writeOpcode(w, AUnreachable)
			continue

		case obj.ANOP, obj.ATEXT, obj.AFUNCDATA, obj.APCDATA:
			// ignore
			continue
		}

		writeOpcode(w, p.As)

		switch p.As {
		case ABlock, ALoop, AIf:
			if p.From.Offset != 0 {
				// Block has an explicit result type: 0x80-offset encodes
				// the value type byte (e.g. 0x7F for i32).
				w.WriteByte(0x80 - byte(p.From.Offset))
				continue
			}
			w.WriteByte(0x40) // no block type

		case ABr, ABrIf:
			if p.To.Type != obj.TYPE_CONST {
				panic("bad Br/BrIf")
			}
			writeUleb128(w, uint64(p.To.Offset))

		case ABrTable:
			idxs := p.To.Val.([]uint64)
			writeUleb128(w, uint64(len(idxs)-1))
			for _, idx := range idxs {
				writeUleb128(w, idx)
			}

		case ACall:
			switch p.To.Type {
			case obj.TYPE_CONST:
				writeUleb128(w, uint64(p.To.Offset))

			case obj.TYPE_MEM:
				if p.To.Name != obj.NAME_EXTERN && p.To.Name != obj.NAME_STATIC {
					fmt.Println(p.To)
					panic("bad name for Call")
				}
				r := obj.Addrel(s)
				r.Siz = 1 // actually variable sized
				r.Off = int32(w.Len())
				r.Type = objabi.R_CALL
				if p.Mark&WasmImport != 0 {
					r.Type = objabi.R_WASMIMPORT
				}
				r.Sym = p.To.Sym
				if hasLocalSP {
					// The stack may have moved, which changes SP. Update the local SP variable.
					updateLocalSP(w)
				}

			default:
				panic("bad type for Call")
			}

		case ACallIndirect:
			writeUleb128(w, uint64(p.To.Offset))
			w.WriteByte(0x00) // reserved value
			if hasLocalSP {
				// The stack may have moved, which changes SP. Update the local SP variable.
				updateLocalSP(w)
			}

		case AI32Const, AI64Const:
			if p.From.Name == obj.NAME_EXTERN {
				r := obj.Addrel(s)
				r.Siz = 1 // actually variable sized
				r.Off = int32(w.Len())
				r.Type = objabi.R_ADDR
				r.Sym = p.From.Sym
				r.Add = p.From.Offset
				break
			}
			writeSleb128(w, p.From.Offset)

		case AF32Const:
			b := make([]byte, 4)
			binary.LittleEndian.PutUint32(b, math.Float32bits(float32(p.From.Val.(float64))))
			w.Write(b)

		case AF64Const:
			b := make([]byte, 8)
			binary.LittleEndian.PutUint64(b, math.Float64bits(p.From.Val.(float64)))
			w.Write(b)

		case AI32Load, AI64Load, AF32Load, AF64Load, AI32Load8S, AI32Load8U, AI32Load16S, AI32Load16U, AI64Load8S, AI64Load8U, AI64Load16S, AI64Load16U, AI64Load32S, AI64Load32U:
			if p.From.Offset < 0 {
				panic("negative offset for *Load")
			}
			if p.From.Type != obj.TYPE_CONST {
				panic("bad type for *Load")
			}
			if p.From.Offset > math.MaxUint32 {
				ctxt.Diag("bad offset in %v", p)
			}
			writeUleb128(w, align(p.As))
			writeUleb128(w, uint64(p.From.Offset))

		case AI32Store, AI64Store, AF32Store, AF64Store, AI32Store8, AI32Store16, AI64Store8, AI64Store16, AI64Store32:
			if p.To.Offset < 0 {
				panic("negative offset")
			}
			if p.From.Offset > math.MaxUint32 {
				ctxt.Diag("bad offset in %v", p)
			}
			writeUleb128(w, align(p.As))
			writeUleb128(w, uint64(p.To.Offset))

		case ACurrentMemory, AGrowMemory:
			w.WriteByte(0x00)
		}
	}

	w.WriteByte(0x0b) // end of function body

	s.P = w.Bytes()
}

func updateLocalSP(w *bytes.Buffer) {
	writeOpcode(w, AGlobalGet)
	writeUleb128(w, 0) // global SP
	writeOpcode(w, ALocalSet)
	writeUleb128(w, 1) // local SP
}

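// writeOpcode translates an assembler opcode into its WebAssembly binary
// encoding. Opcodes from AI32TruncSatF32S onwards are encoded with the
// 0xFC prefix byte.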
func writeOpcode(w *bytes.Buffer, as obj.As) {
	switch {
	case as < AUnreachable:
		panic(fmt.Sprintf("unexpected assembler op: %s", as))
	case as < AEnd:
		w.WriteByte(byte(as - AUnreachable + 0x00))
	case as < ADrop:
		w.WriteByte(byte(as - AEnd + 0x0B))
	case as < ALocalGet:
		w.WriteByte(byte(as - ADrop + 0x1A))
	case as < AI32Load:
		w.WriteByte(byte(as - ALocalGet + 0x20))
	case as < AI32TruncSatF32S:
		w.WriteByte(byte(as - AI32Load + 0x28))
	case as < ALast:
		w.WriteByte(0xFC)
		w.WriteByte(byte(as - AI32TruncSatF32S + 0x00))
	default:
		panic(fmt.Sprintf("unexpected assembler op: %s", as))
	}
}

type valueType byte

const (
	i32 valueType = 0x7F
	i64 valueType = 0x7E
	f32 valueType = 0x7D
	f64 valueType = 0x7C
)

func regType(reg int16) valueType {
	switch {
	case reg == REG_SP:
		return i32
	case reg >= REG_R0 && reg <= REG_R15:
		return i64
	case reg >= REG_F0 && reg <= REG_F15:
		return f32
	case reg >= REG_F16 && reg <= REG_F31:
		return f64
	default:
		panic("invalid register")
	}
}

func align(as obj.As) uint64 {
	switch as {
	case AI32Load8S, AI32Load8U, AI64Load8S, AI64Load8U, AI32Store8, AI64Store8:
		return 0
	case AI32Load16S, AI32Load16U, AI64Load16S, AI64Load16U, AI32Store16, AI64Store16:
		return 1
	case AI32Load, AF32Load, AI64Load32S, AI64Load32U, AI32Store, AF32Store, AI64Store32:
		return 2
	case AI64Load, AF64Load, AI64Store, AF64Store:
		return 3
	default:
		panic("align: bad op")
	}
}

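// writeUleb128 and writeSleb128 emit unsigned and signed LEB128 encodings,
// the variable-length integer format used throughout the WebAssembly
// binary format.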
func writeUleb128(w io.ByteWriter, v uint64) {
	if v < 128 {
		w.WriteByte(uint8(v))
		return
	}
	more := true
	for more {
		c := uint8(v & 0x7f)
		v >>= 7
		more = v != 0
		if more {
			c |= 0x80
		}
		w.WriteByte(c)
	}
}

func writeSleb128(w io.ByteWriter, v int64) {
	more := true
	for more {
		c := uint8(v & 0x7f)
		s := uint8(v & 0x40)
		v >>= 7
		more = !((v == 0 && s == 0) || (v == -1 && s != 0))
		if more {
			c |= 0x80
		}
		w.WriteByte(c)
	}
}