Source file src/cmd/compile/internal/riscv64/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package riscv64

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/riscv"
)

// ssaRegToReg maps ssa register numbers to obj register numbers.
var ssaRegToReg = []int16{
	riscv.REG_X0,
	// X1 (LR): unused
	riscv.REG_X2,
	riscv.REG_X3,
	riscv.REG_X4,
	riscv.REG_X5,
	riscv.REG_X6,
	riscv.REG_X7,
	riscv.REG_X8,
	riscv.REG_X9,
	riscv.REG_X10,
	riscv.REG_X11,
	riscv.REG_X12,
	riscv.REG_X13,
	riscv.REG_X14,
	riscv.REG_X15,
	riscv.REG_X16,
	riscv.REG_X17,
	riscv.REG_X18,
	riscv.REG_X19,
	riscv.REG_X20,
	riscv.REG_X21,
	riscv.REG_X22,
	riscv.REG_X23,
	riscv.REG_X24,
	riscv.REG_X25,
	riscv.REG_X26,
	riscv.REG_X27,
	riscv.REG_X28,
	riscv.REG_X29,
	riscv.REG_X30,
	riscv.REG_X31,
	riscv.REG_F0,
	riscv.REG_F1,
	riscv.REG_F2,
	riscv.REG_F3,
	riscv.REG_F4,
	riscv.REG_F5,
	riscv.REG_F6,
	riscv.REG_F7,
	riscv.REG_F8,
	riscv.REG_F9,
	riscv.REG_F10,
	riscv.REG_F11,
	riscv.REG_F12,
	riscv.REG_F13,
	riscv.REG_F14,
	riscv.REG_F15,
	riscv.REG_F16,
	riscv.REG_F17,
	riscv.REG_F18,
	riscv.REG_F19,
	riscv.REG_F20,
	riscv.REG_F21,
	riscv.REG_F22,
	riscv.REG_F23,
	riscv.REG_F24,
	riscv.REG_F25,
	riscv.REG_F26,
	riscv.REG_F27,
	riscv.REG_F28,
	riscv.REG_F29,
	riscv.REG_F30,
	riscv.REG_F31,
	0, // SB isn't a real register.  We fill an Addr.Reg field with 0 in this case.
}

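// loadByType returns the load instruction of the given type.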
func loadByType(t *types.Type) obj.As {
	width := t.Size()

	if t.IsFloat() {
		switch width {
		case 4:
			return riscv.AMOVF
		case 8:
			return riscv.AMOVD
		default:
			base.Fatalf("unknown float width for load %d in type %v", width, t)
			return 0
		}
	}

	switch width {
	case 1:
		if t.IsSigned() {
			return riscv.AMOVB
		} else {
			return riscv.AMOVBU
		}
	case 2:
		if t.IsSigned() {
			return riscv.AMOVH
		} else {
			return riscv.AMOVHU
		}
	case 4:
		if t.IsSigned() {
			return riscv.AMOVW
		} else {
			return riscv.AMOVWU
		}
	case 8:
		return riscv.AMOV
	default:
		base.Fatalf("unknown width for load %d in type %v", width, t)
		return 0
	}
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type) obj.As {
	width := t.Size()

	if t.IsFloat() {
		switch width {
		case 4:
			return riscv.AMOVF
		case 8:
			return riscv.AMOVD
		default:
			base.Fatalf("unknown float width for store %d in type %v", width, t)
			return 0
		}
	}

	switch width {
	case 1:
		return riscv.AMOVB
	case 2:
		return riscv.AMOVH
	case 4:
		return riscv.AMOVW
	case 8:
		return riscv.AMOV
	default:
		base.Fatalf("unknown width for store %d in type %v", width, t)
		return 0
	}
}

// largestMove returns the largest move instruction possible and its size,
// given the alignment of the total size of the move.
//
// e.g., a 16-byte move may use MOV, but an 11-byte move must use MOVB.
//
// Note that the moves may not be on naturally aligned addresses depending on
// the source and destination.
//
// This matches the calculation in ssa.moveSize.
func largestMove(alignment int64) (obj.As, int64) {
	switch {
	case alignment%8 == 0:
		return riscv.AMOV, 8
	case alignment%4 == 0:
		return riscv.AMOVW, 4
	case alignment%2 == 0:
		return riscv.AMOVH, 2
	default:
		return riscv.AMOVB, 1
	}
}

// ssaMarkMoves marks any MOVXconst ops that need to avoid clobbering flags.
// RISC-V has no flags, so this is a no-op.
func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {}

func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	s.SetPos(v.Pos)

	switch v.Op {
	case ssa.OpInitMem:
		// memory arg needs no code
	case ssa.OpArg:
		// input args need no code
	case ssa.OpPhi:
		ssagen.CheckLoweredPhi(v)
	case ssa.OpCopy, ssa.OpRISCV64MOVconvert, ssa.OpRISCV64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		rs := v.Args[0].Reg()
		rd := v.Reg()
		if rs == rd {
			return
		}
		as := riscv.AMOV
		if v.Type.IsFloat() {
			as = riscv.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rs
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rd
	case ssa.OpRISCV64MOVDnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpSP, ssa.OpSB, ssa.OpGetG:
		// nothing to do
	case ssa.OpRISCV64MOVBreg, ssa.OpRISCV64MOVHreg, ssa.OpRISCV64MOVWreg,
		ssa.OpRISCV64MOVBUreg, ssa.OpRISCV64MOVHUreg, ssa.OpRISCV64MOVWUreg:
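		// A sign or zero extension is redundant if the argument was produced
		// by a load of the same width and signedness; look through register
		// copies to detect that case and emit a plain register move instead.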
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpRISCV64MOVDreg {
			a = a.Args[0]
		}
		as := v.Op.Asm()
		rs := v.Args[0].Reg()
		rd := v.Reg()
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpRISCV64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load and already sign/zero-extended
				if rs == rd {
					return
				}
				as = riscv.AMOV
			default:
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rs
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rd
	case ssa.OpRISCV64ADD, ssa.OpRISCV64SUB, ssa.OpRISCV64SUBW, ssa.OpRISCV64XOR, ssa.OpRISCV64OR, ssa.OpRISCV64AND,
		ssa.OpRISCV64SLL, ssa.OpRISCV64SRA, ssa.OpRISCV64SRL,
		ssa.OpRISCV64SLT, ssa.OpRISCV64SLTU, ssa.OpRISCV64MUL, ssa.OpRISCV64MULW, ssa.OpRISCV64MULH,
		ssa.OpRISCV64MULHU, ssa.OpRISCV64DIV, ssa.OpRISCV64DIVU, ssa.OpRISCV64DIVW,
		ssa.OpRISCV64DIVUW, ssa.OpRISCV64REM, ssa.OpRISCV64REMU, ssa.OpRISCV64REMW,
		ssa.OpRISCV64REMUW,
		ssa.OpRISCV64FADDS, ssa.OpRISCV64FSUBS, ssa.OpRISCV64FMULS, ssa.OpRISCV64FDIVS,
		ssa.OpRISCV64FEQS, ssa.OpRISCV64FNES, ssa.OpRISCV64FLTS, ssa.OpRISCV64FLES,
		ssa.OpRISCV64FADDD, ssa.OpRISCV64FSUBD, ssa.OpRISCV64FMULD, ssa.OpRISCV64FDIVD,
		ssa.OpRISCV64FEQD, ssa.OpRISCV64FNED, ssa.OpRISCV64FLTD, ssa.OpRISCV64FLED,
		ssa.OpRISCV64FSGNJD:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpRISCV64LoweredMuluhilo:
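		// MULHU	Rarg1, Rarg0, Rout0
		// MUL	Rarg1, Rarg0, Rout1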
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(riscv.AMULHU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(riscv.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg1()
	case ssa.OpRISCV64LoweredMuluover:
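		// MULHU	Rarg1, Rarg0, Rout1
		// MUL	Rarg1, Rarg0, Rout0
		// SNEZ	Rout1, Rout1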
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(riscv.AMULHU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(riscv.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(riscv.ASNEZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Reg1()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Reg1()
	case ssa.OpRISCV64FMADDD, ssa.OpRISCV64FMSUBD, ssa.OpRISCV64FNMADDD, ssa.OpRISCV64FNMSUBD:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		r3 := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.SetRestArgs([]obj.Addr{{Type: obj.TYPE_REG, Reg: r3}})
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FNEGS, ssa.OpRISCV64FABSD, ssa.OpRISCV64FSQRTD, ssa.OpRISCV64FNEGD,
		ssa.OpRISCV64FMVSX, ssa.OpRISCV64FMVDX,
		ssa.OpRISCV64FCVTSW, ssa.OpRISCV64FCVTSL, ssa.OpRISCV64FCVTWS, ssa.OpRISCV64FCVTLS,
		ssa.OpRISCV64FCVTDW, ssa.OpRISCV64FCVTDL, ssa.OpRISCV64FCVTWD, ssa.OpRISCV64FCVTLD, ssa.OpRISCV64FCVTDS, ssa.OpRISCV64FCVTSD,
		ssa.OpRISCV64NOT, ssa.OpRISCV64NEG, ssa.OpRISCV64NEGW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64ADDI, ssa.OpRISCV64ADDIW, ssa.OpRISCV64XORI, ssa.OpRISCV64ORI, ssa.OpRISCV64ANDI,
		ssa.OpRISCV64SLLI, ssa.OpRISCV64SRAI, ssa.OpRISCV64SRLI, ssa.OpRISCV64SLTI,
		ssa.OpRISCV64SLTIU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVaddr:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_ADDR
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// MOV $sym+off(base), R
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOV $off(SP), R
			wantreg = "SP"
			p.From.Reg = riscv.REG_SP
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpRISCV64MOVBload, ssa.OpRISCV64MOVHload, ssa.OpRISCV64MOVWload, ssa.OpRISCV64MOVDload,
		ssa.OpRISCV64MOVBUload, ssa.OpRISCV64MOVHUload, ssa.OpRISCV64MOVWUload,
		ssa.OpRISCV64FMOVWload, ssa.OpRISCV64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVBstore, ssa.OpRISCV64MOVHstore, ssa.OpRISCV64MOVWstore, ssa.OpRISCV64MOVDstore,
		ssa.OpRISCV64FMOVWstore, ssa.OpRISCV64FMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpRISCV64MOVBstorezero, ssa.OpRISCV64MOVHstorezero, ssa.OpRISCV64MOVWstorezero, ssa.OpRISCV64MOVDstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpRISCV64SEQZ, ssa.OpRISCV64SNEZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64CALLstatic, ssa.OpRISCV64CALLclosure, ssa.OpRISCV64CALLinter:
		s.Call(v)
	case ssa.OpRISCV64CALLtail:
		s.TailCall(v)
	case ssa.OpRISCV64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpRISCV64LoweredPanicBoundsA, ssa.OpRISCV64LoweredPanicBoundsB, ssa.OpRISCV64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs

	case ssa.OpRISCV64LoweredAtomicLoad8:
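		// FENCE
		// MOVBU	(Rarg0), Rout0
		// FENCE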
		s.Prog(riscv.AFENCE)
		p := s.Prog(riscv.AMOVBU)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(riscv.AFENCE)

	case ssa.OpRISCV64LoweredAtomicLoad32, ssa.OpRISCV64LoweredAtomicLoad64:
		as := riscv.ALRW
		if v.Op == ssa.OpRISCV64LoweredAtomicLoad64 {
			as = riscv.ALRD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicStore8:
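		// FENCE
		// MOVB	Rarg1, (Rarg0)
		// FENCE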
		s.Prog(riscv.AFENCE)
		p := s.Prog(riscv.AMOVB)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(riscv.AFENCE)

	case ssa.OpRISCV64LoweredAtomicStore32, ssa.OpRISCV64LoweredAtomicStore64:
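		// AMOSWAP(W|D)	Rarg1, (Rarg0), ZERO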
		as := riscv.AAMOSWAPW
		if v.Op == ssa.OpRISCV64LoweredAtomicStore64 {
			as = riscv.AAMOSWAPD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_ZERO

	case ssa.OpRISCV64LoweredAtomicAdd32, ssa.OpRISCV64LoweredAtomicAdd64:
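		// AMOADD(W|D)	Rarg1, (Rarg0), Rtmp
		// ADD	Rtmp, Rarg1, Rout0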
		as := riscv.AAMOADDW
		if v.Op == ssa.OpRISCV64LoweredAtomicAdd64 {
			as = riscv.AAMOADDD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_TMP

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = riscv.REG_TMP
		p2.Reg = v.Args[1].Reg()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicExchange32, ssa.OpRISCV64LoweredAtomicExchange64:
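		// AMOSWAP(W|D)	Rarg1, (Rarg0), Rout0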
		as := riscv.AAMOSWAPW
		if v.Op == ssa.OpRISCV64LoweredAtomicExchange64 {
			as = riscv.AAMOSWAPD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicCas32, ssa.OpRISCV64LoweredAtomicCas64:
		// MOV  ZERO, Rout
		// LR	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 3(PC)
		// SC	Rarg2, (Rarg0), Rtmp
		// BNE	Rtmp, ZERO, -3(PC)
		// MOV	$1, Rout

		lr := riscv.ALRW
		sc := riscv.ASCW
		if v.Op == ssa.OpRISCV64LoweredAtomicCas64 {
			lr = riscv.ALRD
			sc = riscv.ASCD
		}

		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()

		p := s.Prog(riscv.AMOV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out

		p1 := s.Prog(lr)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = riscv.REG_TMP

		p2 := s.Prog(riscv.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = riscv.REG_TMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(sc)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = riscv.REG_TMP

		p4 := s.Prog(riscv.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = riscv.REG_TMP
		p4.Reg = riscv.REG_ZERO
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p1)

		p5 := s.Prog(riscv.AMOV)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = 1
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out

		p6 := s.Prog(obj.ANOP)
		p2.To.SetTarget(p6)

	case ssa.OpRISCV64LoweredAtomicAnd32, ssa.OpRISCV64LoweredAtomicOr32:
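		// AMO(AND|OR)W	Rarg1, (Rarg0), ZERO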
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_ZERO

	case ssa.OpRISCV64LoweredZero:
		mov, sz := largestMove(v.AuxInt)

		//	mov	ZERO, (Rarg0)
		//	ADD	$sz, Rarg0
		//	BGEU	Rarg1, Rarg0, -2(PC)

		p := s.Prog(mov)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(riscv.ABGEU)
		p3.To.Type = obj.TYPE_BRANCH
		p3.Reg = v.Args[0].Reg()
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[1].Reg()
		p3.To.SetTarget(p)

	case ssa.OpRISCV64LoweredMove:
		mov, sz := largestMove(v.AuxInt)

		//	mov	(Rarg1), T2
		//	mov	T2, (Rarg0)
		//	ADD	$sz, Rarg0
		//	ADD	$sz, Rarg1
		//	BGEU	Rarg2, Rarg0, -4(PC)

		p := s.Prog(mov)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = riscv.REG_T2

		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = riscv.REG_T2
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(riscv.AADD)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Args[0].Reg()

		p4 := s.Prog(riscv.AADD)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Args[1].Reg()

		p5 := s.Prog(riscv.ABGEU)
		p5.To.Type = obj.TYPE_BRANCH
		p5.Reg = v.Args[1].Reg()
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Args[2].Reg()
		p5.To.SetTarget(p)

	case ssa.OpRISCV64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		// TODO: optimizations. See arm and amd64 LoweredNilCheck.
		p := s.Prog(riscv.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = riscv.REG_ZERO
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}

	case ssa.OpRISCV64LoweredGetClosurePtr:
		// Closure pointer is S10 (riscv.REG_CTXT).
		ssagen.CheckLoweredGetClosurePtr(v)

	case ssa.OpRISCV64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(riscv.AMOV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpRISCV64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpRISCV64DUFFZERO:
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt

	case ssa.OpRISCV64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt

	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.

	default:
		v.Fatalf("Unhandled op %v", v.Op)
	}
}

var blockBranch = [...]obj.As{
	ssa.BlockRISCV64BEQ:  riscv.ABEQ,
	ssa.BlockRISCV64BEQZ: riscv.ABEQZ,
	ssa.BlockRISCV64BGE:  riscv.ABGE,
	ssa.BlockRISCV64BGEU: riscv.ABGEU,
	ssa.BlockRISCV64BGEZ: riscv.ABGEZ,
	ssa.BlockRISCV64BGTZ: riscv.ABGTZ,
	ssa.BlockRISCV64BLEZ: riscv.ABLEZ,
	ssa.BlockRISCV64BLT:  riscv.ABLT,
	ssa.BlockRISCV64BLTU: riscv.ABLTU,
	ssa.BlockRISCV64BLTZ: riscv.ABLTZ,
	ssa.BlockRISCV64BNE:  riscv.ABNE,
	ssa.BlockRISCV64BNEZ: riscv.ABNEZ,
}

func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	s.SetPos(b.Pos)

	switch b.Kind {
	case ssa.BlockDefer:
		// defer returns in A0:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(riscv.ABNE)
		p.To.Type = obj.TYPE_BRANCH
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.Reg = riscv.REG_A0
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BNEZ,
		ssa.BlockRISCV64BLT, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BGEZ,
		ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:

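		// Emit the conditional branch so that the successor laid out next,
		// if any, falls through. Otherwise branch to the first (likely)
		// successor and jump to the second, swapping the two when the
		// branch is marked unlikely.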
		as := blockBranch[b.Kind]
		invAs := riscv.InvertBranch(as)

		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(invAs, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(as, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(as, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(invAs, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}

		p.From.Type = obj.TYPE_REG
		switch b.Kind {
		case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BLT, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:
			if b.NumControls() != 2 {
				b.Fatalf("Unexpected number of controls (%d != 2): %s", b.NumControls(), b.LongString())
			}
			p.From.Reg = b.Controls[0].Reg()
			p.Reg = b.Controls[1].Reg()

		case ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNEZ, ssa.BlockRISCV64BGEZ, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ:
			if b.NumControls() != 1 {
				b.Fatalf("Unexpected number of controls (%d != 1): %s", b.NumControls(), b.LongString())
			}
			p.From.Reg = b.Controls[0].Reg()
		}

	default:
		b.Fatalf("Unhandled block: %s", b.LongString())
	}
}