Source file src/cmd/compile/internal/mips64/ssa.go

     1  // Copyright 2016 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package mips64
     6  
     7  import (
     8  	"math"
     9  
    10  	"cmd/compile/internal/base"
    11  	"cmd/compile/internal/ir"
    12  	"cmd/compile/internal/logopt"
    13  	"cmd/compile/internal/ssa"
    14  	"cmd/compile/internal/ssagen"
    15  	"cmd/compile/internal/types"
    16  	"cmd/internal/obj"
    17  	"cmd/internal/obj/mips"
    18  )
    19  
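        // This file lowers SSA form into machine instructions for GOARCH=mips64 and
        // mips64le: ssaGenValue emits the Prog(s) for a single SSA value, and
        // ssaGenBlock emits the control flow that ends a basic block.
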
    20  // isFPreg reports whether r is a floating-point (FP) register.
    21  func isFPreg(r int16) bool {
    22  	return mips.REG_F0 <= r && r <= mips.REG_F31
    23  }
    24  
    25  // isHILO reports whether r is the HI or LO register.
    26  func isHILO(r int16) bool {
    27  	return r == mips.REG_HI || r == mips.REG_LO
    28  }
    29  
    30  // loadByType returns the load instruction for a value of the given type destined for register r.
    31  func loadByType(t *types.Type, r int16) obj.As {
    32  	if isFPreg(r) {
    33  		if t.Size() == 4 { // float32 or int32
    34  			return mips.AMOVF
    35  		} else { // float64 or int64
    36  			return mips.AMOVD
    37  		}
    38  	} else {
    39  		switch t.Size() {
    40  		case 1:
    41  			if t.IsSigned() {
    42  				return mips.AMOVB
    43  			} else {
    44  				return mips.AMOVBU
    45  			}
    46  		case 2:
    47  			if t.IsSigned() {
    48  				return mips.AMOVH
    49  			} else {
    50  				return mips.AMOVHU
    51  			}
    52  		case 4:
    53  			if t.IsSigned() {
    54  				return mips.AMOVW
    55  			} else {
    56  				return mips.AMOVWU
    57  			}
    58  		case 8:
    59  			return mips.AMOVV
    60  		}
    61  	}
    62  	panic("bad load type")
    63  }
    64  
    65  // storeByType returns the store instruction for a value of the given type stored from register r.
    66  func storeByType(t *types.Type, r int16) obj.As {
    67  	if isFPreg(r) {
    68  		if t.Size() == 4 { // float32 or int32
    69  			return mips.AMOVF
    70  		} else { // float64 or int64
    71  			return mips.AMOVD
    72  		}
    73  	} else {
    74  		switch t.Size() {
    75  		case 1:
    76  			return mips.AMOVB
    77  		case 2:
    78  			return mips.AMOVH
    79  		case 4:
    80  			return mips.AMOVW
    81  		case 8:
    82  			return mips.AMOVV
    83  		}
    84  	}
    85  	panic("bad store type")
    86  }
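        // Note the asymmetry above: loads pick a sign- or zero-extending variant
        // (e.g. MOVB vs. MOVBU for a 1-byte value) because a load fills the whole
        // 64-bit register, while stores ignore signedness since they only write
        // the low bytes.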
    87  
    88  func ssaGenValue(s *ssagen.State, v *ssa.Value) {
    89  	switch v.Op {
    90  	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
    91  		if v.Type.IsMemory() {
    92  			return
    93  		}
    94  		x := v.Args[0].Reg()
    95  		y := v.Reg()
    96  		if x == y {
    97  			return
    98  		}
    99  		as := mips.AMOVV
   100  		if isFPreg(x) && isFPreg(y) {
   101  			as = mips.AMOVD
   102  		}
   103  		p := s.Prog(as)
   104  		p.From.Type = obj.TYPE_REG
   105  		p.From.Reg = x
   106  		p.To.Type = obj.TYPE_REG
   107  		p.To.Reg = y
   108  		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
   109  			// cannot move between special registers, use TMP as intermediate
   110  			p.To.Reg = mips.REGTMP
   111  			p = s.Prog(mips.AMOVV)
   112  			p.From.Type = obj.TYPE_REG
   113  			p.From.Reg = mips.REGTMP
   114  			p.To.Type = obj.TYPE_REG
   115  			p.To.Reg = y
   116  		}
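        		// Illustrative expansion: copying LO into an FP register such as F0
        		// goes through the temporary, i.e.
        		//	MOVV	LO, R23
        		//	MOVV	R23, F0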
   117  	case ssa.OpMIPS64MOVVnop:
   118  		// nothing to do
   119  	case ssa.OpLoadReg:
   120  		if v.Type.IsFlags() {
   121  			v.Fatalf("load flags not implemented: %v", v.LongString())
   122  			return
   123  		}
   124  		r := v.Reg()
   125  		p := s.Prog(loadByType(v.Type, r))
   126  		ssagen.AddrAuto(&p.From, v.Args[0])
   127  		p.To.Type = obj.TYPE_REG
   128  		p.To.Reg = r
   129  		if isHILO(r) {
   130  			// cannot directly load, load to TMP and move
   131  			p.To.Reg = mips.REGTMP
   132  			p = s.Prog(mips.AMOVV)
   133  			p.From.Type = obj.TYPE_REG
   134  			p.From.Reg = mips.REGTMP
   135  			p.To.Type = obj.TYPE_REG
   136  			p.To.Reg = r
   137  		}
   138  	case ssa.OpStoreReg:
   139  		if v.Type.IsFlags() {
   140  			v.Fatalf("store flags not implemented: %v", v.LongString())
   141  			return
   142  		}
   143  		r := v.Args[0].Reg()
   144  		if isHILO(r) {
   145  			// cannot directly store, move to TMP and store
   146  			p := s.Prog(mips.AMOVV)
   147  			p.From.Type = obj.TYPE_REG
   148  			p.From.Reg = r
   149  			p.To.Type = obj.TYPE_REG
   150  			p.To.Reg = mips.REGTMP
   151  			r = mips.REGTMP
   152  		}
   153  		p := s.Prog(storeByType(v.Type, r))
   154  		p.From.Type = obj.TYPE_REG
   155  		p.From.Reg = r
   156  		ssagen.AddrAuto(&p.To, v)
   157  	case ssa.OpMIPS64ADDV,
   158  		ssa.OpMIPS64SUBV,
   159  		ssa.OpMIPS64AND,
   160  		ssa.OpMIPS64OR,
   161  		ssa.OpMIPS64XOR,
   162  		ssa.OpMIPS64NOR,
   163  		ssa.OpMIPS64SLLV,
   164  		ssa.OpMIPS64SRLV,
   165  		ssa.OpMIPS64SRAV,
   166  		ssa.OpMIPS64ADDF,
   167  		ssa.OpMIPS64ADDD,
   168  		ssa.OpMIPS64SUBF,
   169  		ssa.OpMIPS64SUBD,
   170  		ssa.OpMIPS64MULF,
   171  		ssa.OpMIPS64MULD,
   172  		ssa.OpMIPS64DIVF,
   173  		ssa.OpMIPS64DIVD:
   174  		p := s.Prog(v.Op.Asm())
   175  		p.From.Type = obj.TYPE_REG
   176  		p.From.Reg = v.Args[1].Reg()
   177  		p.Reg = v.Args[0].Reg()
   178  		p.To.Type = obj.TYPE_REG
   179  		p.To.Reg = v.Reg()
   180  	case ssa.OpMIPS64SGT,
   181  		ssa.OpMIPS64SGTU:
   182  		p := s.Prog(v.Op.Asm())
   183  		p.From.Type = obj.TYPE_REG
   184  		p.From.Reg = v.Args[0].Reg()
   185  		p.Reg = v.Args[1].Reg()
   186  		p.To.Type = obj.TYPE_REG
   187  		p.To.Reg = v.Reg()
   188  	case ssa.OpMIPS64ADDVconst,
   189  		ssa.OpMIPS64SUBVconst,
   190  		ssa.OpMIPS64ANDconst,
   191  		ssa.OpMIPS64ORconst,
   192  		ssa.OpMIPS64XORconst,
   193  		ssa.OpMIPS64NORconst,
   194  		ssa.OpMIPS64SLLVconst,
   195  		ssa.OpMIPS64SRLVconst,
   196  		ssa.OpMIPS64SRAVconst,
   197  		ssa.OpMIPS64SGTconst,
   198  		ssa.OpMIPS64SGTUconst:
   199  		p := s.Prog(v.Op.Asm())
   200  		p.From.Type = obj.TYPE_CONST
   201  		p.From.Offset = v.AuxInt
   202  		p.Reg = v.Args[0].Reg()
   203  		p.To.Type = obj.TYPE_REG
   204  		p.To.Reg = v.Reg()
   205  	case ssa.OpMIPS64MULV,
   206  		ssa.OpMIPS64MULVU,
   207  		ssa.OpMIPS64DIVV,
   208  		ssa.OpMIPS64DIVVU:
   209  		// result in hi,lo
   210  		p := s.Prog(v.Op.Asm())
   211  		p.From.Type = obj.TYPE_REG
   212  		p.From.Reg = v.Args[1].Reg()
   213  		p.Reg = v.Args[0].Reg()
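        		// No destination operand is set: the instruction leaves its results in
        		// the HI and LO registers, and consumers copy them out via the
        		// HI/LO-aware move handling above.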
   214  	case ssa.OpMIPS64MOVVconst:
   215  		r := v.Reg()
   216  		p := s.Prog(v.Op.Asm())
   217  		p.From.Type = obj.TYPE_CONST
   218  		p.From.Offset = v.AuxInt
   219  		p.To.Type = obj.TYPE_REG
   220  		p.To.Reg = r
   221  		if isFPreg(r) || isHILO(r) {
   222  			// cannot move into FP or special registers, use TMP as intermediate
   223  			p.To.Reg = mips.REGTMP
   224  			p = s.Prog(mips.AMOVV)
   225  			p.From.Type = obj.TYPE_REG
   226  			p.From.Reg = mips.REGTMP
   227  			p.To.Type = obj.TYPE_REG
   228  			p.To.Reg = r
   229  		}
   230  	case ssa.OpMIPS64MOVFconst,
   231  		ssa.OpMIPS64MOVDconst:
   232  		p := s.Prog(v.Op.Asm())
   233  		p.From.Type = obj.TYPE_FCONST
   234  		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
   235  		p.To.Type = obj.TYPE_REG
   236  		p.To.Reg = v.Reg()
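        		// Both MOVFconst and MOVDconst carry the constant as float64 bits in
        		// AuxInt; a float32 constant is stored as its exactly-representable
        		// float64 value.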
   237  	case ssa.OpMIPS64CMPEQF,
   238  		ssa.OpMIPS64CMPEQD,
   239  		ssa.OpMIPS64CMPGEF,
   240  		ssa.OpMIPS64CMPGED,
   241  		ssa.OpMIPS64CMPGTF,
   242  		ssa.OpMIPS64CMPGTD:
   243  		p := s.Prog(v.Op.Asm())
   244  		p.From.Type = obj.TYPE_REG
   245  		p.From.Reg = v.Args[0].Reg()
   246  		p.Reg = v.Args[1].Reg()
   247  	case ssa.OpMIPS64MOVVaddr:
   248  		p := s.Prog(mips.AMOVV)
   249  		p.From.Type = obj.TYPE_ADDR
   250  		p.From.Reg = v.Args[0].Reg()
   251  		var wantreg string
   252  		// MOVV $sym+off(base), R
   253  		// the assembler expands this as follows:
   254  		// - base is SP: add the constant offset to SP (R29);
   255  		//               when the constant is large, the tmp register (R23) may be used
   256  		// - base is SB: load the external address with a relocation
   257  		switch v.Aux.(type) {
   258  		default:
   259  			v.Fatalf("aux is of unknown type %T", v.Aux)
   260  		case *obj.LSym:
   261  			wantreg = "SB"
   262  			ssagen.AddAux(&p.From, v)
   263  		case *ir.Name:
   264  			wantreg = "SP"
   265  			ssagen.AddAux(&p.From, v)
   266  		case nil:
   267  			// No sym, just MOVV $off(SP), R
   268  			wantreg = "SP"
   269  			p.From.Offset = v.AuxInt
   270  		}
   271  		if reg := v.Args[0].RegName(); reg != wantreg {
   272  			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
   273  		}
   274  		p.To.Type = obj.TYPE_REG
   275  		p.To.Reg = v.Reg()
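        		// Illustrative results: a stack object at offset 16 becomes
        		//	MOVV	$16(R29), Rout
        		// while a global becomes a relocated
        		//	MOVV	$sym(SB), Rout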
   276  	case ssa.OpMIPS64MOVBload,
   277  		ssa.OpMIPS64MOVBUload,
   278  		ssa.OpMIPS64MOVHload,
   279  		ssa.OpMIPS64MOVHUload,
   280  		ssa.OpMIPS64MOVWload,
   281  		ssa.OpMIPS64MOVWUload,
   282  		ssa.OpMIPS64MOVVload,
   283  		ssa.OpMIPS64MOVFload,
   284  		ssa.OpMIPS64MOVDload:
   285  		p := s.Prog(v.Op.Asm())
   286  		p.From.Type = obj.TYPE_MEM
   287  		p.From.Reg = v.Args[0].Reg()
   288  		ssagen.AddAux(&p.From, v)
   289  		p.To.Type = obj.TYPE_REG
   290  		p.To.Reg = v.Reg()
   291  	case ssa.OpMIPS64MOVBstore,
   292  		ssa.OpMIPS64MOVHstore,
   293  		ssa.OpMIPS64MOVWstore,
   294  		ssa.OpMIPS64MOVVstore,
   295  		ssa.OpMIPS64MOVFstore,
   296  		ssa.OpMIPS64MOVDstore:
   297  		p := s.Prog(v.Op.Asm())
   298  		p.From.Type = obj.TYPE_REG
   299  		p.From.Reg = v.Args[1].Reg()
   300  		p.To.Type = obj.TYPE_MEM
   301  		p.To.Reg = v.Args[0].Reg()
   302  		ssagen.AddAux(&p.To, v)
   303  	case ssa.OpMIPS64MOVBstorezero,
   304  		ssa.OpMIPS64MOVHstorezero,
   305  		ssa.OpMIPS64MOVWstorezero,
   306  		ssa.OpMIPS64MOVVstorezero:
   307  		p := s.Prog(v.Op.Asm())
   308  		p.From.Type = obj.TYPE_REG
   309  		p.From.Reg = mips.REGZERO
   310  		p.To.Type = obj.TYPE_MEM
   311  		p.To.Reg = v.Args[0].Reg()
   312  		ssagen.AddAux(&p.To, v)
   313  	case ssa.OpMIPS64MOVBreg,
   314  		ssa.OpMIPS64MOVBUreg,
   315  		ssa.OpMIPS64MOVHreg,
   316  		ssa.OpMIPS64MOVHUreg,
   317  		ssa.OpMIPS64MOVWreg,
   318  		ssa.OpMIPS64MOVWUreg:
   319  		a := v.Args[0]
   320  		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
   321  			a = a.Args[0]
   322  		}
   323  		if a.Op == ssa.OpLoadReg && mips.REG_R0 <= a.Reg() && a.Reg() <= mips.REG_R31 {
   324  			// A LoadReg of a narrower type performs an extension, except when
   325  			// loading into a floating-point register. So only eliminate the
   326  			// extension if the value is loaded into an integer register.
   327  			t := a.Type
   328  			switch {
   329  			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
   330  				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
   331  				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
   332  				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
   333  				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
   334  				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
   335  				// The arg is a properly typed load and is already zero/sign-extended; don't extend again.
   336  				if v.Reg() == v.Args[0].Reg() {
   337  					return
   338  				}
   339  				p := s.Prog(mips.AMOVV)
   340  				p.From.Type = obj.TYPE_REG
   341  				p.From.Reg = v.Args[0].Reg()
   342  				p.To.Type = obj.TYPE_REG
   343  				p.To.Reg = v.Reg()
   344  				return
   345  			default:
   346  			}
   347  		}
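        		// Not a redundant extension: fall through and emit v.Op.Asm() (MOVB,
        		// MOVHU, MOVW, ...) via the shared single-operand case below.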
   348  		fallthrough
   349  	case ssa.OpMIPS64MOVWF,
   350  		ssa.OpMIPS64MOVWD,
   351  		ssa.OpMIPS64TRUNCFW,
   352  		ssa.OpMIPS64TRUNCDW,
   353  		ssa.OpMIPS64MOVVF,
   354  		ssa.OpMIPS64MOVVD,
   355  		ssa.OpMIPS64TRUNCFV,
   356  		ssa.OpMIPS64TRUNCDV,
   357  		ssa.OpMIPS64MOVFD,
   358  		ssa.OpMIPS64MOVDF,
   359  		ssa.OpMIPS64NEGF,
   360  		ssa.OpMIPS64NEGD,
   361  		ssa.OpMIPS64SQRTF,
   362  		ssa.OpMIPS64SQRTD:
   363  		p := s.Prog(v.Op.Asm())
   364  		p.From.Type = obj.TYPE_REG
   365  		p.From.Reg = v.Args[0].Reg()
   366  		p.To.Type = obj.TYPE_REG
   367  		p.To.Reg = v.Reg()
   368  	case ssa.OpMIPS64NEGV:
   369  		// negate by subtracting from REGZERO: 0 - arg
   370  		p := s.Prog(mips.ASUBVU)
   371  		p.From.Type = obj.TYPE_REG
   372  		p.From.Reg = v.Args[0].Reg()
   373  		p.Reg = mips.REGZERO
   374  		p.To.Type = obj.TYPE_REG
   375  		p.To.Reg = v.Reg()
   376  	case ssa.OpMIPS64DUFFZERO:
   377  		// runtime.duffzero expects start address - 8 in R1
   378  		p := s.Prog(mips.ASUBVU)
   379  		p.From.Type = obj.TYPE_CONST
   380  		p.From.Offset = 8
   381  		p.Reg = v.Args[0].Reg()
   382  		p.To.Type = obj.TYPE_REG
   383  		p.To.Reg = mips.REG_R1
   384  		p = s.Prog(obj.ADUFFZERO)
   385  		p.To.Type = obj.TYPE_MEM
   386  		p.To.Name = obj.NAME_EXTERN
   387  		p.To.Sym = ir.Syms.Duffzero
   388  		p.To.Offset = v.AuxInt
   389  	case ssa.OpMIPS64LoweredZero:
   390  		// SUBV	$8, R1
   391  		// MOVV	R0, 8(R1)
   392  		// ADDV	$8, R1
   393  		// BNE	Rarg1, R1, -2(PC)
   394  		// arg1 is the address of the last element to zero
   395  		var sz int64
   396  		var mov obj.As
   397  		switch {
   398  		case v.AuxInt%8 == 0:
   399  			sz = 8
   400  			mov = mips.AMOVV
   401  		case v.AuxInt%4 == 0:
   402  			sz = 4
   403  			mov = mips.AMOVW
   404  		case v.AuxInt%2 == 0:
   405  			sz = 2
   406  			mov = mips.AMOVH
   407  		default:
   408  			sz = 1
   409  			mov = mips.AMOVB
   410  		}
   411  		p := s.Prog(mips.ASUBVU)
   412  		p.From.Type = obj.TYPE_CONST
   413  		p.From.Offset = sz
   414  		p.To.Type = obj.TYPE_REG
   415  		p.To.Reg = mips.REG_R1
   416  		p2 := s.Prog(mov)
   417  		p2.From.Type = obj.TYPE_REG
   418  		p2.From.Reg = mips.REGZERO
   419  		p2.To.Type = obj.TYPE_MEM
   420  		p2.To.Reg = mips.REG_R1
   421  		p2.To.Offset = sz
   422  		p3 := s.Prog(mips.AADDVU)
   423  		p3.From.Type = obj.TYPE_CONST
   424  		p3.From.Offset = sz
   425  		p3.To.Type = obj.TYPE_REG
   426  		p3.To.Reg = mips.REG_R1
   427  		p4 := s.Prog(mips.ABNE)
   428  		p4.From.Type = obj.TYPE_REG
   429  		p4.From.Reg = v.Args[1].Reg()
   430  		p4.Reg = mips.REG_R1
   431  		p4.To.Type = obj.TYPE_BRANCH
   432  		p4.To.SetTarget(p2)
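        		// v.AuxInt here is the alignment of the region being zeroed, so the
        		// widest move the alignment permits is chosen (e.g. 4-byte alignment
        		// clears 4 bytes per iteration with MOVW); the loop bound comes from
        		// arg1 above.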
   433  	case ssa.OpMIPS64DUFFCOPY:
   434  		p := s.Prog(obj.ADUFFCOPY)
   435  		p.To.Type = obj.TYPE_MEM
   436  		p.To.Name = obj.NAME_EXTERN
   437  		p.To.Sym = ir.Syms.Duffcopy
   438  		p.To.Offset = v.AuxInt
   439  	case ssa.OpMIPS64LoweredMove:
   440  		// SUBV	$8, R1
   441  		// MOVV	8(R1), Rtmp
   442  		// MOVV	Rtmp, (R2)
   443  		// ADDV	$8, R1
   444  		// ADDV	$8, R2
   445  		// BNE	Rarg2, R1, -4(PC)
   446  		// arg2 is the address of the last element of src
   447  		var sz int64
   448  		var mov obj.As
   449  		switch {
   450  		case v.AuxInt%8 == 0:
   451  			sz = 8
   452  			mov = mips.AMOVV
   453  		case v.AuxInt%4 == 0:
   454  			sz = 4
   455  			mov = mips.AMOVW
   456  		case v.AuxInt%2 == 0:
   457  			sz = 2
   458  			mov = mips.AMOVH
   459  		default:
   460  			sz = 1
   461  			mov = mips.AMOVB
   462  		}
   463  		p := s.Prog(mips.ASUBVU)
   464  		p.From.Type = obj.TYPE_CONST
   465  		p.From.Offset = sz
   466  		p.To.Type = obj.TYPE_REG
   467  		p.To.Reg = mips.REG_R1
   468  		p2 := s.Prog(mov)
   469  		p2.From.Type = obj.TYPE_MEM
   470  		p2.From.Reg = mips.REG_R1
   471  		p2.From.Offset = sz
   472  		p2.To.Type = obj.TYPE_REG
   473  		p2.To.Reg = mips.REGTMP
   474  		p3 := s.Prog(mov)
   475  		p3.From.Type = obj.TYPE_REG
   476  		p3.From.Reg = mips.REGTMP
   477  		p3.To.Type = obj.TYPE_MEM
   478  		p3.To.Reg = mips.REG_R2
   479  		p4 := s.Prog(mips.AADDVU)
   480  		p4.From.Type = obj.TYPE_CONST
   481  		p4.From.Offset = sz
   482  		p4.To.Type = obj.TYPE_REG
   483  		p4.To.Reg = mips.REG_R1
   484  		p5 := s.Prog(mips.AADDVU)
   485  		p5.From.Type = obj.TYPE_CONST
   486  		p5.From.Offset = sz
   487  		p5.To.Type = obj.TYPE_REG
   488  		p5.To.Reg = mips.REG_R2
   489  		p6 := s.Prog(mips.ABNE)
   490  		p6.From.Type = obj.TYPE_REG
   491  		p6.From.Reg = v.Args[2].Reg()
   492  		p6.Reg = mips.REG_R1
   493  		p6.To.Type = obj.TYPE_BRANCH
   494  		p6.To.SetTarget(p2)
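        		// As with LoweredZero above, v.AuxInt is the alignment and selects the
        		// per-iteration move width; arg2, the address of the last source
        		// element, bounds the loop.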
   495  	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
   496  		s.Call(v)
   497  	case ssa.OpMIPS64CALLtail:
   498  		s.TailCall(v)
   499  	case ssa.OpMIPS64LoweredWB:
   500  		p := s.Prog(obj.ACALL)
   501  		p.To.Type = obj.TYPE_MEM
   502  		p.To.Name = obj.NAME_EXTERN
   503  		p.To.Sym = v.Aux.(*obj.LSym)
   504  	case ssa.OpMIPS64LoweredPanicBoundsA, ssa.OpMIPS64LoweredPanicBoundsB, ssa.OpMIPS64LoweredPanicBoundsC:
   505  		p := s.Prog(obj.ACALL)
   506  		p.To.Type = obj.TYPE_MEM
   507  		p.To.Name = obj.NAME_EXTERN
   508  		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
   509  		s.UseArgs(16) // space used in callee args area by assembly stubs
   510  	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
   511  		as := mips.AMOVV
   512  		switch v.Op {
   513  		case ssa.OpMIPS64LoweredAtomicLoad8:
   514  			as = mips.AMOVB
   515  		case ssa.OpMIPS64LoweredAtomicLoad32:
   516  			as = mips.AMOVW
   517  		}
   518  		s.Prog(mips.ASYNC)
   519  		p := s.Prog(as)
   520  		p.From.Type = obj.TYPE_MEM
   521  		p.From.Reg = v.Args[0].Reg()
   522  		p.To.Type = obj.TYPE_REG
   523  		p.To.Reg = v.Reg0()
   524  		s.Prog(mips.ASYNC)
   525  	case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
   526  		as := mips.AMOVV
   527  		switch v.Op {
   528  		case ssa.OpMIPS64LoweredAtomicStore8:
   529  			as = mips.AMOVB
   530  		case ssa.OpMIPS64LoweredAtomicStore32:
   531  			as = mips.AMOVW
   532  		}
   533  		s.Prog(mips.ASYNC)
   534  		p := s.Prog(as)
   535  		p.From.Type = obj.TYPE_REG
   536  		p.From.Reg = v.Args[1].Reg()
   537  		p.To.Type = obj.TYPE_MEM
   538  		p.To.Reg = v.Args[0].Reg()
   539  		s.Prog(mips.ASYNC)
   540  	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
   541  		as := mips.AMOVV
   542  		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
   543  			as = mips.AMOVW
   544  		}
   545  		s.Prog(mips.ASYNC)
   546  		p := s.Prog(as)
   547  		p.From.Type = obj.TYPE_REG
   548  		p.From.Reg = mips.REGZERO
   549  		p.To.Type = obj.TYPE_MEM
   550  		p.To.Reg = v.Args[0].Reg()
   551  		s.Prog(mips.ASYNC)
   552  	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
   553  		// SYNC
   554  		// MOVV	Rarg1, Rtmp
   555  		// LL	(Rarg0), Rout
   556  		// SC	Rtmp, (Rarg0)
   557  		// BEQ	Rtmp, -3(PC)
   558  		// SYNC
   559  		ll := mips.ALLV
   560  		sc := mips.ASCV
   561  		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
   562  			ll = mips.ALL
   563  			sc = mips.ASC
   564  		}
   565  		s.Prog(mips.ASYNC)
   566  		p := s.Prog(mips.AMOVV)
   567  		p.From.Type = obj.TYPE_REG
   568  		p.From.Reg = v.Args[1].Reg()
   569  		p.To.Type = obj.TYPE_REG
   570  		p.To.Reg = mips.REGTMP
   571  		p1 := s.Prog(ll)
   572  		p1.From.Type = obj.TYPE_MEM
   573  		p1.From.Reg = v.Args[0].Reg()
   574  		p1.To.Type = obj.TYPE_REG
   575  		p1.To.Reg = v.Reg0()
   576  		p2 := s.Prog(sc)
   577  		p2.From.Type = obj.TYPE_REG
   578  		p2.From.Reg = mips.REGTMP
   579  		p2.To.Type = obj.TYPE_MEM
   580  		p2.To.Reg = v.Args[0].Reg()
   581  		p3 := s.Prog(mips.ABEQ)
   582  		p3.From.Type = obj.TYPE_REG
   583  		p3.From.Reg = mips.REGTMP
   584  		p3.To.Type = obj.TYPE_BRANCH
   585  		p3.To.SetTarget(p)
   586  		s.Prog(mips.ASYNC)
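        		// The retry branch targets the initial MOVV rather than the LL: SC
        		// overwrites REGTMP with its success flag, so the value to store must
        		// be reloaded from Rarg1 on every attempt.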
   587  	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
   588  		// SYNC
   589  		// LL	(Rarg0), Rout
   590  		// ADDV Rarg1, Rout, Rtmp
   591  		// SC	Rtmp, (Rarg0)
   592  		// BEQ	Rtmp, -3(PC)
   593  		// SYNC
   594  		// ADDV Rarg1, Rout
   595  		ll := mips.ALLV
   596  		sc := mips.ASCV
   597  		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
   598  			ll = mips.ALL
   599  			sc = mips.ASC
   600  		}
   601  		s.Prog(mips.ASYNC)
   602  		p := s.Prog(ll)
   603  		p.From.Type = obj.TYPE_MEM
   604  		p.From.Reg = v.Args[0].Reg()
   605  		p.To.Type = obj.TYPE_REG
   606  		p.To.Reg = v.Reg0()
   607  		p1 := s.Prog(mips.AADDVU)
   608  		p1.From.Type = obj.TYPE_REG
   609  		p1.From.Reg = v.Args[1].Reg()
   610  		p1.Reg = v.Reg0()
   611  		p1.To.Type = obj.TYPE_REG
   612  		p1.To.Reg = mips.REGTMP
   613  		p2 := s.Prog(sc)
   614  		p2.From.Type = obj.TYPE_REG
   615  		p2.From.Reg = mips.REGTMP
   616  		p2.To.Type = obj.TYPE_MEM
   617  		p2.To.Reg = v.Args[0].Reg()
   618  		p3 := s.Prog(mips.ABEQ)
   619  		p3.From.Type = obj.TYPE_REG
   620  		p3.From.Reg = mips.REGTMP
   621  		p3.To.Type = obj.TYPE_BRANCH
   622  		p3.To.SetTarget(p)
   623  		s.Prog(mips.ASYNC)
   624  		p4 := s.Prog(mips.AADDVU)
   625  		p4.From.Type = obj.TYPE_REG
   626  		p4.From.Reg = v.Args[1].Reg()
   627  		p4.Reg = v.Reg0()
   628  		p4.To.Type = obj.TYPE_REG
   629  		p4.To.Reg = v.Reg0()
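        		// Rout still holds the old value loaded by LL; the final ADDVU
        		// recomputes old+delta in Rout so the operation returns the new value.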
   630  	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
   631  		// SYNC
   632  		// LL	(Rarg0), Rout
   633  		// ADDV $auxint, Rout, Rtmp
   634  		// SC	Rtmp, (Rarg0)
   635  		// BEQ	Rtmp, -3(PC)
   636  		// SYNC
   637  		// ADDV $auxint, Rout
   638  		ll := mips.ALLV
   639  		sc := mips.ASCV
   640  		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
   641  			ll = mips.ALL
   642  			sc = mips.ASC
   643  		}
   644  		s.Prog(mips.ASYNC)
   645  		p := s.Prog(ll)
   646  		p.From.Type = obj.TYPE_MEM
   647  		p.From.Reg = v.Args[0].Reg()
   648  		p.To.Type = obj.TYPE_REG
   649  		p.To.Reg = v.Reg0()
   650  		p1 := s.Prog(mips.AADDVU)
   651  		p1.From.Type = obj.TYPE_CONST
   652  		p1.From.Offset = v.AuxInt
   653  		p1.Reg = v.Reg0()
   654  		p1.To.Type = obj.TYPE_REG
   655  		p1.To.Reg = mips.REGTMP
   656  		p2 := s.Prog(sc)
   657  		p2.From.Type = obj.TYPE_REG
   658  		p2.From.Reg = mips.REGTMP
   659  		p2.To.Type = obj.TYPE_MEM
   660  		p2.To.Reg = v.Args[0].Reg()
   661  		p3 := s.Prog(mips.ABEQ)
   662  		p3.From.Type = obj.TYPE_REG
   663  		p3.From.Reg = mips.REGTMP
   664  		p3.To.Type = obj.TYPE_BRANCH
   665  		p3.To.SetTarget(p)
   666  		s.Prog(mips.ASYNC)
   667  		p4 := s.Prog(mips.AADDVU)
   668  		p4.From.Type = obj.TYPE_CONST
   669  		p4.From.Offset = v.AuxInt
   670  		p4.Reg = v.Reg0()
   671  		p4.To.Type = obj.TYPE_REG
   672  		p4.To.Reg = v.Reg0()
   673  	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
   674  		// MOVV $0, Rout
   675  		// SYNC
   676  		// LL	(Rarg0), Rtmp
   677  		// BNE	Rtmp, Rarg1, 4(PC)
   678  		// MOVV Rarg2, Rout
   679  		// SC	Rout, (Rarg0)
   680  		// BEQ	Rout, -4(PC)
   681  		// SYNC
   682  		ll := mips.ALLV
   683  		sc := mips.ASCV
   684  		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
   685  			ll = mips.ALL
   686  			sc = mips.ASC
   687  		}
   688  		p := s.Prog(mips.AMOVV)
   689  		p.From.Type = obj.TYPE_REG
   690  		p.From.Reg = mips.REGZERO
   691  		p.To.Type = obj.TYPE_REG
   692  		p.To.Reg = v.Reg0()
   693  		s.Prog(mips.ASYNC)
   694  		p1 := s.Prog(ll)
   695  		p1.From.Type = obj.TYPE_MEM
   696  		p1.From.Reg = v.Args[0].Reg()
   697  		p1.To.Type = obj.TYPE_REG
   698  		p1.To.Reg = mips.REGTMP
   699  		p2 := s.Prog(mips.ABNE)
   700  		p2.From.Type = obj.TYPE_REG
   701  		p2.From.Reg = v.Args[1].Reg()
   702  		p2.Reg = mips.REGTMP
   703  		p2.To.Type = obj.TYPE_BRANCH
   704  		p3 := s.Prog(mips.AMOVV)
   705  		p3.From.Type = obj.TYPE_REG
   706  		p3.From.Reg = v.Args[2].Reg()
   707  		p3.To.Type = obj.TYPE_REG
   708  		p3.To.Reg = v.Reg0()
   709  		p4 := s.Prog(sc)
   710  		p4.From.Type = obj.TYPE_REG
   711  		p4.From.Reg = v.Reg0()
   712  		p4.To.Type = obj.TYPE_MEM
   713  		p4.To.Reg = v.Args[0].Reg()
   714  		p5 := s.Prog(mips.ABEQ)
   715  		p5.From.Type = obj.TYPE_REG
   716  		p5.From.Reg = v.Reg0()
   717  		p5.To.Type = obj.TYPE_BRANCH
   718  		p5.To.SetTarget(p1)
   719  		p6 := s.Prog(mips.ASYNC)
   720  		p2.To.SetTarget(p6)
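        		// Rout doubles as the boolean result: it is zeroed up front, so the BNE
        		// mismatch path exits with false, while on the store path SC writes its
        		// success flag into Rout and BEQ retries until the store succeeds.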
   721  	case ssa.OpMIPS64LoweredNilCheck:
   722  		// Issue a load which will fault if arg is nil.
   723  		p := s.Prog(mips.AMOVB)
   724  		p.From.Type = obj.TYPE_MEM
   725  		p.From.Reg = v.Args[0].Reg()
   726  		ssagen.AddAux(&p.From, v)
   727  		p.To.Type = obj.TYPE_REG
   728  		p.To.Reg = mips.REGTMP
   729  		if logopt.Enabled() {
   730  			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
   731  		}
   732  		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
   733  			base.WarnfAt(v.Pos, "generated nil check")
   734  		}
   735  	case ssa.OpMIPS64FPFlagTrue,
   736  		ssa.OpMIPS64FPFlagFalse:
   737  		// MOVV	$0, r
   738  		// BFPF	2(PC)
   739  		// MOVV	$1, r
   740  		branch := mips.ABFPF
   741  		if v.Op == ssa.OpMIPS64FPFlagFalse {
   742  			branch = mips.ABFPT
   743  		}
   744  		p := s.Prog(mips.AMOVV)
   745  		p.From.Type = obj.TYPE_REG
   746  		p.From.Reg = mips.REGZERO
   747  		p.To.Type = obj.TYPE_REG
   748  		p.To.Reg = v.Reg()
   749  		p2 := s.Prog(branch)
   750  		p2.To.Type = obj.TYPE_BRANCH
   751  		p3 := s.Prog(mips.AMOVV)
   752  		p3.From.Type = obj.TYPE_CONST
   753  		p3.From.Offset = 1
   754  		p3.To.Type = obj.TYPE_REG
   755  		p3.To.Reg = v.Reg()
   756  		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
   757  		p2.To.SetTarget(p4)
   758  	case ssa.OpMIPS64LoweredGetClosurePtr:
   759  		// Closure pointer is R22 (mips.REGCTXT).
   760  		ssagen.CheckLoweredGetClosurePtr(v)
   761  	case ssa.OpMIPS64LoweredGetCallerSP:
   762  		// caller's SP is FixedFrameSize below the address of the first arg
   763  		p := s.Prog(mips.AMOVV)
   764  		p.From.Type = obj.TYPE_ADDR
   765  		p.From.Offset = -base.Ctxt.FixedFrameSize()
   766  		p.From.Name = obj.NAME_PARAM
   767  		p.To.Type = obj.TYPE_REG
   768  		p.To.Reg = v.Reg()
   769  	case ssa.OpMIPS64LoweredGetCallerPC:
   770  		p := s.Prog(obj.AGETCALLERPC)
   771  		p.To.Type = obj.TYPE_REG
   772  		p.To.Reg = v.Reg()
   773  	case ssa.OpClobber, ssa.OpClobberReg:
   774  		// TODO: implement for clobberdead experiment. Nop is ok for now.
   775  	default:
   776  		v.Fatalf("genValue not implemented: %s", v.LongString())
   777  	}
   778  }
   779  
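        // blockJump gives, for each conditional block kind, the branch to emit when
        // branching on the condition (asm) and the inverted branch (invasm) used when
        // the first ("true") successor is the fallthrough block.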
   780  var blockJump = map[ssa.BlockKind]struct {
   781  	asm, invasm obj.As
   782  }{
   783  	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
   784  	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
   785  	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
   786  	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
   787  	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
   788  	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
   789  	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
   790  	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
   791  }
   792  
   793  func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
   794  	switch b.Kind {
   795  	case ssa.BlockPlain:
   796  		if b.Succs[0].Block() != next {
   797  			p := s.Prog(obj.AJMP)
   798  			p.To.Type = obj.TYPE_BRANCH
   799  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   800  		}
   801  	case ssa.BlockDefer:
   802  		// defer returns in R1:
   803  		// 0 if we should continue executing
   804  		// 1 if we should jump to deferreturn call
   805  		p := s.Prog(mips.ABNE)
   806  		p.From.Type = obj.TYPE_REG
   807  		p.From.Reg = mips.REGZERO
   808  		p.Reg = mips.REG_R1
   809  		p.To.Type = obj.TYPE_BRANCH
   810  		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
   811  		if b.Succs[0].Block() != next {
   812  			p := s.Prog(obj.AJMP)
   813  			p.To.Type = obj.TYPE_BRANCH
   814  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   815  		}
   816  	case ssa.BlockExit, ssa.BlockRetJmp:
   817  	case ssa.BlockRet:
   818  		s.Prog(obj.ARET)
   819  	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
   820  		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
   821  		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
   822  		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
   823  		jmp := blockJump[b.Kind]
   824  		var p *obj.Prog
   825  		switch next {
   826  		case b.Succs[0].Block():
   827  			p = s.Br(jmp.invasm, b.Succs[1].Block())
   828  		case b.Succs[1].Block():
   829  			p = s.Br(jmp.asm, b.Succs[0].Block())
   830  		default:
   831  			if b.Likely != ssa.BranchUnlikely {
   832  				p = s.Br(jmp.asm, b.Succs[0].Block())
   833  				s.Br(obj.AJMP, b.Succs[1].Block())
   834  			} else {
   835  				p = s.Br(jmp.invasm, b.Succs[1].Block())
   836  				s.Br(obj.AJMP, b.Succs[0].Block())
   837  			}
   838  		}
   839  		if !b.Controls[0].Type.IsFlags() {
   840  			p.From.Type = obj.TYPE_REG
   841  			p.From.Reg = b.Controls[0].Reg()
   842  		}
   843  	default:
   844  		b.Fatalf("branch not implemented: %s", b.LongString())
   845  	}
   846  }
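        // For example (illustrative): for a BlockMIPS64NE whose first successor is
        // the next block, only the inverted branch BEQ <ctrl>, <second successor> is
        // emitted and the common path falls through; when neither successor follows,
        // a conditional branch plus an unconditional JMP is used.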
   847  
