Source file src/cmd/compile/internal/ssa/rewriteMIPS64.go

     1  // Code generated from gen/MIPS64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "cmd/compile/internal/types"
     7  
// rewriteValueMIPS64 dispatches v to the MIPS64 lowering rule for its opcode.
// Generic ops with a direct 1:1 machine equivalent are lowered in place by
// overwriting v.Op (arguments are kept as-is); all other ops delegate to a
// dedicated per-op rewrite function. It reports whether v was rewritten.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd32F:
		v.Op = OpMIPS64ADDF
		return true
	case OpAdd64:
		v.Op = OpMIPS64ADDV
		return true
	case OpAdd64F:
		v.Op = OpMIPS64ADDD
		return true
	case OpAdd8:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddPtr:
		v.Op = OpMIPS64ADDV
		return true
	case OpAddr:
		return rewriteValueMIPS64_OpAddr(v)
	case OpAnd16:
		v.Op = OpMIPS64AND
		return true
	case OpAnd32:
		v.Op = OpMIPS64AND
		return true
	case OpAnd64:
		v.Op = OpMIPS64AND
		return true
	case OpAnd8:
		v.Op = OpMIPS64AND
		return true
	case OpAndB:
		v.Op = OpMIPS64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpMIPS64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpMIPS64LoweredAtomicAdd64
		return true
	case OpAtomicCompareAndSwap32:
		v.Op = OpMIPS64LoweredAtomicCas32
		return true
	case OpAtomicCompareAndSwap64:
		v.Op = OpMIPS64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpMIPS64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpMIPS64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpMIPS64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpMIPS64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		// Pointers are 64-bit on MIPS64, so the 64-bit atomic load is used.
		v.Op = OpMIPS64LoweredAtomicLoad64
		return true
	case OpAtomicStore32:
		v.Op = OpMIPS64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpMIPS64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		// Pointers are 64-bit on MIPS64, so the 64-bit atomic store is used.
		v.Op = OpMIPS64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpMIPS64CALLclosure
		return true
	case OpCom16:
		return rewriteValueMIPS64_OpCom16(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil(v)
	case OpCvt32Fto32:
		v.Op = OpMIPS64TRUNCFW
		return true
	case OpCvt32Fto64:
		v.Op = OpMIPS64TRUNCFV
		return true
	case OpCvt32Fto64F:
		v.Op = OpMIPS64MOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpMIPS64MOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpMIPS64MOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpMIPS64TRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpMIPS64MOVDF
		return true
	case OpCvt64Fto64:
		v.Op = OpMIPS64TRUNCDV
		return true
	case OpCvt64to32F:
		v.Op = OpMIPS64MOVVF
		return true
	case OpCvt64to64F:
		v.Op = OpMIPS64MOVVD
		return true
	case OpCvtBoolToUint8:
		// Bool is already stored as 0/1 in a byte; no code needed.
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpMIPS64DIVF
		return true
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpMIPS64DIVD
		return true
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr(v)
	case OpGetCallerPC:
		v.Op = OpMIPS64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpMIPS64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpMIPS64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u(v)
	case OpInterCall:
		v.Op = OpMIPS64CALLinter
		return true
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueMIPS64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload(v)
	case OpMIPS64MOVVnop:
		return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32(v)
	case OpMul32F:
		v.Op = OpMIPS64MULF
		return true
	case OpMul64:
		return rewriteValueMIPS64_OpMul64(v)
	case OpMul64F:
		v.Op = OpMIPS64MULD
		return true
	case OpMul64uhilo:
		v.Op = OpMIPS64MULVU
		return true
	case OpMul8:
		return rewriteValueMIPS64_OpMul8(v)
	case OpNeg16:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg32F:
		v.Op = OpMIPS64NEGF
		return true
	case OpNeg64:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeg64F:
		v.Op = OpMIPS64NEGD
		return true
	case OpNeg8:
		v.Op = OpMIPS64NEGV
		return true
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8(v)
	case OpNeqB:
		// Booleans are 0/1, so "not equal" is exactly XOR.
		v.Op = OpMIPS64XOR
		return true
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpMIPS64LoweredNilCheck
		return true
	case OpNot:
		return rewriteValueMIPS64_OpNot(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpMIPS64OR
		return true
	case OpOr32:
		v.Op = OpMIPS64OR
		return true
	case OpOr64:
		v.Op = OpMIPS64OR
		return true
	case OpOr8:
		v.Op = OpMIPS64OR
		return true
	case OpOrB:
		v.Op = OpMIPS64OR
		return true
	case OpPanicBounds:
		return rewriteValueMIPS64_OpPanicBounds(v)
	case OpRotateLeft16:
		return rewriteValueMIPS64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueMIPS64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueMIPS64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueMIPS64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpCopy
		return true
	case OpRound64F:
		v.Op = OpCopy
		return true
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpMIPS64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpMIPS64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpMIPS64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpMIPS64SQRTD
		return true
	case OpSqrt32:
		v.Op = OpMIPS64SQRTF
		return true
	case OpStaticCall:
		v.Op = OpMIPS64CALLstatic
		return true
	case OpStore:
		return rewriteValueMIPS64_OpStore(v)
	case OpSub16:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub32F:
		v.Op = OpMIPS64SUBF
		return true
	case OpSub64:
		v.Op = OpMIPS64SUBV
		return true
	case OpSub64F:
		v.Op = OpMIPS64SUBD
		return true
	case OpSub8:
		v.Op = OpMIPS64SUBV
		return true
	case OpSubPtr:
		v.Op = OpMIPS64SUBV
		return true
	case OpTailCall:
		v.Op = OpMIPS64CALLtail
		return true
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpMIPS64LoweredWB
		return true
	case OpXor16:
		v.Op = OpMIPS64XOR
		return true
	case OpXor32:
		v.Op = OpMIPS64XOR
		return true
	case OpXor64:
		v.Op = OpMIPS64XOR
		return true
	case OpXor8:
		v.Op = OpMIPS64XOR
		return true
	case OpZero:
		return rewriteValueMIPS64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpMIPS64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpMIPS64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpMIPS64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpMIPS64MOVBUreg
		return true
	}
	return false
}
// rewriteValueMIPS64_OpAddr lowers a symbolic address to a MOVVaddr,
// carrying the symbol aux through unchanged.
func rewriteValueMIPS64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpAvg64u lowers unsigned 64-bit average as
// ((x-y)>>1) + y, which avoids the overflow that computing x+y first
// could cause. NOTE(review): this form presumably relies on the generic
// Avg64u contract guaranteeing x >= y — confirm against gen/MIPS64.rules.
func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueMIPS64_OpCom16 lowers 16-bit bitwise complement.
// MIPS64 has no NOT instruction; NOR with a zero constant computes ^x.
func rewriteValueMIPS64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom32 lowers 32-bit bitwise complement.
// MIPS64 has no NOT instruction; NOR with a zero constant computes ^x.
func rewriteValueMIPS64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom64 lowers 64-bit bitwise complement.
// MIPS64 has no NOT instruction; NOR with a zero constant computes ^x.
func rewriteValueMIPS64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpCom8 lowers 8-bit bitwise complement.
// MIPS64 has no NOT instruction; NOR with a zero constant computes ^x.
func rewriteValueMIPS64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16 materializes a 16-bit constant as a
// sign-extended 64-bit MOVVconst (all constants live in 64-bit registers).
func rewriteValueMIPS64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32 materializes a 32-bit constant as a
// sign-extended 64-bit MOVVconst.
func rewriteValueMIPS64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst32F materializes a float32 constant as a
// MOVFconst; the aux is stored widened to float64 per AuxInt convention.
func rewriteValueMIPS64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64 materializes a 64-bit constant as a MOVVconst.
func rewriteValueMIPS64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst64F materializes a float64 constant as a MOVDconst.
func rewriteValueMIPS64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConst8 materializes an 8-bit constant as a
// sign-extended 64-bit MOVVconst.
func rewriteValueMIPS64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueMIPS64_OpConstBool materializes a boolean constant as
// MOVVconst 0 or 1 (b2i maps false/true to 0/1).
func rewriteValueMIPS64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}
// rewriteValueMIPS64_OpConstNil materializes the nil pointer as MOVVconst 0.
func rewriteValueMIPS64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16 lowers signed 16-bit division: both operands
// are sign-extended to 64 bits and divided with DIVV; Select1 of the
// result tuple is the quotient (Select0 is the remainder, used by Mod).
func rewriteValueMIPS64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16u lowers unsigned 16-bit division: both
// operands are zero-extended to 64 bits and divided with DIVVU; Select1
// of the result tuple is the quotient.
func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32 lowers signed 32-bit division: both operands
// are sign-extended to 64 bits and divided with DIVV; Select1 of the
// result tuple is the quotient.
func rewriteValueMIPS64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv32u lowers unsigned 32-bit division: both
// operands are zero-extended to 64 bits and divided with DIVVU; Select1
// of the result tuple is the quotient.
func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64 lowers signed 64-bit division directly to
// DIVV; Select1 of the result tuple is the quotient. No extension is
// needed since the operands are already full-width.
func rewriteValueMIPS64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64 x y)
	// result: (Select1 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv64u lowers unsigned 64-bit division directly
// to DIVVU; Select1 of the result tuple is the quotient.
func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64u x y)
	// result: (Select1 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8 lowers Div8: both operands are sign-extended to
// 64 bits and the quotient half (Select1) of DIVV is taken.
func rewriteValueMIPS64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpDiv8u lowers Div8u: operands are zero-extended to
// 64 bits and the quotient half (Select1) of DIVVU is taken.
func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16 lowers 16-bit equality: x == y is computed as
// 1 >u (zext(x) ^ zext(y)), since the XOR is zero exactly when the operands
// are equal and only 0 is unsigned-less-than 1.
func rewriteValueMIPS64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32 lowers 32-bit equality using the same
// 1 >u (zext(x) ^ zext(y)) trick as Eq16.
func rewriteValueMIPS64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq32F lowers single-precision float equality to a
// CMPEQF flag-setting compare followed by FPFlagTrue to read the FP flag.
func rewriteValueMIPS64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq64 lowers 64-bit equality: x == y becomes
// 1 >u (x ^ y); no operand extension is needed at full width.
func rewriteValueMIPS64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEq64F lowers double-precision float equality to a
// CMPEQD flag-setting compare followed by FPFlagTrue.
func rewriteValueMIPS64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq8 lowers 8-bit equality using the same
// 1 >u (zext(x) ^ zext(y)) trick as Eq16/Eq32.
func rewriteValueMIPS64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqB lowers boolean equality: for 0/1-valued operands,
// x == y is 1 ^ (x ^ y), i.e. the inner XOR is inverted by XOR-ing with 1.
func rewriteValueMIPS64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpEqPtr lowers pointer equality identically to Eq64:
// the two pointers are equal iff 1 >u (x ^ y).
func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32 lowers the signed 32-bit high-multiply: the
// operands are sign-extended, multiplied to a full 64-bit product (low half
// of the MULV tuple, Select1), and the upper 32 bits are extracted with an
// arithmetic right shift by 32.
func rewriteValueMIPS64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u lowers the unsigned 32-bit high-multiply:
// like Hmul32 but with zero-extension, MULVU, and a logical shift right.
func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64 lowers the signed 64-bit high-multiply to the
// high half (Select0) of the 128-bit MULV product tuple.
func rewriteValueMIPS64_OpHmul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64 x y)
	// result: (Select0 (MULV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u lowers the unsigned 64-bit high-multiply to
// the high half (Select0) of the MULVU product tuple.
func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64u x y)
	// result: (Select0 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds lowers the bounds check idx < len to an
// unsigned set-on-greater-than: len >u idx.
func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil lowers the nil check to ptr >u 0, which is
// true for every pointer value except zero.
func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds lowers the slice bound check
// idx <= len as the negation !(idx >u len), computed as 1 ^ SGTU(idx, len).
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16 lowers signed x <= y as !(x > y): the operands
// are sign-extended, compared with SGT, and the result inverted via 1 ^ _.
func rewriteValueMIPS64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U lowers unsigned x <= y as !(x >u y): operands
// are zero-extended, compared with SGTU, and inverted via 1 ^ _.
func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32 lowers signed 32-bit x <= y as !(x > y) with
// sign-extension, mirroring Leq16.
func rewriteValueMIPS64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F lowers float32 x <= y as y >= x: the operands
// are swapped into CMPGEF and the FP condition flag is read with FPFlagTrue.
func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U lowers unsigned 32-bit x <= y as !(x >u y)
// with zero-extension, mirroring Leq16U.
func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64 lowers signed 64-bit x <= y as 1 ^ SGT(x, y);
// no extension is needed at full width.
func rewriteValueMIPS64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F lowers float64 x <= y as y >= x via CMPGED
// with swapped operands, then reads the FP flag with FPFlagTrue.
func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U lowers unsigned 64-bit x <= y as
// 1 ^ SGTU(x, y).
func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8 lowers signed 8-bit x <= y as !(x > y) with
// sign-extension, mirroring Leq16/Leq32.
func rewriteValueMIPS64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U lowers unsigned 8-bit x <= y as !(x >u y) with
// zero-extension, mirroring Leq16U/Leq32U.
func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16 lowers signed x < y as y > x: the operands are
// sign-extended and passed to SGT in swapped order.
func rewriteValueMIPS64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U lowers unsigned x < y as y >u x with
// zero-extended operands passed to SGTU in swapped order.
func rewriteValueMIPS64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32 lowers signed 32-bit x < y as y > x with
// sign-extended operands, mirroring Less16.
func rewriteValueMIPS64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F lowers float32 x < y as y > x via CMPGTF with
// swapped operands, then reads the FP flag with FPFlagTrue.
func rewriteValueMIPS64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U lowers unsigned 32-bit x < y as y >u x with
// zero-extended operands, mirroring Less16U.
func rewriteValueMIPS64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64 lowers signed 64-bit x < y as SGT(y, x);
// the operands are simply swapped.
func rewriteValueMIPS64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F lowers float64 x < y as y > x via CMPGTD with
// swapped operands, then reads the FP flag with FPFlagTrue.
func rewriteValueMIPS64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U lowers unsigned 64-bit x < y as SGTU(y, x).
func rewriteValueMIPS64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8 lowers signed 8-bit x < y as y > x with
// sign-extended operands, mirroring Less16/Less32.
func rewriteValueMIPS64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U lowers unsigned 8-bit x < y as y >u x with
// zero-extended operands, mirroring Less16U/Less32U.
func rewriteValueMIPS64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad lowers the generic Load op to the MIPS64 load of
// the right width and signedness, selected by the loaded type: booleans and
// unsigned 8/16/32-bit ints use the zero-extending MOVBU/MOVHU/MOVWU forms,
// signed ints the sign-extending MOVB/MOVH/MOVW forms, 64-bit ints and
// pointers MOVV, and floats MOVF/MOVD. Returns false when no case matches
// (the type is handled elsewhere or the load cannot be lowered here).
func rewriteValueMIPS64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLocalAddr lowers LocalAddr to MOVVaddr, carrying the
// symbol over in Aux; the memory argument is dropped (address formation on
// MIPS64 does not need it).
func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LocalAddr {sym} base _)
	// result: (MOVVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x16 lowers Lsh16x16 with Go shift semantics:
// NEGV(SGTU(64, zext(y))) produces an all-ones mask when the shift amount is
// < 64 and zero otherwise, so ANDing it with the SLLV result yields 0 for
// oversized shifts instead of the hardware's wrapped behavior.
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32 lowers Lsh16x32: same shift-masking pattern
// as Lsh16x16 but with a 32-bit shift amount zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64 lowers Lsh16x64: same shift-masking pattern
// as Lsh16x16, with the 64-bit shift amount used directly (no extension).
func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8 lowers Lsh16x8: same shift-masking pattern as
// Lsh16x16 but with an 8-bit shift amount zero-extended to 64 bits.
func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16 lowers Lsh32x16 with the standard
// mask-and-shift pattern (see Lsh16x16): the result is zeroed when the
// zero-extended shift amount is >= 64.
func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32 lowers Lsh32x32 <t> x y: zero-extend the
// 32-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64 lowers Lsh32x64 <t> x y. The shift amount
// is already 64-bit, so no extension is needed; the SLLV result is masked
// with NEGV(SGTU 64 y) so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8 lowers Lsh32x8 <t> x y: zero-extend the
// 8-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16 lowers Lsh64x16 <t> x y: zero-extend the
// 16-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32 lowers Lsh64x32 <t> x y: zero-extend the
// 32-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64 lowers Lsh64x64 <t> x y. The shift amount
// is already 64-bit, so no extension is needed; the SLLV result is masked
// with NEGV(SGTU 64 y) so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8 lowers Lsh64x8 <t> x y: zero-extend the
// 8-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16 lowers Lsh8x16 <t> x y: zero-extend the
// 16-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32 lowers Lsh8x32 <t> x y: zero-extend the
// 32-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64 lowers Lsh8x64 <t> x y. The shift amount
// is already 64-bit, so no extension is needed; the SLLV result is masked
// with NEGV(SGTU 64 y) so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8 lowers Lsh8x8 <t> x y: zero-extend the
// 8-bit shift amount, then mask the SLLV result with NEGV(SGTU 64 y')
// so that shifts of 64 or more produce 0.
func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV simplifies ADDV: folds a 32-bit-
// representable constant operand into ADDVconst, and rewrites
// x + (-y) as x - y. Both rules are commutative, hence the _i0 loop
// that tries the arguments in both orders.
func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		// try both argument orders (ADDV is commutative)
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			// ADDVconst can only encode 32-bit immediates
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpMIPS64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst simplifies ADDVconst: folds the
// offset into a MOVVaddr, drops additions of zero, constant-folds
// against MOVVconst, and merges chained ADDVconst/SUBVconst when the
// combined offset still fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64AND simplifies AND: folds a 32-bit-
// representable constant operand into ANDconst (trying both argument
// orders, since AND is commutative), and reduces x & x to x.
func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			// ANDconst can only encode 32-bit immediates
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ANDconst simplifies ANDconst:
// x & 0 -> 0, x & -1 -> x, constant folding against MOVVconst, and
// merging of nested ANDconst masks into a single c&d mask.
func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32 folds a constant addend
// that fits in 32 bits into the immediate form LoweredAtomicAddconst32.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst32 [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64 folds a constant addend
// that fits in 32 bits into the immediate form LoweredAtomicAddconst64.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (LoweredAtomicAddconst64 [c] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64LoweredAtomicAddconst64)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32 rewrites an atomic
// store of constant zero to the dedicated store-zero form, which avoids
// materializing the zero in a register.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero32 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero32)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64 rewrites an atomic
// store of constant zero to the dedicated store-zero form, which avoids
// materializing the zero in a register.
func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
	// result: (LoweredAtomicStorezero64 ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64LoweredAtomicStorezero64)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUload folds addressing arithmetic into
// the unsigned byte load: an ADDVconst base collapses into the load's
// offset, and a MOVVaddr base merges its symbol+offset into the load,
// in both cases only when the combined offset still fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBUreg simplifies the zero-extend-byte
// op: a value produced by MOVBUload or an earlier MOVBUreg already has
// zero upper bits, so the extension degrades to a plain register move;
// a constant operand is folded to its low byte.
func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBload folds addressing arithmetic into
// the signed byte load (ADDVconst and MOVVaddr bases, offset kept within
// 32 bits) and constant-folds a load from a read-only symbol by reading
// the byte directly from the object data.
func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBreg simplifies the sign-extend-byte
// op: a value produced by MOVBload or an earlier MOVBreg is already
// sign-extended, so the extension degrades to a plain register move;
// a constant operand is folded to its sign-extended low byte.
func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstore simplifies byte stores:
//   - folds ADDVconst / MOVVaddr addressing into the store's offset and
//     symbol (offset must stay within 32 bits);
//   - rewrites a store of constant zero to MOVBstorezero;
//   - drops a sign/zero extension (MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWUreg)
//     of the stored value, since a byte store only writes the low 8 bits
//     and the extension cannot change them.
func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero folds addressing arithmetic
// into the byte store-zero: ADDVconst and MOVVaddr bases collapse into
// the store's offset/symbol when the combined offset fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload folds addressing arithmetic into
// the float64 load: ADDVconst and MOVVaddr bases collapse into the
// load's offset/symbol when the combined offset fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore folds address arithmetic into the aux
// fields of a MOVDstore: a constant-offset ADDVconst or a MOVVaddr symbol
// address on the pointer argument is absorbed into [off]{sym}. Reports
// whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload folds address arithmetic into the aux
// fields of a MOVFload: a constant-offset ADDVconst or a MOVVaddr symbol
// address on the pointer argument is absorbed into [off]{sym}. Reports
// whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore folds address arithmetic into the aux
// fields of a MOVFstore: a constant-offset ADDVconst or a MOVVaddr symbol
// address on the pointer argument is absorbed into [off]{sym}. Reports
// whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload folds address arithmetic into the aux
// fields of a MOVHUload: a constant-offset ADDVconst or a MOVVaddr symbol
// address on the pointer argument is absorbed into [off]{sym}. Reports
// whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg eliminates redundant 16-bit
// zero-extensions: when the operand is already zero-extended to at most 16
// bits (an unsigned narrow load or a narrower unsigned reg extension) the op
// becomes a plain MOVVreg move, and a constant operand is truncated at
// compile time. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload folds address arithmetic into the aux
// fields of a MOVHload (ADDVconst offset or MOVVaddr symbol), and constant-
// folds a load from a read-only symbol into a MOVVconst using the target
// byte order. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg eliminates redundant 16-bit
// sign-extensions: when the operand already fits in 16 bits with the correct
// sign (a narrow load or a narrower reg extension) the op becomes a plain
// MOVVreg move, and a constant operand is sign-truncated at compile time.
// Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore simplifies 16-bit stores: folds
// ADDVconst offsets and MOVVaddr symbols into [off]{sym}, turns a store of
// constant zero into MOVHstorezero, and drops extension ops on the stored
// value (only the low 16 bits reach memory, so sign/zero extension of the
// source is irrelevant). Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero folds address arithmetic into the
// aux fields of a MOVHstorezero: a constant-offset ADDVconst or a MOVVaddr
// symbol address on the pointer argument is absorbed into [off]{sym}.
// Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload folds address arithmetic into the aux
// fields of a MOVVload (ADDVconst offset or MOVVaddr symbol), and constant-
// folds a load from a read-only symbol into a MOVVconst using the target
// byte order. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVnop collapses a register-copy nop of a
// constant into the constant itself. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVnop (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg simplifies a 64-bit register move:
// a single-use operand is demoted to a MOVVnop (candidate for elision), and
// a constant operand is propagated directly. Reports whether a rewrite was
// applied.
func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore simplifies 64-bit stores: folds
// ADDVconst offsets and MOVVaddr symbols into [off]{sym}, and turns a store
// of constant zero into MOVVstorezero. Reports whether a rewrite was
// applied.
func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero folds address arithmetic into the
// aux fields of a MOVVstorezero: a constant-offset ADDVconst or a MOVVaddr
// symbol address on the pointer argument is absorbed into [off]{sym}.
// Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload folds address arithmetic into the aux
// fields of a MOVWUload: a constant-offset ADDVconst or a MOVVaddr symbol
// address on the pointer argument is absorbed into [off]{sym}. Reports
// whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg eliminates redundant 32-bit
// zero-extensions: when the operand is already zero-extended to at most 32
// bits (an unsigned narrow load or a narrower unsigned reg extension) the op
// becomes a plain MOVVreg move, and a constant operand is truncated at
// compile time. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload folds address arithmetic into the aux
// fields of a MOVWload (ADDVconst offset or MOVVaddr symbol), and constant-
// folds a load from a read-only symbol into a MOVVconst using the target
// byte order. Reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWreg applies the generated rewrite rules for
// MOVWreg (sign-extend word to 64 bits): when the argument is already known
// to fit in 32 signed bits (a narrower load or extension), the extension is
// demoted to a plain MOVVreg move; a constant argument is folded to
// int64(int32(c)). It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWload {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpMIPS64MOVWreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore applies the generated rewrite rules
// for MOVWstore: it folds ADDVconst/MOVVaddr address arithmetic into the
// store's offset, turns a store of constant zero into MOVWstorezero, and
// drops a redundant MOVWreg/MOVWUreg extension of the stored value (a word
// store only writes the low 32 bits anyway). It reports whether a rewrite
// was applied.
func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero applies the generated rewrite
// rules for MOVWstorezero: it folds ADDVconst/MOVVaddr address arithmetic
// into the store's offset when the combined offset fits in 32 bits (and
// symbols can merge). It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV folds negation of a constant into the
// constant itself (two's-complement wraparound for MinInt64 is intended).
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR turns a NOR with one constant operand into
// NORconst when the constant fits in 32 bits. The inner loop tries both
// operand orders because NOR is commutative. It reports whether a rewrite
// was applied.
func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst folds NORconst of a constant operand
// into a single constant: ^(c|d). It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [^(c|d)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(^(c | d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR applies the generated rewrite rules for OR:
// a 32-bit constant operand becomes ORconst (both operand orders are tried,
// OR being commutative), and (OR x x) simplifies to x. It reports whether a
// rewrite was applied.
func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst applies the generated rewrite rules for
// ORconst: identity (OR 0), absorption (OR -1), constant folding, and
// collapsing nested ORconsts when the merged constant still fits in 32 bits.
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT turns a signed set-greater-than with a
// 32-bit constant first operand into SGTconst (operands are NOT swappable:
// SGT is not commutative). It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU turns an unsigned set-greater-than with a
// 32-bit constant first operand into SGTUconst (operands are NOT swappable:
// SGTU is not commutative). It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst applies the generated rewrite rules
// for SGTUconst (unsigned c > x): it folds constant comparisons, and proves
// the comparison true (result 1) when the operand's unsigned upper bound —
// established by a zero extension, an ANDconst mask, or an SRLVconst shift —
// is below c. It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst applies the generated rewrite rules
// for SGTconst (signed c > x): it folds constant comparisons, and decides
// the comparison statically when the operand's signed range is bounded by a
// sign/zero extension, an ANDconst mask, or an SRLVconst shift (e.g. a
// MOVBreg value is always in [-0x80, 0x7f]). It reports whether a rewrite
// was applied.
func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV applies the generated rewrite rules for
// SLLV (shift left logical variable): a shift count >= 64 yields constant 0,
// and any other constant count becomes SLLVconst. Rule order matters: the
// >=64 case must be handled before the general constant case. It reports
// whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst folds a constant left shift of a
// constant into a single constant. It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV applies the generated rewrite rules for
// SRAV (shift right arithmetic variable): a count >= 64 is clamped to 63
// (arithmetic shift saturates to the sign bit), and any other constant
// count becomes SRAVconst. Rule order matters: the clamp must precede the
// general constant case. It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst folds a constant arithmetic right
// shift of a constant into a single constant (Go's signed >> sign-extends,
// matching the hardware semantics). It reports whether a rewrite was
// applied.
func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV applies the generated rewrite rules for
// SRLV (shift right logical variable): a count >= 64 yields constant 0, and
// any other constant count becomes SRLVconst. Rule order matters: the >=64
// case must precede the general constant case. It reports whether a rewrite
// was applied.
func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst folds a constant logical right shift
// of a constant into a single constant; the uint64 conversion forces a
// zero-filling shift. It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV applies the generated rewrite rules for
// SUBV: a 32-bit constant subtrahend becomes SUBVconst, (SUBV x x)
// simplifies to constant 0, and (SUBV 0 x) becomes NEGV. It reports whether
// a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst applies the generated rewrite rules
// for SUBVconst: identity (subtract 0), constant folding (d-c), and merging
// with a nested SUBVconst/ADDVconst into a single ADDVconst when the
// combined constant still fits in 32 bits. It reports whether a rewrite was
// applied.
func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR applies the generated rewrite rules for
// XOR: a 32-bit constant operand becomes XORconst (both operand orders are
// tried, XOR being commutative), and (XOR x x) simplifies to constant 0.
// It reports whether a rewrite was applied.
func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpMIPS64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst applies the generated rewrite rules
// for XORconst: identity (XOR 0), bitwise complement (XOR -1 becomes
// NORconst [0]), constant folding, and collapsing nested XORconsts when the
// merged constant still fits in 32 bits. It reports whether a rewrite was
// applied.
func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpMIPS64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16 lowers the generic signed 16-bit remainder:
// sign-extend both operands to 64 bits and take Select0 (the remainder
// element of the DIVV result tuple).
func rewriteValueMIPS64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u lowers the generic unsigned 16-bit
// remainder: zero-extend both operands to 64 bits and take Select0
// (the remainder element of the DIVVU result tuple).
func rewriteValueMIPS64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32 lowers the generic signed 32-bit remainder:
// sign-extend both operands to 64 bits and take Select0 (the remainder
// element of the DIVV result tuple).
func rewriteValueMIPS64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u lowers the generic unsigned 32-bit
// remainder: zero-extend both operands to 64 bits and take Select0
// (the remainder element of the DIVVU result tuple).
func rewriteValueMIPS64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64 lowers the generic signed 64-bit remainder:
// no extension needed, just Select0 (the remainder element) of the DIVV
// result tuple.
func rewriteValueMIPS64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64 x y)
	// result: (Select0 (DIVV x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u lowers the generic unsigned 64-bit
// remainder: no extension needed, just Select0 (the remainder element)
// of the DIVVU result tuple.
func rewriteValueMIPS64_OpMod64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64u x y)
	// result: (Select0 (DIVVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8 lowers the generic signed 8-bit remainder:
// sign-extend both operands to 64 bits and take Select0 (the remainder
// element of the DIVV result tuple).
func rewriteValueMIPS64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u lowers the generic unsigned 8-bit
// remainder: zero-extend both operands to 64 bits and take Select0
// (the remainder element of the DIVVU result tuple).
func rewriteValueMIPS64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMove lowers the generic Move (copy AuxInt bytes
// from src to dst, threading mem) to MIPS64 code. Small sizes with
// sufficient alignment become unrolled load/store chains (widest first,
// falling back to narrower accesses for weaker alignment); 8-byte-aligned
// sizes up to 8*128 use DUFFCOPY; everything else falls through to the
// LoweredMove runtime copy loop. Rules are tried in the order generated
// from gen/MIPS64.rules; the stores in an unrolled chain are linked
// through their mem arguments so later stores depend on earlier ones.
func rewriteValueMIPS64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpMIPS64DUFFCOPY)
		// AuxInt is the byte offset into the Duff's-device body at which
		// to enter: 16 bytes of code per 8-byte word not being copied.
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
	// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		// Third argument is the address of the last source element,
		// used by LoweredMove as the loop termination bound.
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMul16 lowers 16-bit multiply to Select1 (the
// low-word element) of the MULVU result tuple; the low bits are the
// same for signed and unsigned multiply, so no extension is needed.
func rewriteValueMIPS64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul32 lowers 32-bit multiply to Select1 (the
// low-word element) of the MULVU result tuple; the low bits are the
// same for signed and unsigned multiply, so no extension is needed.
func rewriteValueMIPS64_OpMul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul32 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul64 lowers 64-bit multiply to Select1 (the
// low-word element) of the MULVU result tuple; the low 64 bits are the
// same for signed and unsigned multiply.
func rewriteValueMIPS64_OpMul64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul64 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMul8 lowers 8-bit multiply to Select1 (the
// low-word element) of the MULVU result tuple; the low bits are the
// same for signed and unsigned multiply, so no extension is needed.
func rewriteValueMIPS64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (Select1 (MULVU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq16 lowers 16-bit inequality: XOR the
// zero-extended operands and test the result unsigned-greater-than 0.
// NOTE(review): x is extended with ZeroExt16to32 while y uses
// ZeroExt16to64 — this asymmetry comes from gen/MIPS64.rules; both
// presumably lower to the same 0xffff mask on MIPS64, so the result
// should be equivalent, but confirm against the rules file before
// relying on it.
func rewriteValueMIPS64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32 lowers 32-bit inequality: XOR the
// zero-extended operands and test the result unsigned-greater-than 0
// (nonzero XOR means the operands differ).
func rewriteValueMIPS64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeq32F lowers 32-bit float inequality: run the
// CMPEQF equality compare and read the FP condition flag inverted
// (FPFlagFalse), so Neq is the negation of the equality compare.
func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64 lowers 64-bit inequality: XOR the operands
// and test the result unsigned-greater-than 0 (nonzero XOR means the
// operands differ). No extension needed at full register width.
func rewriteValueMIPS64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNeq64F lowers 64-bit float inequality: run the
// CMPEQD equality compare and read the FP condition flag inverted
// (FPFlagFalse), so Neq is the negation of the equality compare.
func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpNeq8 lowers 8-bit inequality: XOR the
// zero-extended operands and test the result unsigned-greater-than 0
// (nonzero XOR means the operands differ).
func rewriteValueMIPS64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpNeqPtr lowers pointer inequality exactly like
// Neq64: XOR the operands and test unsigned-greater-than 0.
func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpNot lowers boolean negation to XORconst [1]:
// booleans are materialized as 0 or 1, so XOR with 1 flips the value.
func rewriteValueMIPS64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpMIPS64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpOffPtr lowers pointer-plus-constant-offset.
// Offsets from the stack pointer that fit in 32 bits become a MOVVaddr
// (address materialization); all other offsets become ADDVconst.
func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS64_OpPanicBounds lowers a bounds-check panic to one
// of the three ABI-specific variants (A/B/C), selected by which
// registers boundsABI(kind) says the index and length values occupy.
func rewriteValueMIPS64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpMIPS64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpMIPS64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpMIPS64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft16 expands a 16-bit rotate by a
// constant (MIPS64 has no rotate instruction here) into
// (x << (c&15)) | (x >> (-c&15)). Non-constant rotate counts are not
// matched and return false.
func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft32 expands a 32-bit rotate by a
// constant into (x << (c&31)) | (x >> (-c&31)). Non-constant rotate
// counts are not matched and return false.
func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVVconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 31)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 31)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft64 lowers a 64-bit rotate-left by a
// constant into (x << (c&63)) | (x >>> (-c&63)). Only constant rotate
// amounts are matched.
func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVVconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 63)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&63 is the complementary shift amount for the right half of the rotate.
		v3.AuxInt = int64ToAuxInt(-c & 63)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRotateLeft8 lowers an 8-bit rotate-left by a
// constant into (x << (c&7)) | (x >>> (-c&7)). Only constant rotate
// amounts are matched.
func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		// -c&7 is the complementary shift amount for the right half of the rotate.
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpRsh16Ux16 lowers an unsigned 16-bit right shift by a
// 16-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux32 lowers an unsigned 16-bit right shift by a
// 32-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64 lowers an unsigned 16-bit right shift by a
// 64-bit amount (no extension of y needed). SGTU(64, y) is 1 when the shift
// amount is in range; NEGV turns that into an all-ones mask, so the AND
// yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8 lowers an unsigned 16-bit right shift by an
// 8-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16 lowers a signed 16-bit right shift by a
// 16-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32 lowers a signed 16-bit right shift by a
// 32-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64 lowers a signed 16-bit right shift by a
// 64-bit amount (no extension of y needed). NEGV(SGTU(y, 63)) is all-ones
// when the shift amount exceeds 63; OR'ing it into the shift amount clamps
// oversized shifts to >= 63, so SRAV fills with the sign bit.
func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8 lowers a signed 16-bit right shift by an
// 8-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16 lowers an unsigned 32-bit right shift by a
// 16-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32 lowers an unsigned 32-bit right shift by a
// 32-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64 lowers an unsigned 32-bit right shift by a
// 64-bit amount (no extension of y needed). SGTU(64, y) is 1 when the shift
// amount is in range; NEGV turns that into an all-ones mask, so the AND
// yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8 lowers an unsigned 32-bit right shift by an
// 8-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16 lowers a signed 32-bit right shift by a
// 16-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32 lowers a signed 32-bit right shift by a
// 32-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64 lowers a signed 32-bit right shift by a
// 64-bit amount (no extension of y needed). NEGV(SGTU(y, 63)) is all-ones
// when the shift amount exceeds 63; OR'ing it into the shift amount clamps
// oversized shifts to >= 63, so SRAV fills with the sign bit.
func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8 lowers a signed 32-bit right shift by an
// 8-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16 lowers an unsigned 64-bit right shift by a
// 16-bit amount (x needs no extension). SGTU(64, zext(y)) is 1 when the
// shift amount is in range; NEGV turns that into an all-ones mask, so the
// AND yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32 lowers an unsigned 64-bit right shift by a
// 32-bit amount (x needs no extension). SGTU(64, zext(y)) is 1 when the
// shift amount is in range; NEGV turns that into an all-ones mask, so the
// AND yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64 lowers an unsigned 64-bit right shift by a
// 64-bit amount (neither operand needs extension). SGTU(64, y) is 1 when
// the shift amount is in range; NEGV turns that into an all-ones mask, so
// the AND yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg2(x, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8 lowers an unsigned 64-bit right shift by an
// 8-bit amount (x needs no extension). SGTU(64, zext(y)) is 1 when the
// shift amount is in range; NEGV turns that into an all-ones mask, so the
// AND yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(x, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16 lowers a signed 64-bit right shift by a
// 16-bit amount (x needs no extension). NEGV(SGTU(zext(y), 63)) is all-ones
// when the shift amount exceeds 63; OR'ing it into the shift amount clamps
// oversized shifts to >= 63, so SRAV fills with the sign bit.
func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended shift amount) is shared between the clamp and the OR.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32 lowers a signed 64-bit right shift by a
// 32-bit amount (x needs no extension). NEGV(SGTU(zext(y), 63)) is all-ones
// when the shift amount exceeds 63; OR'ing it into the shift amount clamps
// oversized shifts to >= 63, so SRAV fills with the sign bit.
func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended shift amount) is shared between the clamp and the OR.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64 lowers a signed 64-bit right shift by a
// 64-bit amount (neither operand needs extension). NEGV(SGTU(y, 63)) is
// all-ones when the shift amount exceeds 63; OR'ing it into the shift
// amount clamps oversized shifts to >= 63, so SRAV fills with the sign bit.
func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8 lowers a signed 64-bit right shift by an
// 8-bit amount (x needs no extension). NEGV(SGTU(zext(y), 63)) is all-ones
// when the shift amount exceeds 63; OR'ing it into the shift amount clamps
// oversized shifts to >= 63, so SRAV fills with the sign bit.
func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended shift amount) is shared between the clamp and the OR.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16 lowers an unsigned 8-bit right shift by a
// 16-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32 lowers an unsigned 8-bit right shift by a
// 32-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64 lowers an unsigned 8-bit right shift by a
// 64-bit amount (no extension of y needed). SGTU(64, y) is 1 when the shift
// amount is in range; NEGV turns that into an all-ones mask, so the AND
// yields the SRLV result for shifts < 64 and 0 for oversized shifts.
func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg2(v4, y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8 lowers an unsigned 8-bit right shift by an
// 8-bit amount. SGTU(64, zext(y)) is 1 when the shift amount is in range;
// NEGV turns that into an all-ones mask, so the AND yields the SRLV result
// for shifts < 64 and 0 for oversized shifts, matching Go's shift semantics.
func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		// v3 (the zero-extended shift amount) is shared between the mask and the shift.
		v4.AddArg2(v5, v3)
		v.AddArg2(v0, v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16 lowers a signed 8-bit right shift by a
// 16-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32 lowers a signed 8-bit right shift by a
// 32-bit amount. NEGV(SGTU(zext(y), 63)) is all-ones when the shift amount
// exceeds 63; OR'ing it into the shift amount clamps oversized shifts to
// >= 63, so SRAV fills with the sign bit, matching Go's semantics.
func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended shift amount) is shared between the clamp and the OR.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64 lowers the generic Rsh8x64 (arithmetic right
// shift of an 8-bit value by an unsigned 64-bit amount) to MIPS64 ops.
// The amount y is already 64-bit, so no zero-extension is needed; it is
// still clamped to all ones when y > 63, as in the other Rsh8xN rules.
func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8 lowers the generic Rsh8x8 (arithmetic right
// shift of an 8-bit value by an unsigned 8-bit amount) to MIPS64 ops.
// Same clamping scheme as the other Rsh8xN rules: amounts > 63 are forced
// to all ones via (NEGV (SGTU y 63)) OR'ed into the amount.
func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0 rewrites Select0, the first element of a
// tuple-producing op. As the constant-folding rules below show, on MIPS64
// Select0 of DIVV/DIVVU is the remainder (c%d), while Select1 is the
// quotient. Rules are tried in order; each returns true on the first match.
func rewriteValueMIPS64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Mul64uover x y))
	// result: (Select1 <typ.UInt64> (MULVU x y))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpSelect1)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// result: (MOVVconst [0])
	// Remainder of an unsigned division by 1 is always 0.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo64(c)
	// result: (ANDconst [c-1] x)
	// Unsigned remainder by a power of two is a bit mask.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	// Signed remainder folding; Go's % is fully defined for d != 0.
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	// Unsigned remainder folding; computed in uint64, stored as int64 aux.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1 rewrites Select1, the second element of a
// tuple-producing op. As the constant-folding rules below show, on MIPS64
// Select1 of DIVV/DIVVU is the quotient (c/d) and Select1 of MULVU is the
// low 64 bits of the product (c*d). Rules are tried in order; each returns
// true on the first match. The MULVU rules iterate both argument orders
// because multiplication is commutative.
func rewriteValueMIPS64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0]))
	// Overflow flag of Mul64uover: the high half (Select0 of MULVU) is
	// nonzero exactly when the full product overflows 64 bits.
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpMIPS64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// result: (NEGV x)
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpMIPS64NEGV)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpMIPS64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// result: x
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo64(c)
	// result: (SLLVconst [log64(c)] x)
	// Multiplication by a power of two becomes a left shift.
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			x := v_0_0
			if v_0_1.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0_1.AuxInt)
			if !(isPowerOfTwo64(c)) {
				continue
			}
			v.reset(OpMIPS64SLLVconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// result: x
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo64(c)
	// result: (SRLVconst [log64(c)] x)
	// Unsigned division by a power of two becomes a logical right shift.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_1.AuxInt)
		if !(isPowerOfTwo64(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// result: (MOVVconst [c*d])
	// Low 64 bits of the product; int64 overflow wraps, matching MULVU.
	for {
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpMIPS64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if v_0_1.Op != OpMIPS64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_0_1.AuxInt)
			v.reset(OpMIPS64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	// Signed quotient folding; Go's / is fully defined for d != 0.
	for {
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	// Unsigned quotient folding; computed in uint64, stored as int64 aux.
	for {
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSlicemask lowers Slicemask: an all-ones mask when
// x > 0 and zero when x == 0, computed as (-x) >> 63 (arithmetic shift),
// which broadcasts the sign bit of the negation across the word.
func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpStore lowers the generic Store to the MIPS64 store
// of matching width, selected by the stored type's size in the aux:
// 1 -> MOVBstore, 2 -> MOVHstore, 4 -> MOVWstore/MOVFstore (int/float),
// 8 -> MOVVstore/MOVDstore (int/float). Returns false if no size matches.
func rewriteValueMIPS64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpZero lowers the generic Zero (zero s bytes at ptr)
// to MIPS64 stores. Small, well-aligned sizes become chains of
// MOVB/MOVH/MOVW/MOVVstore of a zero constant (later stores in a chain
// take the earlier store's memory as their mem argument); mid-size 8-byte-
// aligned blocks use DUFFZERO; everything else falls back to the
// LoweredZero runtime loop. Alignment comes from the type in v.Aux.
func rewriteValueMIPS64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
	// Unaligned 2 bytes: two byte stores sharing one zero constant.
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
	// Unaligned 4 bytes: four chained byte stores.
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	// The aux is the byte offset to jump to inside the Duff's-device body;
	// each 8-byte unit of the routine clears one word.
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0
	// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)
	// Generic fallback loop; second arg is the address of the last element.
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !((s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(ptr)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
  7717  func rewriteBlockMIPS64(b *Block) bool {
  7718  	switch b.Kind {
  7719  	case BlockMIPS64EQ:
  7720  		// match: (EQ (FPFlagTrue cmp) yes no)
  7721  		// result: (FPF cmp yes no)
  7722  		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
  7723  			v_0 := b.Controls[0]
  7724  			cmp := v_0.Args[0]
  7725  			b.resetWithControl(BlockMIPS64FPF, cmp)
  7726  			return true
  7727  		}
  7728  		// match: (EQ (FPFlagFalse cmp) yes no)
  7729  		// result: (FPT cmp yes no)
  7730  		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
  7731  			v_0 := b.Controls[0]
  7732  			cmp := v_0.Args[0]
  7733  			b.resetWithControl(BlockMIPS64FPT, cmp)
  7734  			return true
  7735  		}
  7736  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  7737  		// result: (NE cmp yes no)
  7738  		for b.Controls[0].Op == OpMIPS64XORconst {
  7739  			v_0 := b.Controls[0]
  7740  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7741  				break
  7742  			}
  7743  			cmp := v_0.Args[0]
  7744  			if cmp.Op != OpMIPS64SGT {
  7745  				break
  7746  			}
  7747  			b.resetWithControl(BlockMIPS64NE, cmp)
  7748  			return true
  7749  		}
  7750  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  7751  		// result: (NE cmp yes no)
  7752  		for b.Controls[0].Op == OpMIPS64XORconst {
  7753  			v_0 := b.Controls[0]
  7754  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7755  				break
  7756  			}
  7757  			cmp := v_0.Args[0]
  7758  			if cmp.Op != OpMIPS64SGTU {
  7759  				break
  7760  			}
  7761  			b.resetWithControl(BlockMIPS64NE, cmp)
  7762  			return true
  7763  		}
  7764  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  7765  		// result: (NE cmp yes no)
  7766  		for b.Controls[0].Op == OpMIPS64XORconst {
  7767  			v_0 := b.Controls[0]
  7768  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7769  				break
  7770  			}
  7771  			cmp := v_0.Args[0]
  7772  			if cmp.Op != OpMIPS64SGTconst {
  7773  				break
  7774  			}
  7775  			b.resetWithControl(BlockMIPS64NE, cmp)
  7776  			return true
  7777  		}
  7778  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  7779  		// result: (NE cmp yes no)
  7780  		for b.Controls[0].Op == OpMIPS64XORconst {
  7781  			v_0 := b.Controls[0]
  7782  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7783  				break
  7784  			}
  7785  			cmp := v_0.Args[0]
  7786  			if cmp.Op != OpMIPS64SGTUconst {
  7787  				break
  7788  			}
  7789  			b.resetWithControl(BlockMIPS64NE, cmp)
  7790  			return true
  7791  		}
  7792  		// match: (EQ (SGTUconst [1] x) yes no)
  7793  		// result: (NE x yes no)
  7794  		for b.Controls[0].Op == OpMIPS64SGTUconst {
  7795  			v_0 := b.Controls[0]
  7796  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7797  				break
  7798  			}
  7799  			x := v_0.Args[0]
  7800  			b.resetWithControl(BlockMIPS64NE, x)
  7801  			return true
  7802  		}
  7803  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
  7804  		// result: (EQ x yes no)
  7805  		for b.Controls[0].Op == OpMIPS64SGTU {
  7806  			v_0 := b.Controls[0]
  7807  			_ = v_0.Args[1]
  7808  			x := v_0.Args[0]
  7809  			v_0_1 := v_0.Args[1]
  7810  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7811  				break
  7812  			}
  7813  			b.resetWithControl(BlockMIPS64EQ, x)
  7814  			return true
  7815  		}
  7816  		// match: (EQ (SGTconst [0] x) yes no)
  7817  		// result: (GEZ x yes no)
  7818  		for b.Controls[0].Op == OpMIPS64SGTconst {
  7819  			v_0 := b.Controls[0]
  7820  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7821  				break
  7822  			}
  7823  			x := v_0.Args[0]
  7824  			b.resetWithControl(BlockMIPS64GEZ, x)
  7825  			return true
  7826  		}
  7827  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
  7828  		// result: (LEZ x yes no)
  7829  		for b.Controls[0].Op == OpMIPS64SGT {
  7830  			v_0 := b.Controls[0]
  7831  			_ = v_0.Args[1]
  7832  			x := v_0.Args[0]
  7833  			v_0_1 := v_0.Args[1]
  7834  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  7835  				break
  7836  			}
  7837  			b.resetWithControl(BlockMIPS64LEZ, x)
  7838  			return true
  7839  		}
  7840  		// match: (EQ (MOVVconst [0]) yes no)
  7841  		// result: (First yes no)
  7842  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7843  			v_0 := b.Controls[0]
  7844  			if auxIntToInt64(v_0.AuxInt) != 0 {
  7845  				break
  7846  			}
  7847  			b.Reset(BlockFirst)
  7848  			return true
  7849  		}
  7850  		// match: (EQ (MOVVconst [c]) yes no)
  7851  		// cond: c != 0
  7852  		// result: (First no yes)
  7853  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7854  			v_0 := b.Controls[0]
  7855  			c := auxIntToInt64(v_0.AuxInt)
  7856  			if !(c != 0) {
  7857  				break
  7858  			}
  7859  			b.Reset(BlockFirst)
  7860  			b.swapSuccessors()
  7861  			return true
  7862  		}
  7863  	case BlockMIPS64GEZ:
  7864  		// match: (GEZ (MOVVconst [c]) yes no)
  7865  		// cond: c >= 0
  7866  		// result: (First yes no)
  7867  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7868  			v_0 := b.Controls[0]
  7869  			c := auxIntToInt64(v_0.AuxInt)
  7870  			if !(c >= 0) {
  7871  				break
  7872  			}
  7873  			b.Reset(BlockFirst)
  7874  			return true
  7875  		}
  7876  		// match: (GEZ (MOVVconst [c]) yes no)
  7877  		// cond: c < 0
  7878  		// result: (First no yes)
  7879  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7880  			v_0 := b.Controls[0]
  7881  			c := auxIntToInt64(v_0.AuxInt)
  7882  			if !(c < 0) {
  7883  				break
  7884  			}
  7885  			b.Reset(BlockFirst)
  7886  			b.swapSuccessors()
  7887  			return true
  7888  		}
  7889  	case BlockMIPS64GTZ:
  7890  		// match: (GTZ (MOVVconst [c]) yes no)
  7891  		// cond: c > 0
  7892  		// result: (First yes no)
  7893  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7894  			v_0 := b.Controls[0]
  7895  			c := auxIntToInt64(v_0.AuxInt)
  7896  			if !(c > 0) {
  7897  				break
  7898  			}
  7899  			b.Reset(BlockFirst)
  7900  			return true
  7901  		}
  7902  		// match: (GTZ (MOVVconst [c]) yes no)
  7903  		// cond: c <= 0
  7904  		// result: (First no yes)
  7905  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7906  			v_0 := b.Controls[0]
  7907  			c := auxIntToInt64(v_0.AuxInt)
  7908  			if !(c <= 0) {
  7909  				break
  7910  			}
  7911  			b.Reset(BlockFirst)
  7912  			b.swapSuccessors()
  7913  			return true
  7914  		}
  7915  	case BlockIf:
  7916  		// match: (If cond yes no)
  7917  		// result: (NE cond yes no)
  7918  		for {
  7919  			cond := b.Controls[0]
  7920  			b.resetWithControl(BlockMIPS64NE, cond)
  7921  			return true
  7922  		}
  7923  	case BlockMIPS64LEZ:
  7924  		// match: (LEZ (MOVVconst [c]) yes no)
  7925  		// cond: c <= 0
  7926  		// result: (First yes no)
  7927  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7928  			v_0 := b.Controls[0]
  7929  			c := auxIntToInt64(v_0.AuxInt)
  7930  			if !(c <= 0) {
  7931  				break
  7932  			}
  7933  			b.Reset(BlockFirst)
  7934  			return true
  7935  		}
  7936  		// match: (LEZ (MOVVconst [c]) yes no)
  7937  		// cond: c > 0
  7938  		// result: (First no yes)
  7939  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7940  			v_0 := b.Controls[0]
  7941  			c := auxIntToInt64(v_0.AuxInt)
  7942  			if !(c > 0) {
  7943  				break
  7944  			}
  7945  			b.Reset(BlockFirst)
  7946  			b.swapSuccessors()
  7947  			return true
  7948  		}
  7949  	case BlockMIPS64LTZ:
  7950  		// match: (LTZ (MOVVconst [c]) yes no)
  7951  		// cond: c < 0
  7952  		// result: (First yes no)
  7953  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7954  			v_0 := b.Controls[0]
  7955  			c := auxIntToInt64(v_0.AuxInt)
  7956  			if !(c < 0) {
  7957  				break
  7958  			}
  7959  			b.Reset(BlockFirst)
  7960  			return true
  7961  		}
  7962  		// match: (LTZ (MOVVconst [c]) yes no)
  7963  		// cond: c >= 0
  7964  		// result: (First no yes)
  7965  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  7966  			v_0 := b.Controls[0]
  7967  			c := auxIntToInt64(v_0.AuxInt)
  7968  			if !(c >= 0) {
  7969  				break
  7970  			}
  7971  			b.Reset(BlockFirst)
  7972  			b.swapSuccessors()
  7973  			return true
  7974  		}
  7975  	case BlockMIPS64NE:
  7976  		// match: (NE (FPFlagTrue cmp) yes no)
  7977  		// result: (FPT cmp yes no)
  7978  		for b.Controls[0].Op == OpMIPS64FPFlagTrue {
  7979  			v_0 := b.Controls[0]
  7980  			cmp := v_0.Args[0]
  7981  			b.resetWithControl(BlockMIPS64FPT, cmp)
  7982  			return true
  7983  		}
  7984  		// match: (NE (FPFlagFalse cmp) yes no)
  7985  		// result: (FPF cmp yes no)
  7986  		for b.Controls[0].Op == OpMIPS64FPFlagFalse {
  7987  			v_0 := b.Controls[0]
  7988  			cmp := v_0.Args[0]
  7989  			b.resetWithControl(BlockMIPS64FPF, cmp)
  7990  			return true
  7991  		}
  7992  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  7993  		// result: (EQ cmp yes no)
  7994  		for b.Controls[0].Op == OpMIPS64XORconst {
  7995  			v_0 := b.Controls[0]
  7996  			if auxIntToInt64(v_0.AuxInt) != 1 {
  7997  				break
  7998  			}
  7999  			cmp := v_0.Args[0]
  8000  			if cmp.Op != OpMIPS64SGT {
  8001  				break
  8002  			}
  8003  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8004  			return true
  8005  		}
  8006  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  8007  		// result: (EQ cmp yes no)
  8008  		for b.Controls[0].Op == OpMIPS64XORconst {
  8009  			v_0 := b.Controls[0]
  8010  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8011  				break
  8012  			}
  8013  			cmp := v_0.Args[0]
  8014  			if cmp.Op != OpMIPS64SGTU {
  8015  				break
  8016  			}
  8017  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8018  			return true
  8019  		}
  8020  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  8021  		// result: (EQ cmp yes no)
  8022  		for b.Controls[0].Op == OpMIPS64XORconst {
  8023  			v_0 := b.Controls[0]
  8024  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8025  				break
  8026  			}
  8027  			cmp := v_0.Args[0]
  8028  			if cmp.Op != OpMIPS64SGTconst {
  8029  				break
  8030  			}
  8031  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8032  			return true
  8033  		}
  8034  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  8035  		// result: (EQ cmp yes no)
  8036  		for b.Controls[0].Op == OpMIPS64XORconst {
  8037  			v_0 := b.Controls[0]
  8038  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8039  				break
  8040  			}
  8041  			cmp := v_0.Args[0]
  8042  			if cmp.Op != OpMIPS64SGTUconst {
  8043  				break
  8044  			}
  8045  			b.resetWithControl(BlockMIPS64EQ, cmp)
  8046  			return true
  8047  		}
  8048  		// match: (NE (SGTUconst [1] x) yes no)
  8049  		// result: (EQ x yes no)
  8050  		for b.Controls[0].Op == OpMIPS64SGTUconst {
  8051  			v_0 := b.Controls[0]
  8052  			if auxIntToInt64(v_0.AuxInt) != 1 {
  8053  				break
  8054  			}
  8055  			x := v_0.Args[0]
  8056  			b.resetWithControl(BlockMIPS64EQ, x)
  8057  			return true
  8058  		}
  8059  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
  8060  		// result: (NE x yes no)
  8061  		for b.Controls[0].Op == OpMIPS64SGTU {
  8062  			v_0 := b.Controls[0]
  8063  			_ = v_0.Args[1]
  8064  			x := v_0.Args[0]
  8065  			v_0_1 := v_0.Args[1]
  8066  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  8067  				break
  8068  			}
  8069  			b.resetWithControl(BlockMIPS64NE, x)
  8070  			return true
  8071  		}
  8072  		// match: (NE (SGTconst [0] x) yes no)
  8073  		// result: (LTZ x yes no)
  8074  		for b.Controls[0].Op == OpMIPS64SGTconst {
  8075  			v_0 := b.Controls[0]
  8076  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8077  				break
  8078  			}
  8079  			x := v_0.Args[0]
  8080  			b.resetWithControl(BlockMIPS64LTZ, x)
  8081  			return true
  8082  		}
  8083  		// match: (NE (SGT x (MOVVconst [0])) yes no)
  8084  		// result: (GTZ x yes no)
  8085  		for b.Controls[0].Op == OpMIPS64SGT {
  8086  			v_0 := b.Controls[0]
  8087  			_ = v_0.Args[1]
  8088  			x := v_0.Args[0]
  8089  			v_0_1 := v_0.Args[1]
  8090  			if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
  8091  				break
  8092  			}
  8093  			b.resetWithControl(BlockMIPS64GTZ, x)
  8094  			return true
  8095  		}
  8096  		// match: (NE (MOVVconst [0]) yes no)
  8097  		// result: (First no yes)
  8098  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  8099  			v_0 := b.Controls[0]
  8100  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8101  				break
  8102  			}
  8103  			b.Reset(BlockFirst)
  8104  			b.swapSuccessors()
  8105  			return true
  8106  		}
  8107  		// match: (NE (MOVVconst [c]) yes no)
  8108  		// cond: c != 0
  8109  		// result: (First yes no)
  8110  		for b.Controls[0].Op == OpMIPS64MOVVconst {
  8111  			v_0 := b.Controls[0]
  8112  			c := auxIntToInt64(v_0.AuxInt)
  8113  			if !(c != 0) {
  8114  				break
  8115  			}
  8116  			b.Reset(BlockFirst)
  8117  			return true
  8118  		}
  8119  	}
  8120  	return false
  8121  }
  8122  

View as plain text