1
2
3
4 package ssa
5
6 import "cmd/compile/internal/types"
7
8 func rewriteValueARM64(v *Value) bool {
9 switch v.Op {
10 case OpARM64ADCSflags:
11 return rewriteValueARM64_OpARM64ADCSflags(v)
12 case OpARM64ADD:
13 return rewriteValueARM64_OpARM64ADD(v)
14 case OpARM64ADDconst:
15 return rewriteValueARM64_OpARM64ADDconst(v)
16 case OpARM64ADDshiftLL:
17 return rewriteValueARM64_OpARM64ADDshiftLL(v)
18 case OpARM64ADDshiftRA:
19 return rewriteValueARM64_OpARM64ADDshiftRA(v)
20 case OpARM64ADDshiftRL:
21 return rewriteValueARM64_OpARM64ADDshiftRL(v)
22 case OpARM64AND:
23 return rewriteValueARM64_OpARM64AND(v)
24 case OpARM64ANDconst:
25 return rewriteValueARM64_OpARM64ANDconst(v)
26 case OpARM64ANDshiftLL:
27 return rewriteValueARM64_OpARM64ANDshiftLL(v)
28 case OpARM64ANDshiftRA:
29 return rewriteValueARM64_OpARM64ANDshiftRA(v)
30 case OpARM64ANDshiftRL:
31 return rewriteValueARM64_OpARM64ANDshiftRL(v)
32 case OpARM64ANDshiftRO:
33 return rewriteValueARM64_OpARM64ANDshiftRO(v)
34 case OpARM64BIC:
35 return rewriteValueARM64_OpARM64BIC(v)
36 case OpARM64BICshiftLL:
37 return rewriteValueARM64_OpARM64BICshiftLL(v)
38 case OpARM64BICshiftRA:
39 return rewriteValueARM64_OpARM64BICshiftRA(v)
40 case OpARM64BICshiftRL:
41 return rewriteValueARM64_OpARM64BICshiftRL(v)
42 case OpARM64BICshiftRO:
43 return rewriteValueARM64_OpARM64BICshiftRO(v)
44 case OpARM64CMN:
45 return rewriteValueARM64_OpARM64CMN(v)
46 case OpARM64CMNW:
47 return rewriteValueARM64_OpARM64CMNW(v)
48 case OpARM64CMNWconst:
49 return rewriteValueARM64_OpARM64CMNWconst(v)
50 case OpARM64CMNconst:
51 return rewriteValueARM64_OpARM64CMNconst(v)
52 case OpARM64CMNshiftLL:
53 return rewriteValueARM64_OpARM64CMNshiftLL(v)
54 case OpARM64CMNshiftRA:
55 return rewriteValueARM64_OpARM64CMNshiftRA(v)
56 case OpARM64CMNshiftRL:
57 return rewriteValueARM64_OpARM64CMNshiftRL(v)
58 case OpARM64CMP:
59 return rewriteValueARM64_OpARM64CMP(v)
60 case OpARM64CMPW:
61 return rewriteValueARM64_OpARM64CMPW(v)
62 case OpARM64CMPWconst:
63 return rewriteValueARM64_OpARM64CMPWconst(v)
64 case OpARM64CMPconst:
65 return rewriteValueARM64_OpARM64CMPconst(v)
66 case OpARM64CMPshiftLL:
67 return rewriteValueARM64_OpARM64CMPshiftLL(v)
68 case OpARM64CMPshiftRA:
69 return rewriteValueARM64_OpARM64CMPshiftRA(v)
70 case OpARM64CMPshiftRL:
71 return rewriteValueARM64_OpARM64CMPshiftRL(v)
72 case OpARM64CSEL:
73 return rewriteValueARM64_OpARM64CSEL(v)
74 case OpARM64CSEL0:
75 return rewriteValueARM64_OpARM64CSEL0(v)
76 case OpARM64CSETM:
77 return rewriteValueARM64_OpARM64CSETM(v)
78 case OpARM64CSINC:
79 return rewriteValueARM64_OpARM64CSINC(v)
80 case OpARM64CSINV:
81 return rewriteValueARM64_OpARM64CSINV(v)
82 case OpARM64CSNEG:
83 return rewriteValueARM64_OpARM64CSNEG(v)
84 case OpARM64DIV:
85 return rewriteValueARM64_OpARM64DIV(v)
86 case OpARM64DIVW:
87 return rewriteValueARM64_OpARM64DIVW(v)
88 case OpARM64EON:
89 return rewriteValueARM64_OpARM64EON(v)
90 case OpARM64EONshiftLL:
91 return rewriteValueARM64_OpARM64EONshiftLL(v)
92 case OpARM64EONshiftRA:
93 return rewriteValueARM64_OpARM64EONshiftRA(v)
94 case OpARM64EONshiftRL:
95 return rewriteValueARM64_OpARM64EONshiftRL(v)
96 case OpARM64EONshiftRO:
97 return rewriteValueARM64_OpARM64EONshiftRO(v)
98 case OpARM64Equal:
99 return rewriteValueARM64_OpARM64Equal(v)
100 case OpARM64FADDD:
101 return rewriteValueARM64_OpARM64FADDD(v)
102 case OpARM64FADDS:
103 return rewriteValueARM64_OpARM64FADDS(v)
104 case OpARM64FCMPD:
105 return rewriteValueARM64_OpARM64FCMPD(v)
106 case OpARM64FCMPS:
107 return rewriteValueARM64_OpARM64FCMPS(v)
108 case OpARM64FMOVDfpgp:
109 return rewriteValueARM64_OpARM64FMOVDfpgp(v)
110 case OpARM64FMOVDgpfp:
111 return rewriteValueARM64_OpARM64FMOVDgpfp(v)
112 case OpARM64FMOVDload:
113 return rewriteValueARM64_OpARM64FMOVDload(v)
114 case OpARM64FMOVDloadidx:
115 return rewriteValueARM64_OpARM64FMOVDloadidx(v)
116 case OpARM64FMOVDloadidx8:
117 return rewriteValueARM64_OpARM64FMOVDloadidx8(v)
118 case OpARM64FMOVDstore:
119 return rewriteValueARM64_OpARM64FMOVDstore(v)
120 case OpARM64FMOVDstoreidx:
121 return rewriteValueARM64_OpARM64FMOVDstoreidx(v)
122 case OpARM64FMOVDstoreidx8:
123 return rewriteValueARM64_OpARM64FMOVDstoreidx8(v)
124 case OpARM64FMOVSload:
125 return rewriteValueARM64_OpARM64FMOVSload(v)
126 case OpARM64FMOVSloadidx:
127 return rewriteValueARM64_OpARM64FMOVSloadidx(v)
128 case OpARM64FMOVSloadidx4:
129 return rewriteValueARM64_OpARM64FMOVSloadidx4(v)
130 case OpARM64FMOVSstore:
131 return rewriteValueARM64_OpARM64FMOVSstore(v)
132 case OpARM64FMOVSstoreidx:
133 return rewriteValueARM64_OpARM64FMOVSstoreidx(v)
134 case OpARM64FMOVSstoreidx4:
135 return rewriteValueARM64_OpARM64FMOVSstoreidx4(v)
136 case OpARM64FMULD:
137 return rewriteValueARM64_OpARM64FMULD(v)
138 case OpARM64FMULS:
139 return rewriteValueARM64_OpARM64FMULS(v)
140 case OpARM64FNEGD:
141 return rewriteValueARM64_OpARM64FNEGD(v)
142 case OpARM64FNEGS:
143 return rewriteValueARM64_OpARM64FNEGS(v)
144 case OpARM64FNMULD:
145 return rewriteValueARM64_OpARM64FNMULD(v)
146 case OpARM64FNMULS:
147 return rewriteValueARM64_OpARM64FNMULS(v)
148 case OpARM64FSUBD:
149 return rewriteValueARM64_OpARM64FSUBD(v)
150 case OpARM64FSUBS:
151 return rewriteValueARM64_OpARM64FSUBS(v)
152 case OpARM64GreaterEqual:
153 return rewriteValueARM64_OpARM64GreaterEqual(v)
154 case OpARM64GreaterEqualF:
155 return rewriteValueARM64_OpARM64GreaterEqualF(v)
156 case OpARM64GreaterEqualU:
157 return rewriteValueARM64_OpARM64GreaterEqualU(v)
158 case OpARM64GreaterThan:
159 return rewriteValueARM64_OpARM64GreaterThan(v)
160 case OpARM64GreaterThanF:
161 return rewriteValueARM64_OpARM64GreaterThanF(v)
162 case OpARM64GreaterThanU:
163 return rewriteValueARM64_OpARM64GreaterThanU(v)
164 case OpARM64LessEqual:
165 return rewriteValueARM64_OpARM64LessEqual(v)
166 case OpARM64LessEqualF:
167 return rewriteValueARM64_OpARM64LessEqualF(v)
168 case OpARM64LessEqualU:
169 return rewriteValueARM64_OpARM64LessEqualU(v)
170 case OpARM64LessThan:
171 return rewriteValueARM64_OpARM64LessThan(v)
172 case OpARM64LessThanF:
173 return rewriteValueARM64_OpARM64LessThanF(v)
174 case OpARM64LessThanU:
175 return rewriteValueARM64_OpARM64LessThanU(v)
176 case OpARM64MADD:
177 return rewriteValueARM64_OpARM64MADD(v)
178 case OpARM64MADDW:
179 return rewriteValueARM64_OpARM64MADDW(v)
180 case OpARM64MNEG:
181 return rewriteValueARM64_OpARM64MNEG(v)
182 case OpARM64MNEGW:
183 return rewriteValueARM64_OpARM64MNEGW(v)
184 case OpARM64MOD:
185 return rewriteValueARM64_OpARM64MOD(v)
186 case OpARM64MODW:
187 return rewriteValueARM64_OpARM64MODW(v)
188 case OpARM64MOVBUload:
189 return rewriteValueARM64_OpARM64MOVBUload(v)
190 case OpARM64MOVBUloadidx:
191 return rewriteValueARM64_OpARM64MOVBUloadidx(v)
192 case OpARM64MOVBUreg:
193 return rewriteValueARM64_OpARM64MOVBUreg(v)
194 case OpARM64MOVBload:
195 return rewriteValueARM64_OpARM64MOVBload(v)
196 case OpARM64MOVBloadidx:
197 return rewriteValueARM64_OpARM64MOVBloadidx(v)
198 case OpARM64MOVBreg:
199 return rewriteValueARM64_OpARM64MOVBreg(v)
200 case OpARM64MOVBstore:
201 return rewriteValueARM64_OpARM64MOVBstore(v)
202 case OpARM64MOVBstoreidx:
203 return rewriteValueARM64_OpARM64MOVBstoreidx(v)
204 case OpARM64MOVBstorezero:
205 return rewriteValueARM64_OpARM64MOVBstorezero(v)
206 case OpARM64MOVBstorezeroidx:
207 return rewriteValueARM64_OpARM64MOVBstorezeroidx(v)
208 case OpARM64MOVDload:
209 return rewriteValueARM64_OpARM64MOVDload(v)
210 case OpARM64MOVDloadidx:
211 return rewriteValueARM64_OpARM64MOVDloadidx(v)
212 case OpARM64MOVDloadidx8:
213 return rewriteValueARM64_OpARM64MOVDloadidx8(v)
214 case OpARM64MOVDnop:
215 return rewriteValueARM64_OpARM64MOVDnop(v)
216 case OpARM64MOVDreg:
217 return rewriteValueARM64_OpARM64MOVDreg(v)
218 case OpARM64MOVDstore:
219 return rewriteValueARM64_OpARM64MOVDstore(v)
220 case OpARM64MOVDstoreidx:
221 return rewriteValueARM64_OpARM64MOVDstoreidx(v)
222 case OpARM64MOVDstoreidx8:
223 return rewriteValueARM64_OpARM64MOVDstoreidx8(v)
224 case OpARM64MOVDstorezero:
225 return rewriteValueARM64_OpARM64MOVDstorezero(v)
226 case OpARM64MOVDstorezeroidx:
227 return rewriteValueARM64_OpARM64MOVDstorezeroidx(v)
228 case OpARM64MOVDstorezeroidx8:
229 return rewriteValueARM64_OpARM64MOVDstorezeroidx8(v)
230 case OpARM64MOVHUload:
231 return rewriteValueARM64_OpARM64MOVHUload(v)
232 case OpARM64MOVHUloadidx:
233 return rewriteValueARM64_OpARM64MOVHUloadidx(v)
234 case OpARM64MOVHUloadidx2:
235 return rewriteValueARM64_OpARM64MOVHUloadidx2(v)
236 case OpARM64MOVHUreg:
237 return rewriteValueARM64_OpARM64MOVHUreg(v)
238 case OpARM64MOVHload:
239 return rewriteValueARM64_OpARM64MOVHload(v)
240 case OpARM64MOVHloadidx:
241 return rewriteValueARM64_OpARM64MOVHloadidx(v)
242 case OpARM64MOVHloadidx2:
243 return rewriteValueARM64_OpARM64MOVHloadidx2(v)
244 case OpARM64MOVHreg:
245 return rewriteValueARM64_OpARM64MOVHreg(v)
246 case OpARM64MOVHstore:
247 return rewriteValueARM64_OpARM64MOVHstore(v)
248 case OpARM64MOVHstoreidx:
249 return rewriteValueARM64_OpARM64MOVHstoreidx(v)
250 case OpARM64MOVHstoreidx2:
251 return rewriteValueARM64_OpARM64MOVHstoreidx2(v)
252 case OpARM64MOVHstorezero:
253 return rewriteValueARM64_OpARM64MOVHstorezero(v)
254 case OpARM64MOVHstorezeroidx:
255 return rewriteValueARM64_OpARM64MOVHstorezeroidx(v)
256 case OpARM64MOVHstorezeroidx2:
257 return rewriteValueARM64_OpARM64MOVHstorezeroidx2(v)
258 case OpARM64MOVQstorezero:
259 return rewriteValueARM64_OpARM64MOVQstorezero(v)
260 case OpARM64MOVWUload:
261 return rewriteValueARM64_OpARM64MOVWUload(v)
262 case OpARM64MOVWUloadidx:
263 return rewriteValueARM64_OpARM64MOVWUloadidx(v)
264 case OpARM64MOVWUloadidx4:
265 return rewriteValueARM64_OpARM64MOVWUloadidx4(v)
266 case OpARM64MOVWUreg:
267 return rewriteValueARM64_OpARM64MOVWUreg(v)
268 case OpARM64MOVWload:
269 return rewriteValueARM64_OpARM64MOVWload(v)
270 case OpARM64MOVWloadidx:
271 return rewriteValueARM64_OpARM64MOVWloadidx(v)
272 case OpARM64MOVWloadidx4:
273 return rewriteValueARM64_OpARM64MOVWloadidx4(v)
274 case OpARM64MOVWreg:
275 return rewriteValueARM64_OpARM64MOVWreg(v)
276 case OpARM64MOVWstore:
277 return rewriteValueARM64_OpARM64MOVWstore(v)
278 case OpARM64MOVWstoreidx:
279 return rewriteValueARM64_OpARM64MOVWstoreidx(v)
280 case OpARM64MOVWstoreidx4:
281 return rewriteValueARM64_OpARM64MOVWstoreidx4(v)
282 case OpARM64MOVWstorezero:
283 return rewriteValueARM64_OpARM64MOVWstorezero(v)
284 case OpARM64MOVWstorezeroidx:
285 return rewriteValueARM64_OpARM64MOVWstorezeroidx(v)
286 case OpARM64MOVWstorezeroidx4:
287 return rewriteValueARM64_OpARM64MOVWstorezeroidx4(v)
288 case OpARM64MSUB:
289 return rewriteValueARM64_OpARM64MSUB(v)
290 case OpARM64MSUBW:
291 return rewriteValueARM64_OpARM64MSUBW(v)
292 case OpARM64MUL:
293 return rewriteValueARM64_OpARM64MUL(v)
294 case OpARM64MULW:
295 return rewriteValueARM64_OpARM64MULW(v)
296 case OpARM64MVN:
297 return rewriteValueARM64_OpARM64MVN(v)
298 case OpARM64MVNshiftLL:
299 return rewriteValueARM64_OpARM64MVNshiftLL(v)
300 case OpARM64MVNshiftRA:
301 return rewriteValueARM64_OpARM64MVNshiftRA(v)
302 case OpARM64MVNshiftRL:
303 return rewriteValueARM64_OpARM64MVNshiftRL(v)
304 case OpARM64MVNshiftRO:
305 return rewriteValueARM64_OpARM64MVNshiftRO(v)
306 case OpARM64NEG:
307 return rewriteValueARM64_OpARM64NEG(v)
308 case OpARM64NEGshiftLL:
309 return rewriteValueARM64_OpARM64NEGshiftLL(v)
310 case OpARM64NEGshiftRA:
311 return rewriteValueARM64_OpARM64NEGshiftRA(v)
312 case OpARM64NEGshiftRL:
313 return rewriteValueARM64_OpARM64NEGshiftRL(v)
314 case OpARM64NotEqual:
315 return rewriteValueARM64_OpARM64NotEqual(v)
316 case OpARM64OR:
317 return rewriteValueARM64_OpARM64OR(v)
318 case OpARM64ORN:
319 return rewriteValueARM64_OpARM64ORN(v)
320 case OpARM64ORNshiftLL:
321 return rewriteValueARM64_OpARM64ORNshiftLL(v)
322 case OpARM64ORNshiftRA:
323 return rewriteValueARM64_OpARM64ORNshiftRA(v)
324 case OpARM64ORNshiftRL:
325 return rewriteValueARM64_OpARM64ORNshiftRL(v)
326 case OpARM64ORNshiftRO:
327 return rewriteValueARM64_OpARM64ORNshiftRO(v)
328 case OpARM64ORconst:
329 return rewriteValueARM64_OpARM64ORconst(v)
330 case OpARM64ORshiftLL:
331 return rewriteValueARM64_OpARM64ORshiftLL(v)
332 case OpARM64ORshiftRA:
333 return rewriteValueARM64_OpARM64ORshiftRA(v)
334 case OpARM64ORshiftRL:
335 return rewriteValueARM64_OpARM64ORshiftRL(v)
336 case OpARM64ORshiftRO:
337 return rewriteValueARM64_OpARM64ORshiftRO(v)
338 case OpARM64REV:
339 return rewriteValueARM64_OpARM64REV(v)
340 case OpARM64REVW:
341 return rewriteValueARM64_OpARM64REVW(v)
342 case OpARM64ROR:
343 return rewriteValueARM64_OpARM64ROR(v)
344 case OpARM64RORW:
345 return rewriteValueARM64_OpARM64RORW(v)
346 case OpARM64RORWconst:
347 return rewriteValueARM64_OpARM64RORWconst(v)
348 case OpARM64RORconst:
349 return rewriteValueARM64_OpARM64RORconst(v)
350 case OpARM64SBCSflags:
351 return rewriteValueARM64_OpARM64SBCSflags(v)
352 case OpARM64SLL:
353 return rewriteValueARM64_OpARM64SLL(v)
354 case OpARM64SLLconst:
355 return rewriteValueARM64_OpARM64SLLconst(v)
356 case OpARM64SRA:
357 return rewriteValueARM64_OpARM64SRA(v)
358 case OpARM64SRAconst:
359 return rewriteValueARM64_OpARM64SRAconst(v)
360 case OpARM64SRL:
361 return rewriteValueARM64_OpARM64SRL(v)
362 case OpARM64SRLconst:
363 return rewriteValueARM64_OpARM64SRLconst(v)
364 case OpARM64STP:
365 return rewriteValueARM64_OpARM64STP(v)
366 case OpARM64SUB:
367 return rewriteValueARM64_OpARM64SUB(v)
368 case OpARM64SUBconst:
369 return rewriteValueARM64_OpARM64SUBconst(v)
370 case OpARM64SUBshiftLL:
371 return rewriteValueARM64_OpARM64SUBshiftLL(v)
372 case OpARM64SUBshiftRA:
373 return rewriteValueARM64_OpARM64SUBshiftRA(v)
374 case OpARM64SUBshiftRL:
375 return rewriteValueARM64_OpARM64SUBshiftRL(v)
376 case OpARM64TST:
377 return rewriteValueARM64_OpARM64TST(v)
378 case OpARM64TSTW:
379 return rewriteValueARM64_OpARM64TSTW(v)
380 case OpARM64TSTWconst:
381 return rewriteValueARM64_OpARM64TSTWconst(v)
382 case OpARM64TSTconst:
383 return rewriteValueARM64_OpARM64TSTconst(v)
384 case OpARM64TSTshiftLL:
385 return rewriteValueARM64_OpARM64TSTshiftLL(v)
386 case OpARM64TSTshiftRA:
387 return rewriteValueARM64_OpARM64TSTshiftRA(v)
388 case OpARM64TSTshiftRL:
389 return rewriteValueARM64_OpARM64TSTshiftRL(v)
390 case OpARM64TSTshiftRO:
391 return rewriteValueARM64_OpARM64TSTshiftRO(v)
392 case OpARM64UBFIZ:
393 return rewriteValueARM64_OpARM64UBFIZ(v)
394 case OpARM64UBFX:
395 return rewriteValueARM64_OpARM64UBFX(v)
396 case OpARM64UDIV:
397 return rewriteValueARM64_OpARM64UDIV(v)
398 case OpARM64UDIVW:
399 return rewriteValueARM64_OpARM64UDIVW(v)
400 case OpARM64UMOD:
401 return rewriteValueARM64_OpARM64UMOD(v)
402 case OpARM64UMODW:
403 return rewriteValueARM64_OpARM64UMODW(v)
404 case OpARM64XOR:
405 return rewriteValueARM64_OpARM64XOR(v)
406 case OpARM64XORconst:
407 return rewriteValueARM64_OpARM64XORconst(v)
408 case OpARM64XORshiftLL:
409 return rewriteValueARM64_OpARM64XORshiftLL(v)
410 case OpARM64XORshiftRA:
411 return rewriteValueARM64_OpARM64XORshiftRA(v)
412 case OpARM64XORshiftRL:
413 return rewriteValueARM64_OpARM64XORshiftRL(v)
414 case OpARM64XORshiftRO:
415 return rewriteValueARM64_OpARM64XORshiftRO(v)
416 case OpAbs:
417 v.Op = OpARM64FABSD
418 return true
419 case OpAdd16:
420 v.Op = OpARM64ADD
421 return true
422 case OpAdd32:
423 v.Op = OpARM64ADD
424 return true
425 case OpAdd32F:
426 v.Op = OpARM64FADDS
427 return true
428 case OpAdd64:
429 v.Op = OpARM64ADD
430 return true
431 case OpAdd64F:
432 v.Op = OpARM64FADDD
433 return true
434 case OpAdd8:
435 v.Op = OpARM64ADD
436 return true
437 case OpAddPtr:
438 v.Op = OpARM64ADD
439 return true
440 case OpAddr:
441 return rewriteValueARM64_OpAddr(v)
442 case OpAnd16:
443 v.Op = OpARM64AND
444 return true
445 case OpAnd32:
446 v.Op = OpARM64AND
447 return true
448 case OpAnd64:
449 v.Op = OpARM64AND
450 return true
451 case OpAnd8:
452 v.Op = OpARM64AND
453 return true
454 case OpAndB:
455 v.Op = OpARM64AND
456 return true
457 case OpAtomicAdd32:
458 v.Op = OpARM64LoweredAtomicAdd32
459 return true
460 case OpAtomicAdd32Variant:
461 v.Op = OpARM64LoweredAtomicAdd32Variant
462 return true
463 case OpAtomicAdd64:
464 v.Op = OpARM64LoweredAtomicAdd64
465 return true
466 case OpAtomicAdd64Variant:
467 v.Op = OpARM64LoweredAtomicAdd64Variant
468 return true
469 case OpAtomicAnd32:
470 return rewriteValueARM64_OpAtomicAnd32(v)
471 case OpAtomicAnd32Variant:
472 return rewriteValueARM64_OpAtomicAnd32Variant(v)
473 case OpAtomicAnd8:
474 return rewriteValueARM64_OpAtomicAnd8(v)
475 case OpAtomicAnd8Variant:
476 return rewriteValueARM64_OpAtomicAnd8Variant(v)
477 case OpAtomicCompareAndSwap32:
478 v.Op = OpARM64LoweredAtomicCas32
479 return true
480 case OpAtomicCompareAndSwap32Variant:
481 v.Op = OpARM64LoweredAtomicCas32Variant
482 return true
483 case OpAtomicCompareAndSwap64:
484 v.Op = OpARM64LoweredAtomicCas64
485 return true
486 case OpAtomicCompareAndSwap64Variant:
487 v.Op = OpARM64LoweredAtomicCas64Variant
488 return true
489 case OpAtomicExchange32:
490 v.Op = OpARM64LoweredAtomicExchange32
491 return true
492 case OpAtomicExchange32Variant:
493 v.Op = OpARM64LoweredAtomicExchange32Variant
494 return true
495 case OpAtomicExchange64:
496 v.Op = OpARM64LoweredAtomicExchange64
497 return true
498 case OpAtomicExchange64Variant:
499 v.Op = OpARM64LoweredAtomicExchange64Variant
500 return true
501 case OpAtomicLoad32:
502 v.Op = OpARM64LDARW
503 return true
504 case OpAtomicLoad64:
505 v.Op = OpARM64LDAR
506 return true
507 case OpAtomicLoad8:
508 v.Op = OpARM64LDARB
509 return true
510 case OpAtomicLoadPtr:
511 v.Op = OpARM64LDAR
512 return true
513 case OpAtomicOr32:
514 return rewriteValueARM64_OpAtomicOr32(v)
515 case OpAtomicOr32Variant:
516 return rewriteValueARM64_OpAtomicOr32Variant(v)
517 case OpAtomicOr8:
518 return rewriteValueARM64_OpAtomicOr8(v)
519 case OpAtomicOr8Variant:
520 return rewriteValueARM64_OpAtomicOr8Variant(v)
521 case OpAtomicStore32:
522 v.Op = OpARM64STLRW
523 return true
524 case OpAtomicStore64:
525 v.Op = OpARM64STLR
526 return true
527 case OpAtomicStore8:
528 v.Op = OpARM64STLRB
529 return true
530 case OpAtomicStorePtrNoWB:
531 v.Op = OpARM64STLR
532 return true
533 case OpAvg64u:
534 return rewriteValueARM64_OpAvg64u(v)
535 case OpBitLen32:
536 return rewriteValueARM64_OpBitLen32(v)
537 case OpBitLen64:
538 return rewriteValueARM64_OpBitLen64(v)
539 case OpBitRev16:
540 return rewriteValueARM64_OpBitRev16(v)
541 case OpBitRev32:
542 v.Op = OpARM64RBITW
543 return true
544 case OpBitRev64:
545 v.Op = OpARM64RBIT
546 return true
547 case OpBitRev8:
548 return rewriteValueARM64_OpBitRev8(v)
549 case OpBswap32:
550 v.Op = OpARM64REVW
551 return true
552 case OpBswap64:
553 v.Op = OpARM64REV
554 return true
555 case OpCeil:
556 v.Op = OpARM64FRINTPD
557 return true
558 case OpClosureCall:
559 v.Op = OpARM64CALLclosure
560 return true
561 case OpCom16:
562 v.Op = OpARM64MVN
563 return true
564 case OpCom32:
565 v.Op = OpARM64MVN
566 return true
567 case OpCom64:
568 v.Op = OpARM64MVN
569 return true
570 case OpCom8:
571 v.Op = OpARM64MVN
572 return true
573 case OpCondSelect:
574 return rewriteValueARM64_OpCondSelect(v)
575 case OpConst16:
576 return rewriteValueARM64_OpConst16(v)
577 case OpConst32:
578 return rewriteValueARM64_OpConst32(v)
579 case OpConst32F:
580 return rewriteValueARM64_OpConst32F(v)
581 case OpConst64:
582 return rewriteValueARM64_OpConst64(v)
583 case OpConst64F:
584 return rewriteValueARM64_OpConst64F(v)
585 case OpConst8:
586 return rewriteValueARM64_OpConst8(v)
587 case OpConstBool:
588 return rewriteValueARM64_OpConstBool(v)
589 case OpConstNil:
590 return rewriteValueARM64_OpConstNil(v)
591 case OpCtz16:
592 return rewriteValueARM64_OpCtz16(v)
593 case OpCtz16NonZero:
594 v.Op = OpCtz32
595 return true
596 case OpCtz32:
597 return rewriteValueARM64_OpCtz32(v)
598 case OpCtz32NonZero:
599 v.Op = OpCtz32
600 return true
601 case OpCtz64:
602 return rewriteValueARM64_OpCtz64(v)
603 case OpCtz64NonZero:
604 v.Op = OpCtz64
605 return true
606 case OpCtz8:
607 return rewriteValueARM64_OpCtz8(v)
608 case OpCtz8NonZero:
609 v.Op = OpCtz32
610 return true
611 case OpCvt32Fto32:
612 v.Op = OpARM64FCVTZSSW
613 return true
614 case OpCvt32Fto32U:
615 v.Op = OpARM64FCVTZUSW
616 return true
617 case OpCvt32Fto64:
618 v.Op = OpARM64FCVTZSS
619 return true
620 case OpCvt32Fto64F:
621 v.Op = OpARM64FCVTSD
622 return true
623 case OpCvt32Fto64U:
624 v.Op = OpARM64FCVTZUS
625 return true
626 case OpCvt32Uto32F:
627 v.Op = OpARM64UCVTFWS
628 return true
629 case OpCvt32Uto64F:
630 v.Op = OpARM64UCVTFWD
631 return true
632 case OpCvt32to32F:
633 v.Op = OpARM64SCVTFWS
634 return true
635 case OpCvt32to64F:
636 v.Op = OpARM64SCVTFWD
637 return true
638 case OpCvt64Fto32:
639 v.Op = OpARM64FCVTZSDW
640 return true
641 case OpCvt64Fto32F:
642 v.Op = OpARM64FCVTDS
643 return true
644 case OpCvt64Fto32U:
645 v.Op = OpARM64FCVTZUDW
646 return true
647 case OpCvt64Fto64:
648 v.Op = OpARM64FCVTZSD
649 return true
650 case OpCvt64Fto64U:
651 v.Op = OpARM64FCVTZUD
652 return true
653 case OpCvt64Uto32F:
654 v.Op = OpARM64UCVTFS
655 return true
656 case OpCvt64Uto64F:
657 v.Op = OpARM64UCVTFD
658 return true
659 case OpCvt64to32F:
660 v.Op = OpARM64SCVTFS
661 return true
662 case OpCvt64to64F:
663 v.Op = OpARM64SCVTFD
664 return true
665 case OpCvtBoolToUint8:
666 v.Op = OpCopy
667 return true
668 case OpDiv16:
669 return rewriteValueARM64_OpDiv16(v)
670 case OpDiv16u:
671 return rewriteValueARM64_OpDiv16u(v)
672 case OpDiv32:
673 return rewriteValueARM64_OpDiv32(v)
674 case OpDiv32F:
675 v.Op = OpARM64FDIVS
676 return true
677 case OpDiv32u:
678 v.Op = OpARM64UDIVW
679 return true
680 case OpDiv64:
681 return rewriteValueARM64_OpDiv64(v)
682 case OpDiv64F:
683 v.Op = OpARM64FDIVD
684 return true
685 case OpDiv64u:
686 v.Op = OpARM64UDIV
687 return true
688 case OpDiv8:
689 return rewriteValueARM64_OpDiv8(v)
690 case OpDiv8u:
691 return rewriteValueARM64_OpDiv8u(v)
692 case OpEq16:
693 return rewriteValueARM64_OpEq16(v)
694 case OpEq32:
695 return rewriteValueARM64_OpEq32(v)
696 case OpEq32F:
697 return rewriteValueARM64_OpEq32F(v)
698 case OpEq64:
699 return rewriteValueARM64_OpEq64(v)
700 case OpEq64F:
701 return rewriteValueARM64_OpEq64F(v)
702 case OpEq8:
703 return rewriteValueARM64_OpEq8(v)
704 case OpEqB:
705 return rewriteValueARM64_OpEqB(v)
706 case OpEqPtr:
707 return rewriteValueARM64_OpEqPtr(v)
708 case OpFMA:
709 return rewriteValueARM64_OpFMA(v)
710 case OpFloor:
711 v.Op = OpARM64FRINTMD
712 return true
713 case OpGetCallerPC:
714 v.Op = OpARM64LoweredGetCallerPC
715 return true
716 case OpGetCallerSP:
717 v.Op = OpARM64LoweredGetCallerSP
718 return true
719 case OpGetClosurePtr:
720 v.Op = OpARM64LoweredGetClosurePtr
721 return true
722 case OpHmul32:
723 return rewriteValueARM64_OpHmul32(v)
724 case OpHmul32u:
725 return rewriteValueARM64_OpHmul32u(v)
726 case OpHmul64:
727 v.Op = OpARM64MULH
728 return true
729 case OpHmul64u:
730 v.Op = OpARM64UMULH
731 return true
732 case OpInterCall:
733 v.Op = OpARM64CALLinter
734 return true
735 case OpIsInBounds:
736 return rewriteValueARM64_OpIsInBounds(v)
737 case OpIsNonNil:
738 return rewriteValueARM64_OpIsNonNil(v)
739 case OpIsSliceInBounds:
740 return rewriteValueARM64_OpIsSliceInBounds(v)
741 case OpLeq16:
742 return rewriteValueARM64_OpLeq16(v)
743 case OpLeq16U:
744 return rewriteValueARM64_OpLeq16U(v)
745 case OpLeq32:
746 return rewriteValueARM64_OpLeq32(v)
747 case OpLeq32F:
748 return rewriteValueARM64_OpLeq32F(v)
749 case OpLeq32U:
750 return rewriteValueARM64_OpLeq32U(v)
751 case OpLeq64:
752 return rewriteValueARM64_OpLeq64(v)
753 case OpLeq64F:
754 return rewriteValueARM64_OpLeq64F(v)
755 case OpLeq64U:
756 return rewriteValueARM64_OpLeq64U(v)
757 case OpLeq8:
758 return rewriteValueARM64_OpLeq8(v)
759 case OpLeq8U:
760 return rewriteValueARM64_OpLeq8U(v)
761 case OpLess16:
762 return rewriteValueARM64_OpLess16(v)
763 case OpLess16U:
764 return rewriteValueARM64_OpLess16U(v)
765 case OpLess32:
766 return rewriteValueARM64_OpLess32(v)
767 case OpLess32F:
768 return rewriteValueARM64_OpLess32F(v)
769 case OpLess32U:
770 return rewriteValueARM64_OpLess32U(v)
771 case OpLess64:
772 return rewriteValueARM64_OpLess64(v)
773 case OpLess64F:
774 return rewriteValueARM64_OpLess64F(v)
775 case OpLess64U:
776 return rewriteValueARM64_OpLess64U(v)
777 case OpLess8:
778 return rewriteValueARM64_OpLess8(v)
779 case OpLess8U:
780 return rewriteValueARM64_OpLess8U(v)
781 case OpLoad:
782 return rewriteValueARM64_OpLoad(v)
783 case OpLocalAddr:
784 return rewriteValueARM64_OpLocalAddr(v)
785 case OpLsh16x16:
786 return rewriteValueARM64_OpLsh16x16(v)
787 case OpLsh16x32:
788 return rewriteValueARM64_OpLsh16x32(v)
789 case OpLsh16x64:
790 return rewriteValueARM64_OpLsh16x64(v)
791 case OpLsh16x8:
792 return rewriteValueARM64_OpLsh16x8(v)
793 case OpLsh32x16:
794 return rewriteValueARM64_OpLsh32x16(v)
795 case OpLsh32x32:
796 return rewriteValueARM64_OpLsh32x32(v)
797 case OpLsh32x64:
798 return rewriteValueARM64_OpLsh32x64(v)
799 case OpLsh32x8:
800 return rewriteValueARM64_OpLsh32x8(v)
801 case OpLsh64x16:
802 return rewriteValueARM64_OpLsh64x16(v)
803 case OpLsh64x32:
804 return rewriteValueARM64_OpLsh64x32(v)
805 case OpLsh64x64:
806 return rewriteValueARM64_OpLsh64x64(v)
807 case OpLsh64x8:
808 return rewriteValueARM64_OpLsh64x8(v)
809 case OpLsh8x16:
810 return rewriteValueARM64_OpLsh8x16(v)
811 case OpLsh8x32:
812 return rewriteValueARM64_OpLsh8x32(v)
813 case OpLsh8x64:
814 return rewriteValueARM64_OpLsh8x64(v)
815 case OpLsh8x8:
816 return rewriteValueARM64_OpLsh8x8(v)
817 case OpMod16:
818 return rewriteValueARM64_OpMod16(v)
819 case OpMod16u:
820 return rewriteValueARM64_OpMod16u(v)
821 case OpMod32:
822 return rewriteValueARM64_OpMod32(v)
823 case OpMod32u:
824 v.Op = OpARM64UMODW
825 return true
826 case OpMod64:
827 return rewriteValueARM64_OpMod64(v)
828 case OpMod64u:
829 v.Op = OpARM64UMOD
830 return true
831 case OpMod8:
832 return rewriteValueARM64_OpMod8(v)
833 case OpMod8u:
834 return rewriteValueARM64_OpMod8u(v)
835 case OpMove:
836 return rewriteValueARM64_OpMove(v)
837 case OpMul16:
838 v.Op = OpARM64MULW
839 return true
840 case OpMul32:
841 v.Op = OpARM64MULW
842 return true
843 case OpMul32F:
844 v.Op = OpARM64FMULS
845 return true
846 case OpMul64:
847 v.Op = OpARM64MUL
848 return true
849 case OpMul64F:
850 v.Op = OpARM64FMULD
851 return true
852 case OpMul64uhilo:
853 v.Op = OpARM64LoweredMuluhilo
854 return true
855 case OpMul8:
856 v.Op = OpARM64MULW
857 return true
858 case OpNeg16:
859 v.Op = OpARM64NEG
860 return true
861 case OpNeg32:
862 v.Op = OpARM64NEG
863 return true
864 case OpNeg32F:
865 v.Op = OpARM64FNEGS
866 return true
867 case OpNeg64:
868 v.Op = OpARM64NEG
869 return true
870 case OpNeg64F:
871 v.Op = OpARM64FNEGD
872 return true
873 case OpNeg8:
874 v.Op = OpARM64NEG
875 return true
876 case OpNeq16:
877 return rewriteValueARM64_OpNeq16(v)
878 case OpNeq32:
879 return rewriteValueARM64_OpNeq32(v)
880 case OpNeq32F:
881 return rewriteValueARM64_OpNeq32F(v)
882 case OpNeq64:
883 return rewriteValueARM64_OpNeq64(v)
884 case OpNeq64F:
885 return rewriteValueARM64_OpNeq64F(v)
886 case OpNeq8:
887 return rewriteValueARM64_OpNeq8(v)
888 case OpNeqB:
889 v.Op = OpARM64XOR
890 return true
891 case OpNeqPtr:
892 return rewriteValueARM64_OpNeqPtr(v)
893 case OpNilCheck:
894 v.Op = OpARM64LoweredNilCheck
895 return true
896 case OpNot:
897 return rewriteValueARM64_OpNot(v)
898 case OpOffPtr:
899 return rewriteValueARM64_OpOffPtr(v)
900 case OpOr16:
901 v.Op = OpARM64OR
902 return true
903 case OpOr32:
904 v.Op = OpARM64OR
905 return true
906 case OpOr64:
907 v.Op = OpARM64OR
908 return true
909 case OpOr8:
910 v.Op = OpARM64OR
911 return true
912 case OpOrB:
913 v.Op = OpARM64OR
914 return true
915 case OpPanicBounds:
916 return rewriteValueARM64_OpPanicBounds(v)
917 case OpPopCount16:
918 return rewriteValueARM64_OpPopCount16(v)
919 case OpPopCount32:
920 return rewriteValueARM64_OpPopCount32(v)
921 case OpPopCount64:
922 return rewriteValueARM64_OpPopCount64(v)
923 case OpPrefetchCache:
924 return rewriteValueARM64_OpPrefetchCache(v)
925 case OpPrefetchCacheStreamed:
926 return rewriteValueARM64_OpPrefetchCacheStreamed(v)
927 case OpPubBarrier:
928 return rewriteValueARM64_OpPubBarrier(v)
929 case OpRotateLeft16:
930 return rewriteValueARM64_OpRotateLeft16(v)
931 case OpRotateLeft32:
932 return rewriteValueARM64_OpRotateLeft32(v)
933 case OpRotateLeft64:
934 return rewriteValueARM64_OpRotateLeft64(v)
935 case OpRotateLeft8:
936 return rewriteValueARM64_OpRotateLeft8(v)
937 case OpRound:
938 v.Op = OpARM64FRINTAD
939 return true
940 case OpRound32F:
941 v.Op = OpARM64LoweredRound32F
942 return true
943 case OpRound64F:
944 v.Op = OpARM64LoweredRound64F
945 return true
946 case OpRoundToEven:
947 v.Op = OpARM64FRINTND
948 return true
949 case OpRsh16Ux16:
950 return rewriteValueARM64_OpRsh16Ux16(v)
951 case OpRsh16Ux32:
952 return rewriteValueARM64_OpRsh16Ux32(v)
953 case OpRsh16Ux64:
954 return rewriteValueARM64_OpRsh16Ux64(v)
955 case OpRsh16Ux8:
956 return rewriteValueARM64_OpRsh16Ux8(v)
957 case OpRsh16x16:
958 return rewriteValueARM64_OpRsh16x16(v)
959 case OpRsh16x32:
960 return rewriteValueARM64_OpRsh16x32(v)
961 case OpRsh16x64:
962 return rewriteValueARM64_OpRsh16x64(v)
963 case OpRsh16x8:
964 return rewriteValueARM64_OpRsh16x8(v)
965 case OpRsh32Ux16:
966 return rewriteValueARM64_OpRsh32Ux16(v)
967 case OpRsh32Ux32:
968 return rewriteValueARM64_OpRsh32Ux32(v)
969 case OpRsh32Ux64:
970 return rewriteValueARM64_OpRsh32Ux64(v)
971 case OpRsh32Ux8:
972 return rewriteValueARM64_OpRsh32Ux8(v)
973 case OpRsh32x16:
974 return rewriteValueARM64_OpRsh32x16(v)
975 case OpRsh32x32:
976 return rewriteValueARM64_OpRsh32x32(v)
977 case OpRsh32x64:
978 return rewriteValueARM64_OpRsh32x64(v)
979 case OpRsh32x8:
980 return rewriteValueARM64_OpRsh32x8(v)
981 case OpRsh64Ux16:
982 return rewriteValueARM64_OpRsh64Ux16(v)
983 case OpRsh64Ux32:
984 return rewriteValueARM64_OpRsh64Ux32(v)
985 case OpRsh64Ux64:
986 return rewriteValueARM64_OpRsh64Ux64(v)
987 case OpRsh64Ux8:
988 return rewriteValueARM64_OpRsh64Ux8(v)
989 case OpRsh64x16:
990 return rewriteValueARM64_OpRsh64x16(v)
991 case OpRsh64x32:
992 return rewriteValueARM64_OpRsh64x32(v)
993 case OpRsh64x64:
994 return rewriteValueARM64_OpRsh64x64(v)
995 case OpRsh64x8:
996 return rewriteValueARM64_OpRsh64x8(v)
997 case OpRsh8Ux16:
998 return rewriteValueARM64_OpRsh8Ux16(v)
999 case OpRsh8Ux32:
1000 return rewriteValueARM64_OpRsh8Ux32(v)
1001 case OpRsh8Ux64:
1002 return rewriteValueARM64_OpRsh8Ux64(v)
1003 case OpRsh8Ux8:
1004 return rewriteValueARM64_OpRsh8Ux8(v)
1005 case OpRsh8x16:
1006 return rewriteValueARM64_OpRsh8x16(v)
1007 case OpRsh8x32:
1008 return rewriteValueARM64_OpRsh8x32(v)
1009 case OpRsh8x64:
1010 return rewriteValueARM64_OpRsh8x64(v)
1011 case OpRsh8x8:
1012 return rewriteValueARM64_OpRsh8x8(v)
1013 case OpSelect0:
1014 return rewriteValueARM64_OpSelect0(v)
1015 case OpSelect1:
1016 return rewriteValueARM64_OpSelect1(v)
1017 case OpSelectN:
1018 return rewriteValueARM64_OpSelectN(v)
1019 case OpSignExt16to32:
1020 v.Op = OpARM64MOVHreg
1021 return true
1022 case OpSignExt16to64:
1023 v.Op = OpARM64MOVHreg
1024 return true
1025 case OpSignExt32to64:
1026 v.Op = OpARM64MOVWreg
1027 return true
1028 case OpSignExt8to16:
1029 v.Op = OpARM64MOVBreg
1030 return true
1031 case OpSignExt8to32:
1032 v.Op = OpARM64MOVBreg
1033 return true
1034 case OpSignExt8to64:
1035 v.Op = OpARM64MOVBreg
1036 return true
1037 case OpSlicemask:
1038 return rewriteValueARM64_OpSlicemask(v)
1039 case OpSqrt:
1040 v.Op = OpARM64FSQRTD
1041 return true
1042 case OpSqrt32:
1043 v.Op = OpARM64FSQRTS
1044 return true
1045 case OpStaticCall:
1046 v.Op = OpARM64CALLstatic
1047 return true
1048 case OpStore:
1049 return rewriteValueARM64_OpStore(v)
1050 case OpSub16:
1051 v.Op = OpARM64SUB
1052 return true
1053 case OpSub32:
1054 v.Op = OpARM64SUB
1055 return true
1056 case OpSub32F:
1057 v.Op = OpARM64FSUBS
1058 return true
1059 case OpSub64:
1060 v.Op = OpARM64SUB
1061 return true
1062 case OpSub64F:
1063 v.Op = OpARM64FSUBD
1064 return true
1065 case OpSub8:
1066 v.Op = OpARM64SUB
1067 return true
1068 case OpSubPtr:
1069 v.Op = OpARM64SUB
1070 return true
1071 case OpTailCall:
1072 v.Op = OpARM64CALLtail
1073 return true
1074 case OpTrunc:
1075 v.Op = OpARM64FRINTZD
1076 return true
1077 case OpTrunc16to8:
1078 v.Op = OpCopy
1079 return true
1080 case OpTrunc32to16:
1081 v.Op = OpCopy
1082 return true
1083 case OpTrunc32to8:
1084 v.Op = OpCopy
1085 return true
1086 case OpTrunc64to16:
1087 v.Op = OpCopy
1088 return true
1089 case OpTrunc64to32:
1090 v.Op = OpCopy
1091 return true
1092 case OpTrunc64to8:
1093 v.Op = OpCopy
1094 return true
1095 case OpWB:
1096 v.Op = OpARM64LoweredWB
1097 return true
1098 case OpXor16:
1099 v.Op = OpARM64XOR
1100 return true
1101 case OpXor32:
1102 v.Op = OpARM64XOR
1103 return true
1104 case OpXor64:
1105 v.Op = OpARM64XOR
1106 return true
1107 case OpXor8:
1108 v.Op = OpARM64XOR
1109 return true
1110 case OpZero:
1111 return rewriteValueARM64_OpZero(v)
1112 case OpZeroExt16to32:
1113 v.Op = OpARM64MOVHUreg
1114 return true
1115 case OpZeroExt16to64:
1116 v.Op = OpARM64MOVHUreg
1117 return true
1118 case OpZeroExt32to64:
1119 v.Op = OpARM64MOVWUreg
1120 return true
1121 case OpZeroExt8to16:
1122 v.Op = OpARM64MOVBUreg
1123 return true
1124 case OpZeroExt8to32:
1125 v.Op = OpARM64MOVBUreg
1126 return true
1127 case OpZeroExt8to64:
1128 v.Op = OpARM64MOVBUreg
1129 return true
1130 }
1131 return false
1132 }
1133 func rewriteValueARM64_OpARM64ADCSflags(v *Value) bool {
1134 v_2 := v.Args[2]
1135 v_1 := v.Args[1]
1136 v_0 := v.Args[0]
1137 b := v.Block
1138 typ := &b.Func.Config.Types
1139
1140
1141 for {
1142 x := v_0
1143 y := v_1
1144 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
1145 break
1146 }
1147 v_2_0 := v_2.Args[0]
1148 if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
1149 break
1150 }
1151 v_2_0_0 := v_2_0.Args[0]
1152 if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 {
1153 break
1154 }
1155 c := v_2_0_0.Args[0]
1156 v.reset(OpARM64ADCSflags)
1157 v.AddArg3(x, y, c)
1158 return true
1159 }
1160
1161
1162 for {
1163 x := v_0
1164 y := v_1
1165 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
1166 break
1167 }
1168 v_2_0 := v_2.Args[0]
1169 if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
1170 break
1171 }
1172 v_2_0_0 := v_2_0.Args[0]
1173 if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
1174 break
1175 }
1176 v.reset(OpARM64ADDSflags)
1177 v.AddArg2(x, y)
1178 return true
1179 }
1180 return false
1181 }
1182 func rewriteValueARM64_OpARM64ADD(v *Value) bool {
1183 v_1 := v.Args[1]
1184 v_0 := v.Args[0]
1185 b := v.Block
1186 typ := &b.Func.Config.Types
1187
1188
1189 for {
1190 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1191 x := v_0
1192 if v_1.Op != OpARM64MOVDconst {
1193 continue
1194 }
1195 c := auxIntToInt64(v_1.AuxInt)
1196 v.reset(OpARM64ADDconst)
1197 v.AuxInt = int64ToAuxInt(c)
1198 v.AddArg(x)
1199 return true
1200 }
1201 break
1202 }
1203
1204
1205
1206 for {
1207 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1208 a := v_0
1209 l := v_1
1210 if l.Op != OpARM64MUL {
1211 continue
1212 }
1213 y := l.Args[1]
1214 x := l.Args[0]
1215 if !(l.Uses == 1 && clobber(l)) {
1216 continue
1217 }
1218 v.reset(OpARM64MADD)
1219 v.AddArg3(a, x, y)
1220 return true
1221 }
1222 break
1223 }
1224
1225
1226
1227 for {
1228 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1229 a := v_0
1230 l := v_1
1231 if l.Op != OpARM64MNEG {
1232 continue
1233 }
1234 y := l.Args[1]
1235 x := l.Args[0]
1236 if !(l.Uses == 1 && clobber(l)) {
1237 continue
1238 }
1239 v.reset(OpARM64MSUB)
1240 v.AddArg3(a, x, y)
1241 return true
1242 }
1243 break
1244 }
1245
1246
1247
1248 for {
1249 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1250 a := v_0
1251 l := v_1
1252 if l.Op != OpARM64MULW {
1253 continue
1254 }
1255 y := l.Args[1]
1256 x := l.Args[0]
1257 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
1258 continue
1259 }
1260 v.reset(OpARM64MADDW)
1261 v.AddArg3(a, x, y)
1262 return true
1263 }
1264 break
1265 }
1266
1267
1268
1269 for {
1270 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1271 a := v_0
1272 l := v_1
1273 if l.Op != OpARM64MNEGW {
1274 continue
1275 }
1276 y := l.Args[1]
1277 x := l.Args[0]
1278 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
1279 continue
1280 }
1281 v.reset(OpARM64MSUBW)
1282 v.AddArg3(a, x, y)
1283 return true
1284 }
1285 break
1286 }
1287
1288
1289 for {
1290 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1291 x := v_0
1292 if v_1.Op != OpARM64NEG {
1293 continue
1294 }
1295 y := v_1.Args[0]
1296 v.reset(OpARM64SUB)
1297 v.AddArg2(x, y)
1298 return true
1299 }
1300 break
1301 }
1302
1303
1304
1305 for {
1306 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1307 x0 := v_0
1308 x1 := v_1
1309 if x1.Op != OpARM64SLLconst {
1310 continue
1311 }
1312 c := auxIntToInt64(x1.AuxInt)
1313 y := x1.Args[0]
1314 if !(clobberIfDead(x1)) {
1315 continue
1316 }
1317 v.reset(OpARM64ADDshiftLL)
1318 v.AuxInt = int64ToAuxInt(c)
1319 v.AddArg2(x0, y)
1320 return true
1321 }
1322 break
1323 }
1324
1325
1326
1327 for {
1328 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1329 x0 := v_0
1330 x1 := v_1
1331 if x1.Op != OpARM64SRLconst {
1332 continue
1333 }
1334 c := auxIntToInt64(x1.AuxInt)
1335 y := x1.Args[0]
1336 if !(clobberIfDead(x1)) {
1337 continue
1338 }
1339 v.reset(OpARM64ADDshiftRL)
1340 v.AuxInt = int64ToAuxInt(c)
1341 v.AddArg2(x0, y)
1342 return true
1343 }
1344 break
1345 }
1346
1347
1348
1349 for {
1350 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1351 x0 := v_0
1352 x1 := v_1
1353 if x1.Op != OpARM64SRAconst {
1354 continue
1355 }
1356 c := auxIntToInt64(x1.AuxInt)
1357 y := x1.Args[0]
1358 if !(clobberIfDead(x1)) {
1359 continue
1360 }
1361 v.reset(OpARM64ADDshiftRA)
1362 v.AuxInt = int64ToAuxInt(c)
1363 v.AddArg2(x0, y)
1364 return true
1365 }
1366 break
1367 }
1368
1369
1370
1371 for {
1372 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1373 if v_0.Op != OpARM64SLL {
1374 continue
1375 }
1376 _ = v_0.Args[1]
1377 x := v_0.Args[0]
1378 v_0_1 := v_0.Args[1]
1379 if v_0_1.Op != OpARM64ANDconst {
1380 continue
1381 }
1382 t := v_0_1.Type
1383 if auxIntToInt64(v_0_1.AuxInt) != 63 {
1384 continue
1385 }
1386 y := v_0_1.Args[0]
1387 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
1388 continue
1389 }
1390 cc := auxIntToOp(v_1.AuxInt)
1391 _ = v_1.Args[1]
1392 v_1_0 := v_1.Args[0]
1393 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 {
1394 continue
1395 }
1396 _ = v_1_0.Args[1]
1397 if x != v_1_0.Args[0] {
1398 continue
1399 }
1400 v_1_0_1 := v_1_0.Args[1]
1401 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
1402 continue
1403 }
1404 _ = v_1_0_1.Args[1]
1405 v_1_0_1_0 := v_1_0_1.Args[0]
1406 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 64 {
1407 continue
1408 }
1409 v_1_0_1_1 := v_1_0_1.Args[1]
1410 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 63 || y != v_1_0_1_1.Args[0] {
1411 continue
1412 }
1413 v_1_1 := v_1.Args[1]
1414 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
1415 continue
1416 }
1417 v_1_1_0 := v_1_1.Args[0]
1418 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
1419 continue
1420 }
1421 _ = v_1_1_0.Args[1]
1422 v_1_1_0_0 := v_1_1_0.Args[0]
1423 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 64 {
1424 continue
1425 }
1426 v_1_1_0_1 := v_1_1_0.Args[1]
1427 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
1428 continue
1429 }
1430 v.reset(OpARM64ROR)
1431 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
1432 v0.AddArg(y)
1433 v.AddArg2(x, v0)
1434 return true
1435 }
1436 break
1437 }
1438
1439
1440
1441 for {
1442 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1443 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 {
1444 continue
1445 }
1446 _ = v_0.Args[1]
1447 x := v_0.Args[0]
1448 v_0_1 := v_0.Args[1]
1449 if v_0_1.Op != OpARM64ANDconst {
1450 continue
1451 }
1452 t := v_0_1.Type
1453 if auxIntToInt64(v_0_1.AuxInt) != 63 {
1454 continue
1455 }
1456 y := v_0_1.Args[0]
1457 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
1458 continue
1459 }
1460 cc := auxIntToOp(v_1.AuxInt)
1461 _ = v_1.Args[1]
1462 v_1_0 := v_1.Args[0]
1463 if v_1_0.Op != OpARM64SLL {
1464 continue
1465 }
1466 _ = v_1_0.Args[1]
1467 if x != v_1_0.Args[0] {
1468 continue
1469 }
1470 v_1_0_1 := v_1_0.Args[1]
1471 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
1472 continue
1473 }
1474 _ = v_1_0_1.Args[1]
1475 v_1_0_1_0 := v_1_0_1.Args[0]
1476 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 64 {
1477 continue
1478 }
1479 v_1_0_1_1 := v_1_0_1.Args[1]
1480 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 63 || y != v_1_0_1_1.Args[0] {
1481 continue
1482 }
1483 v_1_1 := v_1.Args[1]
1484 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
1485 continue
1486 }
1487 v_1_1_0 := v_1_1.Args[0]
1488 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
1489 continue
1490 }
1491 _ = v_1_1_0.Args[1]
1492 v_1_1_0_0 := v_1_1_0.Args[0]
1493 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 64 {
1494 continue
1495 }
1496 v_1_1_0_1 := v_1_1_0.Args[1]
1497 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
1498 continue
1499 }
1500 v.reset(OpARM64ROR)
1501 v.AddArg2(x, y)
1502 return true
1503 }
1504 break
1505 }
1506
1507
1508
1509 for {
1510 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1511 if v_0.Op != OpARM64SLL {
1512 continue
1513 }
1514 _ = v_0.Args[1]
1515 x := v_0.Args[0]
1516 v_0_1 := v_0.Args[1]
1517 if v_0_1.Op != OpARM64ANDconst {
1518 continue
1519 }
1520 t := v_0_1.Type
1521 if auxIntToInt64(v_0_1.AuxInt) != 31 {
1522 continue
1523 }
1524 y := v_0_1.Args[0]
1525 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
1526 continue
1527 }
1528 cc := auxIntToOp(v_1.AuxInt)
1529 _ = v_1.Args[1]
1530 v_1_0 := v_1.Args[0]
1531 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 {
1532 continue
1533 }
1534 _ = v_1_0.Args[1]
1535 v_1_0_0 := v_1_0.Args[0]
1536 if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] {
1537 continue
1538 }
1539 v_1_0_1 := v_1_0.Args[1]
1540 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
1541 continue
1542 }
1543 _ = v_1_0_1.Args[1]
1544 v_1_0_1_0 := v_1_0_1.Args[0]
1545 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 32 {
1546 continue
1547 }
1548 v_1_0_1_1 := v_1_0_1.Args[1]
1549 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 31 || y != v_1_0_1_1.Args[0] {
1550 continue
1551 }
1552 v_1_1 := v_1.Args[1]
1553 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
1554 continue
1555 }
1556 v_1_1_0 := v_1_1.Args[0]
1557 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
1558 continue
1559 }
1560 _ = v_1_1_0.Args[1]
1561 v_1_1_0_0 := v_1_1_0.Args[0]
1562 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 32 {
1563 continue
1564 }
1565 v_1_1_0_1 := v_1_1_0.Args[1]
1566 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
1567 continue
1568 }
1569 v.reset(OpARM64RORW)
1570 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
1571 v0.AddArg(y)
1572 v.AddArg2(x, v0)
1573 return true
1574 }
1575 break
1576 }
1577
1578
1579
1580 for {
1581 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1582 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 {
1583 continue
1584 }
1585 _ = v_0.Args[1]
1586 v_0_0 := v_0.Args[0]
1587 if v_0_0.Op != OpARM64MOVWUreg {
1588 continue
1589 }
1590 x := v_0_0.Args[0]
1591 v_0_1 := v_0.Args[1]
1592 if v_0_1.Op != OpARM64ANDconst {
1593 continue
1594 }
1595 t := v_0_1.Type
1596 if auxIntToInt64(v_0_1.AuxInt) != 31 {
1597 continue
1598 }
1599 y := v_0_1.Args[0]
1600 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
1601 continue
1602 }
1603 cc := auxIntToOp(v_1.AuxInt)
1604 _ = v_1.Args[1]
1605 v_1_0 := v_1.Args[0]
1606 if v_1_0.Op != OpARM64SLL {
1607 continue
1608 }
1609 _ = v_1_0.Args[1]
1610 if x != v_1_0.Args[0] {
1611 continue
1612 }
1613 v_1_0_1 := v_1_0.Args[1]
1614 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
1615 continue
1616 }
1617 _ = v_1_0_1.Args[1]
1618 v_1_0_1_0 := v_1_0_1.Args[0]
1619 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 32 {
1620 continue
1621 }
1622 v_1_0_1_1 := v_1_0_1.Args[1]
1623 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 31 || y != v_1_0_1_1.Args[0] {
1624 continue
1625 }
1626 v_1_1 := v_1.Args[1]
1627 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
1628 continue
1629 }
1630 v_1_1_0 := v_1_1.Args[0]
1631 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
1632 continue
1633 }
1634 _ = v_1_1_0.Args[1]
1635 v_1_1_0_0 := v_1_1_0.Args[0]
1636 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 32 {
1637 continue
1638 }
1639 v_1_1_0_1 := v_1_1_0.Args[1]
1640 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
1641 continue
1642 }
1643 v.reset(OpARM64RORW)
1644 v.AddArg2(x, y)
1645 return true
1646 }
1647 break
1648 }
1649 return false
1650 }
1651 func rewriteValueARM64_OpARM64ADDconst(v *Value) bool {
1652 v_0 := v.Args[0]
1653
1654
1655
1656 for {
1657 off1 := auxIntToInt64(v.AuxInt)
1658 if v_0.Op != OpARM64MOVDaddr {
1659 break
1660 }
1661 off2 := auxIntToInt32(v_0.AuxInt)
1662 sym := auxToSym(v_0.Aux)
1663 ptr := v_0.Args[0]
1664 if !(is32Bit(off1 + int64(off2))) {
1665 break
1666 }
1667 v.reset(OpARM64MOVDaddr)
1668 v.AuxInt = int32ToAuxInt(int32(off1) + off2)
1669 v.Aux = symToAux(sym)
1670 v.AddArg(ptr)
1671 return true
1672 }
1673
1674
1675 for {
1676 if auxIntToInt64(v.AuxInt) != 0 {
1677 break
1678 }
1679 x := v_0
1680 v.copyOf(x)
1681 return true
1682 }
1683
1684
1685 for {
1686 c := auxIntToInt64(v.AuxInt)
1687 if v_0.Op != OpARM64MOVDconst {
1688 break
1689 }
1690 d := auxIntToInt64(v_0.AuxInt)
1691 v.reset(OpARM64MOVDconst)
1692 v.AuxInt = int64ToAuxInt(c + d)
1693 return true
1694 }
1695
1696
1697 for {
1698 c := auxIntToInt64(v.AuxInt)
1699 if v_0.Op != OpARM64ADDconst {
1700 break
1701 }
1702 d := auxIntToInt64(v_0.AuxInt)
1703 x := v_0.Args[0]
1704 v.reset(OpARM64ADDconst)
1705 v.AuxInt = int64ToAuxInt(c + d)
1706 v.AddArg(x)
1707 return true
1708 }
1709
1710
1711 for {
1712 c := auxIntToInt64(v.AuxInt)
1713 if v_0.Op != OpARM64SUBconst {
1714 break
1715 }
1716 d := auxIntToInt64(v_0.AuxInt)
1717 x := v_0.Args[0]
1718 v.reset(OpARM64ADDconst)
1719 v.AuxInt = int64ToAuxInt(c - d)
1720 v.AddArg(x)
1721 return true
1722 }
1723 return false
1724 }
1725 func rewriteValueARM64_OpARM64ADDshiftLL(v *Value) bool {
1726 v_1 := v.Args[1]
1727 v_0 := v.Args[0]
1728 b := v.Block
1729 typ := &b.Func.Config.Types
1730
1731
1732 for {
1733 d := auxIntToInt64(v.AuxInt)
1734 if v_0.Op != OpARM64MOVDconst {
1735 break
1736 }
1737 c := auxIntToInt64(v_0.AuxInt)
1738 x := v_1
1739 v.reset(OpARM64ADDconst)
1740 v.AuxInt = int64ToAuxInt(c)
1741 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
1742 v0.AuxInt = int64ToAuxInt(d)
1743 v0.AddArg(x)
1744 v.AddArg(v0)
1745 return true
1746 }
1747
1748
1749 for {
1750 d := auxIntToInt64(v.AuxInt)
1751 x := v_0
1752 if v_1.Op != OpARM64MOVDconst {
1753 break
1754 }
1755 c := auxIntToInt64(v_1.AuxInt)
1756 v.reset(OpARM64ADDconst)
1757 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
1758 v.AddArg(x)
1759 return true
1760 }
1761
1762
1763 for {
1764 c := auxIntToInt64(v.AuxInt)
1765 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
1766 break
1767 }
1768 x := v_0.Args[0]
1769 if x != v_1 {
1770 break
1771 }
1772 v.reset(OpARM64RORconst)
1773 v.AuxInt = int64ToAuxInt(64 - c)
1774 v.AddArg(x)
1775 return true
1776 }
1777
1778
1779
1780 for {
1781 t := v.Type
1782 c := auxIntToInt64(v.AuxInt)
1783 if v_0.Op != OpARM64UBFX {
1784 break
1785 }
1786 bfc := auxIntToArm64BitField(v_0.AuxInt)
1787 x := v_0.Args[0]
1788 if x != v_1 || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
1789 break
1790 }
1791 v.reset(OpARM64RORWconst)
1792 v.AuxInt = int64ToAuxInt(32 - c)
1793 v.AddArg(x)
1794 return true
1795 }
1796
1797
1798 for {
1799 if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
1800 break
1801 }
1802 x := v_0.Args[0]
1803 if x != v_1 {
1804 break
1805 }
1806 v.reset(OpARM64REV16W)
1807 v.AddArg(x)
1808 return true
1809 }
1810
1811
1812
1813 for {
1814 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
1815 break
1816 }
1817 v_0_0 := v_0.Args[0]
1818 if v_0_0.Op != OpARM64ANDconst {
1819 break
1820 }
1821 c1 := auxIntToInt64(v_0_0.AuxInt)
1822 x := v_0_0.Args[0]
1823 if v_1.Op != OpARM64ANDconst {
1824 break
1825 }
1826 c2 := auxIntToInt64(v_1.AuxInt)
1827 if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
1828 break
1829 }
1830 v.reset(OpARM64REV16W)
1831 v.AddArg(x)
1832 return true
1833 }
1834
1835
1836
1837 for {
1838 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
1839 break
1840 }
1841 v_0_0 := v_0.Args[0]
1842 if v_0_0.Op != OpARM64ANDconst {
1843 break
1844 }
1845 c1 := auxIntToInt64(v_0_0.AuxInt)
1846 x := v_0_0.Args[0]
1847 if v_1.Op != OpARM64ANDconst {
1848 break
1849 }
1850 c2 := auxIntToInt64(v_1.AuxInt)
1851 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
1852 break
1853 }
1854 v.reset(OpARM64REV16)
1855 v.AddArg(x)
1856 return true
1857 }
1858
1859
1860
1861 for {
1862 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
1863 break
1864 }
1865 v_0_0 := v_0.Args[0]
1866 if v_0_0.Op != OpARM64ANDconst {
1867 break
1868 }
1869 c1 := auxIntToInt64(v_0_0.AuxInt)
1870 x := v_0_0.Args[0]
1871 if v_1.Op != OpARM64ANDconst {
1872 break
1873 }
1874 c2 := auxIntToInt64(v_1.AuxInt)
1875 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
1876 break
1877 }
1878 v.reset(OpARM64REV16)
1879 v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
1880 v0.AuxInt = int64ToAuxInt(0xffffffff)
1881 v0.AddArg(x)
1882 v.AddArg(v0)
1883 return true
1884 }
1885
1886
1887 for {
1888 c := auxIntToInt64(v.AuxInt)
1889 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
1890 break
1891 }
1892 x := v_0.Args[0]
1893 x2 := v_1
1894 v.reset(OpARM64EXTRconst)
1895 v.AuxInt = int64ToAuxInt(64 - c)
1896 v.AddArg2(x2, x)
1897 return true
1898 }
1899
1900
1901
1902 for {
1903 t := v.Type
1904 c := auxIntToInt64(v.AuxInt)
1905 if v_0.Op != OpARM64UBFX {
1906 break
1907 }
1908 bfc := auxIntToArm64BitField(v_0.AuxInt)
1909 x := v_0.Args[0]
1910 x2 := v_1
1911 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
1912 break
1913 }
1914 v.reset(OpARM64EXTRWconst)
1915 v.AuxInt = int64ToAuxInt(32 - c)
1916 v.AddArg2(x2, x)
1917 return true
1918 }
1919 return false
1920 }
1921 func rewriteValueARM64_OpARM64ADDshiftRA(v *Value) bool {
1922 v_1 := v.Args[1]
1923 v_0 := v.Args[0]
1924 b := v.Block
1925
1926
1927 for {
1928 d := auxIntToInt64(v.AuxInt)
1929 if v_0.Op != OpARM64MOVDconst {
1930 break
1931 }
1932 c := auxIntToInt64(v_0.AuxInt)
1933 x := v_1
1934 v.reset(OpARM64ADDconst)
1935 v.AuxInt = int64ToAuxInt(c)
1936 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
1937 v0.AuxInt = int64ToAuxInt(d)
1938 v0.AddArg(x)
1939 v.AddArg(v0)
1940 return true
1941 }
1942
1943
1944 for {
1945 d := auxIntToInt64(v.AuxInt)
1946 x := v_0
1947 if v_1.Op != OpARM64MOVDconst {
1948 break
1949 }
1950 c := auxIntToInt64(v_1.AuxInt)
1951 v.reset(OpARM64ADDconst)
1952 v.AuxInt = int64ToAuxInt(c >> uint64(d))
1953 v.AddArg(x)
1954 return true
1955 }
1956 return false
1957 }
1958 func rewriteValueARM64_OpARM64ADDshiftRL(v *Value) bool {
1959 v_1 := v.Args[1]
1960 v_0 := v.Args[0]
1961 b := v.Block
1962
1963
1964 for {
1965 d := auxIntToInt64(v.AuxInt)
1966 if v_0.Op != OpARM64MOVDconst {
1967 break
1968 }
1969 c := auxIntToInt64(v_0.AuxInt)
1970 x := v_1
1971 v.reset(OpARM64ADDconst)
1972 v.AuxInt = int64ToAuxInt(c)
1973 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
1974 v0.AuxInt = int64ToAuxInt(d)
1975 v0.AddArg(x)
1976 v.AddArg(v0)
1977 return true
1978 }
1979
1980
1981 for {
1982 d := auxIntToInt64(v.AuxInt)
1983 x := v_0
1984 if v_1.Op != OpARM64MOVDconst {
1985 break
1986 }
1987 c := auxIntToInt64(v_1.AuxInt)
1988 v.reset(OpARM64ADDconst)
1989 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
1990 v.AddArg(x)
1991 return true
1992 }
1993
1994
1995 for {
1996 c := auxIntToInt64(v.AuxInt)
1997 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
1998 break
1999 }
2000 x := v_0.Args[0]
2001 if x != v_1 {
2002 break
2003 }
2004 v.reset(OpARM64RORconst)
2005 v.AuxInt = int64ToAuxInt(c)
2006 v.AddArg(x)
2007 return true
2008 }
2009
2010
2011
2012 for {
2013 t := v.Type
2014 c := auxIntToInt64(v.AuxInt)
2015 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 32-c {
2016 break
2017 }
2018 x := v_0.Args[0]
2019 if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) {
2020 break
2021 }
2022 v.reset(OpARM64RORWconst)
2023 v.AuxInt = int64ToAuxInt(c)
2024 v.AddArg(x)
2025 return true
2026 }
2027 return false
2028 }
2029 func rewriteValueARM64_OpARM64AND(v *Value) bool {
2030 v_1 := v.Args[1]
2031 v_0 := v.Args[0]
2032
2033
2034 for {
2035 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2036 x := v_0
2037 if v_1.Op != OpARM64MOVDconst {
2038 continue
2039 }
2040 c := auxIntToInt64(v_1.AuxInt)
2041 v.reset(OpARM64ANDconst)
2042 v.AuxInt = int64ToAuxInt(c)
2043 v.AddArg(x)
2044 return true
2045 }
2046 break
2047 }
2048
2049
2050 for {
2051 x := v_0
2052 if x != v_1 {
2053 break
2054 }
2055 v.copyOf(x)
2056 return true
2057 }
2058
2059
2060 for {
2061 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2062 x := v_0
2063 if v_1.Op != OpARM64MVN {
2064 continue
2065 }
2066 y := v_1.Args[0]
2067 v.reset(OpARM64BIC)
2068 v.AddArg2(x, y)
2069 return true
2070 }
2071 break
2072 }
2073
2074
2075
2076 for {
2077 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2078 x0 := v_0
2079 x1 := v_1
2080 if x1.Op != OpARM64SLLconst {
2081 continue
2082 }
2083 c := auxIntToInt64(x1.AuxInt)
2084 y := x1.Args[0]
2085 if !(clobberIfDead(x1)) {
2086 continue
2087 }
2088 v.reset(OpARM64ANDshiftLL)
2089 v.AuxInt = int64ToAuxInt(c)
2090 v.AddArg2(x0, y)
2091 return true
2092 }
2093 break
2094 }
2095
2096
2097
2098 for {
2099 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2100 x0 := v_0
2101 x1 := v_1
2102 if x1.Op != OpARM64SRLconst {
2103 continue
2104 }
2105 c := auxIntToInt64(x1.AuxInt)
2106 y := x1.Args[0]
2107 if !(clobberIfDead(x1)) {
2108 continue
2109 }
2110 v.reset(OpARM64ANDshiftRL)
2111 v.AuxInt = int64ToAuxInt(c)
2112 v.AddArg2(x0, y)
2113 return true
2114 }
2115 break
2116 }
2117
2118
2119
2120 for {
2121 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2122 x0 := v_0
2123 x1 := v_1
2124 if x1.Op != OpARM64SRAconst {
2125 continue
2126 }
2127 c := auxIntToInt64(x1.AuxInt)
2128 y := x1.Args[0]
2129 if !(clobberIfDead(x1)) {
2130 continue
2131 }
2132 v.reset(OpARM64ANDshiftRA)
2133 v.AuxInt = int64ToAuxInt(c)
2134 v.AddArg2(x0, y)
2135 return true
2136 }
2137 break
2138 }
2139
2140
2141
2142 for {
2143 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2144 x0 := v_0
2145 x1 := v_1
2146 if x1.Op != OpARM64RORconst {
2147 continue
2148 }
2149 c := auxIntToInt64(x1.AuxInt)
2150 y := x1.Args[0]
2151 if !(clobberIfDead(x1)) {
2152 continue
2153 }
2154 v.reset(OpARM64ANDshiftRO)
2155 v.AuxInt = int64ToAuxInt(c)
2156 v.AddArg2(x0, y)
2157 return true
2158 }
2159 break
2160 }
2161 return false
2162 }
2163 func rewriteValueARM64_OpARM64ANDconst(v *Value) bool {
2164 v_0 := v.Args[0]
2165
2166
2167 for {
2168 if auxIntToInt64(v.AuxInt) != 0 {
2169 break
2170 }
2171 v.reset(OpARM64MOVDconst)
2172 v.AuxInt = int64ToAuxInt(0)
2173 return true
2174 }
2175
2176
2177 for {
2178 if auxIntToInt64(v.AuxInt) != -1 {
2179 break
2180 }
2181 x := v_0
2182 v.copyOf(x)
2183 return true
2184 }
2185
2186
2187 for {
2188 c := auxIntToInt64(v.AuxInt)
2189 if v_0.Op != OpARM64MOVDconst {
2190 break
2191 }
2192 d := auxIntToInt64(v_0.AuxInt)
2193 v.reset(OpARM64MOVDconst)
2194 v.AuxInt = int64ToAuxInt(c & d)
2195 return true
2196 }
2197
2198
2199 for {
2200 c := auxIntToInt64(v.AuxInt)
2201 if v_0.Op != OpARM64ANDconst {
2202 break
2203 }
2204 d := auxIntToInt64(v_0.AuxInt)
2205 x := v_0.Args[0]
2206 v.reset(OpARM64ANDconst)
2207 v.AuxInt = int64ToAuxInt(c & d)
2208 v.AddArg(x)
2209 return true
2210 }
2211
2212
2213 for {
2214 c := auxIntToInt64(v.AuxInt)
2215 if v_0.Op != OpARM64MOVWUreg {
2216 break
2217 }
2218 x := v_0.Args[0]
2219 v.reset(OpARM64ANDconst)
2220 v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
2221 v.AddArg(x)
2222 return true
2223 }
2224
2225
2226 for {
2227 c := auxIntToInt64(v.AuxInt)
2228 if v_0.Op != OpARM64MOVHUreg {
2229 break
2230 }
2231 x := v_0.Args[0]
2232 v.reset(OpARM64ANDconst)
2233 v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
2234 v.AddArg(x)
2235 return true
2236 }
2237
2238
2239 for {
2240 c := auxIntToInt64(v.AuxInt)
2241 if v_0.Op != OpARM64MOVBUreg {
2242 break
2243 }
2244 x := v_0.Args[0]
2245 v.reset(OpARM64ANDconst)
2246 v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
2247 v.AddArg(x)
2248 return true
2249 }
2250
2251
2252
2253 for {
2254 ac := auxIntToInt64(v.AuxInt)
2255 if v_0.Op != OpARM64SLLconst {
2256 break
2257 }
2258 sc := auxIntToInt64(v_0.AuxInt)
2259 x := v_0.Args[0]
2260 if !(isARM64BFMask(sc, ac, sc)) {
2261 break
2262 }
2263 v.reset(OpARM64UBFIZ)
2264 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
2265 v.AddArg(x)
2266 return true
2267 }
2268
2269
2270
2271 for {
2272 ac := auxIntToInt64(v.AuxInt)
2273 if v_0.Op != OpARM64SRLconst {
2274 break
2275 }
2276 sc := auxIntToInt64(v_0.AuxInt)
2277 x := v_0.Args[0]
2278 if !(isARM64BFMask(sc, ac, 0)) {
2279 break
2280 }
2281 v.reset(OpARM64UBFX)
2282 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
2283 v.AddArg(x)
2284 return true
2285 }
2286 return false
2287 }
2288 func rewriteValueARM64_OpARM64ANDshiftLL(v *Value) bool {
2289 v_1 := v.Args[1]
2290 v_0 := v.Args[0]
2291 b := v.Block
2292
2293
2294 for {
2295 d := auxIntToInt64(v.AuxInt)
2296 if v_0.Op != OpARM64MOVDconst {
2297 break
2298 }
2299 c := auxIntToInt64(v_0.AuxInt)
2300 x := v_1
2301 v.reset(OpARM64ANDconst)
2302 v.AuxInt = int64ToAuxInt(c)
2303 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
2304 v0.AuxInt = int64ToAuxInt(d)
2305 v0.AddArg(x)
2306 v.AddArg(v0)
2307 return true
2308 }
2309
2310
2311 for {
2312 d := auxIntToInt64(v.AuxInt)
2313 x := v_0
2314 if v_1.Op != OpARM64MOVDconst {
2315 break
2316 }
2317 c := auxIntToInt64(v_1.AuxInt)
2318 v.reset(OpARM64ANDconst)
2319 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
2320 v.AddArg(x)
2321 return true
2322 }
2323
2324
2325 for {
2326 c := auxIntToInt64(v.AuxInt)
2327 y := v_0
2328 if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
2329 break
2330 }
2331 x := y.Args[0]
2332 if x != v_1 {
2333 break
2334 }
2335 v.copyOf(y)
2336 return true
2337 }
2338 return false
2339 }
2340 func rewriteValueARM64_OpARM64ANDshiftRA(v *Value) bool {
2341 v_1 := v.Args[1]
2342 v_0 := v.Args[0]
2343 b := v.Block
2344
2345
2346 for {
2347 d := auxIntToInt64(v.AuxInt)
2348 if v_0.Op != OpARM64MOVDconst {
2349 break
2350 }
2351 c := auxIntToInt64(v_0.AuxInt)
2352 x := v_1
2353 v.reset(OpARM64ANDconst)
2354 v.AuxInt = int64ToAuxInt(c)
2355 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
2356 v0.AuxInt = int64ToAuxInt(d)
2357 v0.AddArg(x)
2358 v.AddArg(v0)
2359 return true
2360 }
2361
2362
2363 for {
2364 d := auxIntToInt64(v.AuxInt)
2365 x := v_0
2366 if v_1.Op != OpARM64MOVDconst {
2367 break
2368 }
2369 c := auxIntToInt64(v_1.AuxInt)
2370 v.reset(OpARM64ANDconst)
2371 v.AuxInt = int64ToAuxInt(c >> uint64(d))
2372 v.AddArg(x)
2373 return true
2374 }
2375
2376
2377 for {
2378 c := auxIntToInt64(v.AuxInt)
2379 y := v_0
2380 if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
2381 break
2382 }
2383 x := y.Args[0]
2384 if x != v_1 {
2385 break
2386 }
2387 v.copyOf(y)
2388 return true
2389 }
2390 return false
2391 }
2392 func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool {
2393 v_1 := v.Args[1]
2394 v_0 := v.Args[0]
2395 b := v.Block
2396
2397
2398 for {
2399 d := auxIntToInt64(v.AuxInt)
2400 if v_0.Op != OpARM64MOVDconst {
2401 break
2402 }
2403 c := auxIntToInt64(v_0.AuxInt)
2404 x := v_1
2405 v.reset(OpARM64ANDconst)
2406 v.AuxInt = int64ToAuxInt(c)
2407 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
2408 v0.AuxInt = int64ToAuxInt(d)
2409 v0.AddArg(x)
2410 v.AddArg(v0)
2411 return true
2412 }
2413
2414
2415 for {
2416 d := auxIntToInt64(v.AuxInt)
2417 x := v_0
2418 if v_1.Op != OpARM64MOVDconst {
2419 break
2420 }
2421 c := auxIntToInt64(v_1.AuxInt)
2422 v.reset(OpARM64ANDconst)
2423 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
2424 v.AddArg(x)
2425 return true
2426 }
2427
2428
2429 for {
2430 c := auxIntToInt64(v.AuxInt)
2431 y := v_0
2432 if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
2433 break
2434 }
2435 x := y.Args[0]
2436 if x != v_1 {
2437 break
2438 }
2439 v.copyOf(y)
2440 return true
2441 }
2442 return false
2443 }
2444 func rewriteValueARM64_OpARM64ANDshiftRO(v *Value) bool {
2445 v_1 := v.Args[1]
2446 v_0 := v.Args[0]
2447 b := v.Block
2448
2449
2450 for {
2451 d := auxIntToInt64(v.AuxInt)
2452 if v_0.Op != OpARM64MOVDconst {
2453 break
2454 }
2455 c := auxIntToInt64(v_0.AuxInt)
2456 x := v_1
2457 v.reset(OpARM64ANDconst)
2458 v.AuxInt = int64ToAuxInt(c)
2459 v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
2460 v0.AuxInt = int64ToAuxInt(d)
2461 v0.AddArg(x)
2462 v.AddArg(v0)
2463 return true
2464 }
2465
2466
2467 for {
2468 d := auxIntToInt64(v.AuxInt)
2469 x := v_0
2470 if v_1.Op != OpARM64MOVDconst {
2471 break
2472 }
2473 c := auxIntToInt64(v_1.AuxInt)
2474 v.reset(OpARM64ANDconst)
2475 v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
2476 v.AddArg(x)
2477 return true
2478 }
2479
2480
2481 for {
2482 c := auxIntToInt64(v.AuxInt)
2483 y := v_0
2484 if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
2485 break
2486 }
2487 x := y.Args[0]
2488 if x != v_1 {
2489 break
2490 }
2491 v.copyOf(y)
2492 return true
2493 }
2494 return false
2495 }
2496 func rewriteValueARM64_OpARM64BIC(v *Value) bool {
2497 v_1 := v.Args[1]
2498 v_0 := v.Args[0]
2499
2500
2501 for {
2502 x := v_0
2503 if v_1.Op != OpARM64MOVDconst {
2504 break
2505 }
2506 c := auxIntToInt64(v_1.AuxInt)
2507 v.reset(OpARM64ANDconst)
2508 v.AuxInt = int64ToAuxInt(^c)
2509 v.AddArg(x)
2510 return true
2511 }
2512
2513
2514 for {
2515 x := v_0
2516 if x != v_1 {
2517 break
2518 }
2519 v.reset(OpARM64MOVDconst)
2520 v.AuxInt = int64ToAuxInt(0)
2521 return true
2522 }
2523
2524
2525
2526 for {
2527 x0 := v_0
2528 x1 := v_1
2529 if x1.Op != OpARM64SLLconst {
2530 break
2531 }
2532 c := auxIntToInt64(x1.AuxInt)
2533 y := x1.Args[0]
2534 if !(clobberIfDead(x1)) {
2535 break
2536 }
2537 v.reset(OpARM64BICshiftLL)
2538 v.AuxInt = int64ToAuxInt(c)
2539 v.AddArg2(x0, y)
2540 return true
2541 }
2542
2543
2544
2545 for {
2546 x0 := v_0
2547 x1 := v_1
2548 if x1.Op != OpARM64SRLconst {
2549 break
2550 }
2551 c := auxIntToInt64(x1.AuxInt)
2552 y := x1.Args[0]
2553 if !(clobberIfDead(x1)) {
2554 break
2555 }
2556 v.reset(OpARM64BICshiftRL)
2557 v.AuxInt = int64ToAuxInt(c)
2558 v.AddArg2(x0, y)
2559 return true
2560 }
2561
2562
2563
2564 for {
2565 x0 := v_0
2566 x1 := v_1
2567 if x1.Op != OpARM64SRAconst {
2568 break
2569 }
2570 c := auxIntToInt64(x1.AuxInt)
2571 y := x1.Args[0]
2572 if !(clobberIfDead(x1)) {
2573 break
2574 }
2575 v.reset(OpARM64BICshiftRA)
2576 v.AuxInt = int64ToAuxInt(c)
2577 v.AddArg2(x0, y)
2578 return true
2579 }
2580
2581
2582
2583 for {
2584 x0 := v_0
2585 x1 := v_1
2586 if x1.Op != OpARM64RORconst {
2587 break
2588 }
2589 c := auxIntToInt64(x1.AuxInt)
2590 y := x1.Args[0]
2591 if !(clobberIfDead(x1)) {
2592 break
2593 }
2594 v.reset(OpARM64BICshiftRO)
2595 v.AuxInt = int64ToAuxInt(c)
2596 v.AddArg2(x0, y)
2597 return true
2598 }
2599 return false
2600 }
2601 func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool {
2602 v_1 := v.Args[1]
2603 v_0 := v.Args[0]
2604
2605
2606 for {
2607 d := auxIntToInt64(v.AuxInt)
2608 x := v_0
2609 if v_1.Op != OpARM64MOVDconst {
2610 break
2611 }
2612 c := auxIntToInt64(v_1.AuxInt)
2613 v.reset(OpARM64ANDconst)
2614 v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
2615 v.AddArg(x)
2616 return true
2617 }
2618
2619
2620 for {
2621 c := auxIntToInt64(v.AuxInt)
2622 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
2623 break
2624 }
2625 x := v_0.Args[0]
2626 if x != v_1 {
2627 break
2628 }
2629 v.reset(OpARM64MOVDconst)
2630 v.AuxInt = int64ToAuxInt(0)
2631 return true
2632 }
2633 return false
2634 }
2635 func rewriteValueARM64_OpARM64BICshiftRA(v *Value) bool {
2636 v_1 := v.Args[1]
2637 v_0 := v.Args[0]
2638
2639
2640 for {
2641 d := auxIntToInt64(v.AuxInt)
2642 x := v_0
2643 if v_1.Op != OpARM64MOVDconst {
2644 break
2645 }
2646 c := auxIntToInt64(v_1.AuxInt)
2647 v.reset(OpARM64ANDconst)
2648 v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
2649 v.AddArg(x)
2650 return true
2651 }
2652
2653
2654 for {
2655 c := auxIntToInt64(v.AuxInt)
2656 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
2657 break
2658 }
2659 x := v_0.Args[0]
2660 if x != v_1 {
2661 break
2662 }
2663 v.reset(OpARM64MOVDconst)
2664 v.AuxInt = int64ToAuxInt(0)
2665 return true
2666 }
2667 return false
2668 }
2669 func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool {
2670 v_1 := v.Args[1]
2671 v_0 := v.Args[0]
2672
2673
2674 for {
2675 d := auxIntToInt64(v.AuxInt)
2676 x := v_0
2677 if v_1.Op != OpARM64MOVDconst {
2678 break
2679 }
2680 c := auxIntToInt64(v_1.AuxInt)
2681 v.reset(OpARM64ANDconst)
2682 v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
2683 v.AddArg(x)
2684 return true
2685 }
2686
2687
2688 for {
2689 c := auxIntToInt64(v.AuxInt)
2690 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
2691 break
2692 }
2693 x := v_0.Args[0]
2694 if x != v_1 {
2695 break
2696 }
2697 v.reset(OpARM64MOVDconst)
2698 v.AuxInt = int64ToAuxInt(0)
2699 return true
2700 }
2701 return false
2702 }
2703 func rewriteValueARM64_OpARM64BICshiftRO(v *Value) bool {
2704 v_1 := v.Args[1]
2705 v_0 := v.Args[0]
2706
2707
2708 for {
2709 d := auxIntToInt64(v.AuxInt)
2710 x := v_0
2711 if v_1.Op != OpARM64MOVDconst {
2712 break
2713 }
2714 c := auxIntToInt64(v_1.AuxInt)
2715 v.reset(OpARM64ANDconst)
2716 v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
2717 v.AddArg(x)
2718 return true
2719 }
2720
2721
2722 for {
2723 c := auxIntToInt64(v.AuxInt)
2724 if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
2725 break
2726 }
2727 x := v_0.Args[0]
2728 if x != v_1 {
2729 break
2730 }
2731 v.reset(OpARM64MOVDconst)
2732 v.AuxInt = int64ToAuxInt(0)
2733 return true
2734 }
2735 return false
2736 }
2737 func rewriteValueARM64_OpARM64CMN(v *Value) bool {
2738 v_1 := v.Args[1]
2739 v_0 := v.Args[0]
2740
2741
2742 for {
2743 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2744 x := v_0
2745 if v_1.Op != OpARM64MOVDconst {
2746 continue
2747 }
2748 c := auxIntToInt64(v_1.AuxInt)
2749 v.reset(OpARM64CMNconst)
2750 v.AuxInt = int64ToAuxInt(c)
2751 v.AddArg(x)
2752 return true
2753 }
2754 break
2755 }
2756
2757
2758
2759 for {
2760 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2761 x0 := v_0
2762 x1 := v_1
2763 if x1.Op != OpARM64SLLconst {
2764 continue
2765 }
2766 c := auxIntToInt64(x1.AuxInt)
2767 y := x1.Args[0]
2768 if !(clobberIfDead(x1)) {
2769 continue
2770 }
2771 v.reset(OpARM64CMNshiftLL)
2772 v.AuxInt = int64ToAuxInt(c)
2773 v.AddArg2(x0, y)
2774 return true
2775 }
2776 break
2777 }
2778
2779
2780
2781 for {
2782 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2783 x0 := v_0
2784 x1 := v_1
2785 if x1.Op != OpARM64SRLconst {
2786 continue
2787 }
2788 c := auxIntToInt64(x1.AuxInt)
2789 y := x1.Args[0]
2790 if !(clobberIfDead(x1)) {
2791 continue
2792 }
2793 v.reset(OpARM64CMNshiftRL)
2794 v.AuxInt = int64ToAuxInt(c)
2795 v.AddArg2(x0, y)
2796 return true
2797 }
2798 break
2799 }
2800
2801
2802
2803 for {
2804 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2805 x0 := v_0
2806 x1 := v_1
2807 if x1.Op != OpARM64SRAconst {
2808 continue
2809 }
2810 c := auxIntToInt64(x1.AuxInt)
2811 y := x1.Args[0]
2812 if !(clobberIfDead(x1)) {
2813 continue
2814 }
2815 v.reset(OpARM64CMNshiftRA)
2816 v.AuxInt = int64ToAuxInt(c)
2817 v.AddArg2(x0, y)
2818 return true
2819 }
2820 break
2821 }
2822 return false
2823 }
2824 func rewriteValueARM64_OpARM64CMNW(v *Value) bool {
2825 v_1 := v.Args[1]
2826 v_0 := v.Args[0]
2827
2828
2829 for {
2830 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2831 x := v_0
2832 if v_1.Op != OpARM64MOVDconst {
2833 continue
2834 }
2835 c := auxIntToInt64(v_1.AuxInt)
2836 v.reset(OpARM64CMNWconst)
2837 v.AuxInt = int32ToAuxInt(int32(c))
2838 v.AddArg(x)
2839 return true
2840 }
2841 break
2842 }
2843 return false
2844 }
2845 func rewriteValueARM64_OpARM64CMNWconst(v *Value) bool {
2846 v_0 := v.Args[0]
2847
2848
2849 for {
2850 y := auxIntToInt32(v.AuxInt)
2851 if v_0.Op != OpARM64MOVDconst {
2852 break
2853 }
2854 x := auxIntToInt64(v_0.AuxInt)
2855 v.reset(OpARM64FlagConstant)
2856 v.AuxInt = flagConstantToAuxInt(addFlags32(int32(x), y))
2857 return true
2858 }
2859 return false
2860 }
2861 func rewriteValueARM64_OpARM64CMNconst(v *Value) bool {
2862 v_0 := v.Args[0]
2863
2864
2865 for {
2866 y := auxIntToInt64(v.AuxInt)
2867 if v_0.Op != OpARM64MOVDconst {
2868 break
2869 }
2870 x := auxIntToInt64(v_0.AuxInt)
2871 v.reset(OpARM64FlagConstant)
2872 v.AuxInt = flagConstantToAuxInt(addFlags64(x, y))
2873 return true
2874 }
2875 return false
2876 }
2877 func rewriteValueARM64_OpARM64CMNshiftLL(v *Value) bool {
2878 v_1 := v.Args[1]
2879 v_0 := v.Args[0]
2880 b := v.Block
2881
2882
2883 for {
2884 d := auxIntToInt64(v.AuxInt)
2885 if v_0.Op != OpARM64MOVDconst {
2886 break
2887 }
2888 c := auxIntToInt64(v_0.AuxInt)
2889 x := v_1
2890 v.reset(OpARM64CMNconst)
2891 v.AuxInt = int64ToAuxInt(c)
2892 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
2893 v0.AuxInt = int64ToAuxInt(d)
2894 v0.AddArg(x)
2895 v.AddArg(v0)
2896 return true
2897 }
2898
2899
2900 for {
2901 d := auxIntToInt64(v.AuxInt)
2902 x := v_0
2903 if v_1.Op != OpARM64MOVDconst {
2904 break
2905 }
2906 c := auxIntToInt64(v_1.AuxInt)
2907 v.reset(OpARM64CMNconst)
2908 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
2909 v.AddArg(x)
2910 return true
2911 }
2912 return false
2913 }
2914 func rewriteValueARM64_OpARM64CMNshiftRA(v *Value) bool {
2915 v_1 := v.Args[1]
2916 v_0 := v.Args[0]
2917 b := v.Block
2918
2919
2920 for {
2921 d := auxIntToInt64(v.AuxInt)
2922 if v_0.Op != OpARM64MOVDconst {
2923 break
2924 }
2925 c := auxIntToInt64(v_0.AuxInt)
2926 x := v_1
2927 v.reset(OpARM64CMNconst)
2928 v.AuxInt = int64ToAuxInt(c)
2929 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
2930 v0.AuxInt = int64ToAuxInt(d)
2931 v0.AddArg(x)
2932 v.AddArg(v0)
2933 return true
2934 }
2935
2936
2937 for {
2938 d := auxIntToInt64(v.AuxInt)
2939 x := v_0
2940 if v_1.Op != OpARM64MOVDconst {
2941 break
2942 }
2943 c := auxIntToInt64(v_1.AuxInt)
2944 v.reset(OpARM64CMNconst)
2945 v.AuxInt = int64ToAuxInt(c >> uint64(d))
2946 v.AddArg(x)
2947 return true
2948 }
2949 return false
2950 }
2951 func rewriteValueARM64_OpARM64CMNshiftRL(v *Value) bool {
2952 v_1 := v.Args[1]
2953 v_0 := v.Args[0]
2954 b := v.Block
2955
2956
2957 for {
2958 d := auxIntToInt64(v.AuxInt)
2959 if v_0.Op != OpARM64MOVDconst {
2960 break
2961 }
2962 c := auxIntToInt64(v_0.AuxInt)
2963 x := v_1
2964 v.reset(OpARM64CMNconst)
2965 v.AuxInt = int64ToAuxInt(c)
2966 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
2967 v0.AuxInt = int64ToAuxInt(d)
2968 v0.AddArg(x)
2969 v.AddArg(v0)
2970 return true
2971 }
2972
2973
2974 for {
2975 d := auxIntToInt64(v.AuxInt)
2976 x := v_0
2977 if v_1.Op != OpARM64MOVDconst {
2978 break
2979 }
2980 c := auxIntToInt64(v_1.AuxInt)
2981 v.reset(OpARM64CMNconst)
2982 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
2983 v.AddArg(x)
2984 return true
2985 }
2986 return false
2987 }
2988 func rewriteValueARM64_OpARM64CMP(v *Value) bool {
2989 v_1 := v.Args[1]
2990 v_0 := v.Args[0]
2991 b := v.Block
2992
2993
2994 for {
2995 x := v_0
2996 if v_1.Op != OpARM64MOVDconst {
2997 break
2998 }
2999 c := auxIntToInt64(v_1.AuxInt)
3000 v.reset(OpARM64CMPconst)
3001 v.AuxInt = int64ToAuxInt(c)
3002 v.AddArg(x)
3003 return true
3004 }
3005
3006
3007 for {
3008 if v_0.Op != OpARM64MOVDconst {
3009 break
3010 }
3011 c := auxIntToInt64(v_0.AuxInt)
3012 x := v_1
3013 v.reset(OpARM64InvertFlags)
3014 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
3015 v0.AuxInt = int64ToAuxInt(c)
3016 v0.AddArg(x)
3017 v.AddArg(v0)
3018 return true
3019 }
3020
3021
3022
3023 for {
3024 x := v_0
3025 y := v_1
3026 if !(canonLessThan(x, y)) {
3027 break
3028 }
3029 v.reset(OpARM64InvertFlags)
3030 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
3031 v0.AddArg2(y, x)
3032 v.AddArg(v0)
3033 return true
3034 }
3035
3036
3037
3038 for {
3039 x0 := v_0
3040 x1 := v_1
3041 if x1.Op != OpARM64SLLconst {
3042 break
3043 }
3044 c := auxIntToInt64(x1.AuxInt)
3045 y := x1.Args[0]
3046 if !(clobberIfDead(x1)) {
3047 break
3048 }
3049 v.reset(OpARM64CMPshiftLL)
3050 v.AuxInt = int64ToAuxInt(c)
3051 v.AddArg2(x0, y)
3052 return true
3053 }
3054
3055
3056
3057 for {
3058 x0 := v_0
3059 if x0.Op != OpARM64SLLconst {
3060 break
3061 }
3062 c := auxIntToInt64(x0.AuxInt)
3063 y := x0.Args[0]
3064 x1 := v_1
3065 if !(clobberIfDead(x0)) {
3066 break
3067 }
3068 v.reset(OpARM64InvertFlags)
3069 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
3070 v0.AuxInt = int64ToAuxInt(c)
3071 v0.AddArg2(x1, y)
3072 v.AddArg(v0)
3073 return true
3074 }
3075
3076
3077
3078 for {
3079 x0 := v_0
3080 x1 := v_1
3081 if x1.Op != OpARM64SRLconst {
3082 break
3083 }
3084 c := auxIntToInt64(x1.AuxInt)
3085 y := x1.Args[0]
3086 if !(clobberIfDead(x1)) {
3087 break
3088 }
3089 v.reset(OpARM64CMPshiftRL)
3090 v.AuxInt = int64ToAuxInt(c)
3091 v.AddArg2(x0, y)
3092 return true
3093 }
3094
3095
3096
3097 for {
3098 x0 := v_0
3099 if x0.Op != OpARM64SRLconst {
3100 break
3101 }
3102 c := auxIntToInt64(x0.AuxInt)
3103 y := x0.Args[0]
3104 x1 := v_1
3105 if !(clobberIfDead(x0)) {
3106 break
3107 }
3108 v.reset(OpARM64InvertFlags)
3109 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
3110 v0.AuxInt = int64ToAuxInt(c)
3111 v0.AddArg2(x1, y)
3112 v.AddArg(v0)
3113 return true
3114 }
3115
3116
3117
3118 for {
3119 x0 := v_0
3120 x1 := v_1
3121 if x1.Op != OpARM64SRAconst {
3122 break
3123 }
3124 c := auxIntToInt64(x1.AuxInt)
3125 y := x1.Args[0]
3126 if !(clobberIfDead(x1)) {
3127 break
3128 }
3129 v.reset(OpARM64CMPshiftRA)
3130 v.AuxInt = int64ToAuxInt(c)
3131 v.AddArg2(x0, y)
3132 return true
3133 }
3134
3135
3136
3137 for {
3138 x0 := v_0
3139 if x0.Op != OpARM64SRAconst {
3140 break
3141 }
3142 c := auxIntToInt64(x0.AuxInt)
3143 y := x0.Args[0]
3144 x1 := v_1
3145 if !(clobberIfDead(x0)) {
3146 break
3147 }
3148 v.reset(OpARM64InvertFlags)
3149 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
3150 v0.AuxInt = int64ToAuxInt(c)
3151 v0.AddArg2(x1, y)
3152 v.AddArg(v0)
3153 return true
3154 }
3155 return false
3156 }
3157 func rewriteValueARM64_OpARM64CMPW(v *Value) bool {
3158 v_1 := v.Args[1]
3159 v_0 := v.Args[0]
3160 b := v.Block
3161
3162
3163 for {
3164 x := v_0
3165 if v_1.Op != OpARM64MOVDconst {
3166 break
3167 }
3168 c := auxIntToInt64(v_1.AuxInt)
3169 v.reset(OpARM64CMPWconst)
3170 v.AuxInt = int32ToAuxInt(int32(c))
3171 v.AddArg(x)
3172 return true
3173 }
3174
3175
3176 for {
3177 if v_0.Op != OpARM64MOVDconst {
3178 break
3179 }
3180 c := auxIntToInt64(v_0.AuxInt)
3181 x := v_1
3182 v.reset(OpARM64InvertFlags)
3183 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
3184 v0.AuxInt = int32ToAuxInt(int32(c))
3185 v0.AddArg(x)
3186 v.AddArg(v0)
3187 return true
3188 }
3189
3190
3191
3192 for {
3193 x := v_0
3194 y := v_1
3195 if !(canonLessThan(x, y)) {
3196 break
3197 }
3198 v.reset(OpARM64InvertFlags)
3199 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
3200 v0.AddArg2(y, x)
3201 v.AddArg(v0)
3202 return true
3203 }
3204 return false
3205 }
3206 func rewriteValueARM64_OpARM64CMPWconst(v *Value) bool {
3207 v_0 := v.Args[0]
3208
3209
3210 for {
3211 y := auxIntToInt32(v.AuxInt)
3212 if v_0.Op != OpARM64MOVDconst {
3213 break
3214 }
3215 x := auxIntToInt64(v_0.AuxInt)
3216 v.reset(OpARM64FlagConstant)
3217 v.AuxInt = flagConstantToAuxInt(subFlags32(int32(x), y))
3218 return true
3219 }
3220
3221
3222
3223 for {
3224 c := auxIntToInt32(v.AuxInt)
3225 if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
3226 break
3227 }
3228 v.reset(OpARM64FlagConstant)
3229 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3230 return true
3231 }
3232
3233
3234
3235 for {
3236 c := auxIntToInt32(v.AuxInt)
3237 if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
3238 break
3239 }
3240 v.reset(OpARM64FlagConstant)
3241 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3242 return true
3243 }
3244 return false
3245 }
3246 func rewriteValueARM64_OpARM64CMPconst(v *Value) bool {
3247 v_0 := v.Args[0]
3248
3249
3250 for {
3251 y := auxIntToInt64(v.AuxInt)
3252 if v_0.Op != OpARM64MOVDconst {
3253 break
3254 }
3255 x := auxIntToInt64(v_0.AuxInt)
3256 v.reset(OpARM64FlagConstant)
3257 v.AuxInt = flagConstantToAuxInt(subFlags64(x, y))
3258 return true
3259 }
3260
3261
3262
3263 for {
3264 c := auxIntToInt64(v.AuxInt)
3265 if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
3266 break
3267 }
3268 v.reset(OpARM64FlagConstant)
3269 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3270 return true
3271 }
3272
3273
3274
3275 for {
3276 c := auxIntToInt64(v.AuxInt)
3277 if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
3278 break
3279 }
3280 v.reset(OpARM64FlagConstant)
3281 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3282 return true
3283 }
3284
3285
3286
3287 for {
3288 c := auxIntToInt64(v.AuxInt)
3289 if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) {
3290 break
3291 }
3292 v.reset(OpARM64FlagConstant)
3293 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3294 return true
3295 }
3296
3297
3298
3299 for {
3300 n := auxIntToInt64(v.AuxInt)
3301 if v_0.Op != OpARM64ANDconst {
3302 break
3303 }
3304 m := auxIntToInt64(v_0.AuxInt)
3305 if !(0 <= m && m < n) {
3306 break
3307 }
3308 v.reset(OpARM64FlagConstant)
3309 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3310 return true
3311 }
3312
3313
3314
3315 for {
3316 n := auxIntToInt64(v.AuxInt)
3317 if v_0.Op != OpARM64SRLconst {
3318 break
3319 }
3320 c := auxIntToInt64(v_0.AuxInt)
3321 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
3322 break
3323 }
3324 v.reset(OpARM64FlagConstant)
3325 v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
3326 return true
3327 }
3328 return false
3329 }
3330 func rewriteValueARM64_OpARM64CMPshiftLL(v *Value) bool {
3331 v_1 := v.Args[1]
3332 v_0 := v.Args[0]
3333 b := v.Block
3334
3335
3336 for {
3337 d := auxIntToInt64(v.AuxInt)
3338 if v_0.Op != OpARM64MOVDconst {
3339 break
3340 }
3341 c := auxIntToInt64(v_0.AuxInt)
3342 x := v_1
3343 v.reset(OpARM64InvertFlags)
3344 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
3345 v0.AuxInt = int64ToAuxInt(c)
3346 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
3347 v1.AuxInt = int64ToAuxInt(d)
3348 v1.AddArg(x)
3349 v0.AddArg(v1)
3350 v.AddArg(v0)
3351 return true
3352 }
3353
3354
3355 for {
3356 d := auxIntToInt64(v.AuxInt)
3357 x := v_0
3358 if v_1.Op != OpARM64MOVDconst {
3359 break
3360 }
3361 c := auxIntToInt64(v_1.AuxInt)
3362 v.reset(OpARM64CMPconst)
3363 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
3364 v.AddArg(x)
3365 return true
3366 }
3367 return false
3368 }
3369 func rewriteValueARM64_OpARM64CMPshiftRA(v *Value) bool {
3370 v_1 := v.Args[1]
3371 v_0 := v.Args[0]
3372 b := v.Block
3373
3374
3375 for {
3376 d := auxIntToInt64(v.AuxInt)
3377 if v_0.Op != OpARM64MOVDconst {
3378 break
3379 }
3380 c := auxIntToInt64(v_0.AuxInt)
3381 x := v_1
3382 v.reset(OpARM64InvertFlags)
3383 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
3384 v0.AuxInt = int64ToAuxInt(c)
3385 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
3386 v1.AuxInt = int64ToAuxInt(d)
3387 v1.AddArg(x)
3388 v0.AddArg(v1)
3389 v.AddArg(v0)
3390 return true
3391 }
3392
3393
3394 for {
3395 d := auxIntToInt64(v.AuxInt)
3396 x := v_0
3397 if v_1.Op != OpARM64MOVDconst {
3398 break
3399 }
3400 c := auxIntToInt64(v_1.AuxInt)
3401 v.reset(OpARM64CMPconst)
3402 v.AuxInt = int64ToAuxInt(c >> uint64(d))
3403 v.AddArg(x)
3404 return true
3405 }
3406 return false
3407 }
3408 func rewriteValueARM64_OpARM64CMPshiftRL(v *Value) bool {
3409 v_1 := v.Args[1]
3410 v_0 := v.Args[0]
3411 b := v.Block
3412
3413
3414 for {
3415 d := auxIntToInt64(v.AuxInt)
3416 if v_0.Op != OpARM64MOVDconst {
3417 break
3418 }
3419 c := auxIntToInt64(v_0.AuxInt)
3420 x := v_1
3421 v.reset(OpARM64InvertFlags)
3422 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
3423 v0.AuxInt = int64ToAuxInt(c)
3424 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
3425 v1.AuxInt = int64ToAuxInt(d)
3426 v1.AddArg(x)
3427 v0.AddArg(v1)
3428 v.AddArg(v0)
3429 return true
3430 }
3431
3432
3433 for {
3434 d := auxIntToInt64(v.AuxInt)
3435 x := v_0
3436 if v_1.Op != OpARM64MOVDconst {
3437 break
3438 }
3439 c := auxIntToInt64(v_1.AuxInt)
3440 v.reset(OpARM64CMPconst)
3441 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
3442 v.AddArg(x)
3443 return true
3444 }
3445 return false
3446 }
3447 func rewriteValueARM64_OpARM64CSEL(v *Value) bool {
3448 v_2 := v.Args[2]
3449 v_1 := v.Args[1]
3450 v_0 := v.Args[0]
3451
3452
3453 for {
3454 cc := auxIntToOp(v.AuxInt)
3455 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != -1 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
3456 break
3457 }
3458 flag := v_2
3459 v.reset(OpARM64CSETM)
3460 v.AuxInt = opToAuxInt(cc)
3461 v.AddArg(flag)
3462 return true
3463 }
3464
3465
3466 for {
3467 cc := auxIntToOp(v.AuxInt)
3468 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
3469 break
3470 }
3471 flag := v_2
3472 v.reset(OpARM64CSETM)
3473 v.AuxInt = opToAuxInt(arm64Negate(cc))
3474 v.AddArg(flag)
3475 return true
3476 }
3477
3478
3479 for {
3480 cc := auxIntToOp(v.AuxInt)
3481 x := v_0
3482 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
3483 break
3484 }
3485 flag := v_2
3486 v.reset(OpARM64CSEL0)
3487 v.AuxInt = opToAuxInt(cc)
3488 v.AddArg2(x, flag)
3489 return true
3490 }
3491
3492
3493 for {
3494 cc := auxIntToOp(v.AuxInt)
3495 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
3496 break
3497 }
3498 y := v_1
3499 flag := v_2
3500 v.reset(OpARM64CSEL0)
3501 v.AuxInt = opToAuxInt(arm64Negate(cc))
3502 v.AddArg2(y, flag)
3503 return true
3504 }
3505
3506
3507 for {
3508 cc := auxIntToOp(v.AuxInt)
3509 x := v_0
3510 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
3511 break
3512 }
3513 a := v_1.Args[0]
3514 flag := v_2
3515 v.reset(OpARM64CSINC)
3516 v.AuxInt = opToAuxInt(cc)
3517 v.AddArg3(x, a, flag)
3518 return true
3519 }
3520
3521
3522 for {
3523 cc := auxIntToOp(v.AuxInt)
3524 if v_0.Op != OpARM64ADDconst || auxIntToInt64(v_0.AuxInt) != 1 {
3525 break
3526 }
3527 a := v_0.Args[0]
3528 x := v_1
3529 flag := v_2
3530 v.reset(OpARM64CSINC)
3531 v.AuxInt = opToAuxInt(arm64Negate(cc))
3532 v.AddArg3(x, a, flag)
3533 return true
3534 }
3535
3536
3537 for {
3538 cc := auxIntToOp(v.AuxInt)
3539 x := v_0
3540 if v_1.Op != OpARM64MVN {
3541 break
3542 }
3543 a := v_1.Args[0]
3544 flag := v_2
3545 v.reset(OpARM64CSINV)
3546 v.AuxInt = opToAuxInt(cc)
3547 v.AddArg3(x, a, flag)
3548 return true
3549 }
3550
3551
3552 for {
3553 cc := auxIntToOp(v.AuxInt)
3554 if v_0.Op != OpARM64MVN {
3555 break
3556 }
3557 a := v_0.Args[0]
3558 x := v_1
3559 flag := v_2
3560 v.reset(OpARM64CSINV)
3561 v.AuxInt = opToAuxInt(arm64Negate(cc))
3562 v.AddArg3(x, a, flag)
3563 return true
3564 }
3565
3566
3567 for {
3568 cc := auxIntToOp(v.AuxInt)
3569 x := v_0
3570 if v_1.Op != OpARM64NEG {
3571 break
3572 }
3573 a := v_1.Args[0]
3574 flag := v_2
3575 v.reset(OpARM64CSNEG)
3576 v.AuxInt = opToAuxInt(cc)
3577 v.AddArg3(x, a, flag)
3578 return true
3579 }
3580
3581
3582 for {
3583 cc := auxIntToOp(v.AuxInt)
3584 if v_0.Op != OpARM64NEG {
3585 break
3586 }
3587 a := v_0.Args[0]
3588 x := v_1
3589 flag := v_2
3590 v.reset(OpARM64CSNEG)
3591 v.AuxInt = opToAuxInt(arm64Negate(cc))
3592 v.AddArg3(x, a, flag)
3593 return true
3594 }
3595
3596
3597 for {
3598 cc := auxIntToOp(v.AuxInt)
3599 x := v_0
3600 y := v_1
3601 if v_2.Op != OpARM64InvertFlags {
3602 break
3603 }
3604 cmp := v_2.Args[0]
3605 v.reset(OpARM64CSEL)
3606 v.AuxInt = opToAuxInt(arm64Invert(cc))
3607 v.AddArg3(x, y, cmp)
3608 return true
3609 }
3610
3611
3612
3613 for {
3614 cc := auxIntToOp(v.AuxInt)
3615 x := v_0
3616 flag := v_2
3617 if !(ccARM64Eval(cc, flag) > 0) {
3618 break
3619 }
3620 v.copyOf(x)
3621 return true
3622 }
3623
3624
3625
3626 for {
3627 cc := auxIntToOp(v.AuxInt)
3628 y := v_1
3629 flag := v_2
3630 if !(ccARM64Eval(cc, flag) < 0) {
3631 break
3632 }
3633 v.copyOf(y)
3634 return true
3635 }
3636
3637
3638
3639 for {
3640 cc := auxIntToOp(v.AuxInt)
3641 x := v_0
3642 y := v_1
3643 if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
3644 break
3645 }
3646 boolval := v_2.Args[0]
3647 if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
3648 break
3649 }
3650 v.reset(OpARM64CSEL)
3651 v.AuxInt = opToAuxInt(boolval.Op)
3652 v.AddArg3(x, y, flagArg(boolval))
3653 return true
3654 }
3655
3656
3657
3658 for {
3659 cc := auxIntToOp(v.AuxInt)
3660 x := v_0
3661 y := v_1
3662 if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
3663 break
3664 }
3665 boolval := v_2.Args[0]
3666 if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
3667 break
3668 }
3669 v.reset(OpARM64CSEL)
3670 v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
3671 v.AddArg3(x, y, flagArg(boolval))
3672 return true
3673 }
3674 return false
3675 }
3676 func rewriteValueARM64_OpARM64CSEL0(v *Value) bool {
3677 v_1 := v.Args[1]
3678 v_0 := v.Args[0]
3679
3680
3681 for {
3682 cc := auxIntToOp(v.AuxInt)
3683 x := v_0
3684 if v_1.Op != OpARM64InvertFlags {
3685 break
3686 }
3687 cmp := v_1.Args[0]
3688 v.reset(OpARM64CSEL0)
3689 v.AuxInt = opToAuxInt(arm64Invert(cc))
3690 v.AddArg2(x, cmp)
3691 return true
3692 }
3693
3694
3695
3696 for {
3697 cc := auxIntToOp(v.AuxInt)
3698 x := v_0
3699 flag := v_1
3700 if !(ccARM64Eval(cc, flag) > 0) {
3701 break
3702 }
3703 v.copyOf(x)
3704 return true
3705 }
3706
3707
3708
3709 for {
3710 cc := auxIntToOp(v.AuxInt)
3711 flag := v_1
3712 if !(ccARM64Eval(cc, flag) < 0) {
3713 break
3714 }
3715 v.reset(OpARM64MOVDconst)
3716 v.AuxInt = int64ToAuxInt(0)
3717 return true
3718 }
3719
3720
3721
3722 for {
3723 cc := auxIntToOp(v.AuxInt)
3724 x := v_0
3725 if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
3726 break
3727 }
3728 boolval := v_1.Args[0]
3729 if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
3730 break
3731 }
3732 v.reset(OpARM64CSEL0)
3733 v.AuxInt = opToAuxInt(boolval.Op)
3734 v.AddArg2(x, flagArg(boolval))
3735 return true
3736 }
3737
3738
3739
3740 for {
3741 cc := auxIntToOp(v.AuxInt)
3742 x := v_0
3743 if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
3744 break
3745 }
3746 boolval := v_1.Args[0]
3747 if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
3748 break
3749 }
3750 v.reset(OpARM64CSEL0)
3751 v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
3752 v.AddArg2(x, flagArg(boolval))
3753 return true
3754 }
3755 return false
3756 }
3757 func rewriteValueARM64_OpARM64CSETM(v *Value) bool {
3758 v_0 := v.Args[0]
3759
3760
3761 for {
3762 cc := auxIntToOp(v.AuxInt)
3763 if v_0.Op != OpARM64InvertFlags {
3764 break
3765 }
3766 cmp := v_0.Args[0]
3767 v.reset(OpARM64CSETM)
3768 v.AuxInt = opToAuxInt(arm64Invert(cc))
3769 v.AddArg(cmp)
3770 return true
3771 }
3772
3773
3774
3775 for {
3776 cc := auxIntToOp(v.AuxInt)
3777 flag := v_0
3778 if !(ccARM64Eval(cc, flag) > 0) {
3779 break
3780 }
3781 v.reset(OpARM64MOVDconst)
3782 v.AuxInt = int64ToAuxInt(-1)
3783 return true
3784 }
3785
3786
3787
3788 for {
3789 cc := auxIntToOp(v.AuxInt)
3790 flag := v_0
3791 if !(ccARM64Eval(cc, flag) < 0) {
3792 break
3793 }
3794 v.reset(OpARM64MOVDconst)
3795 v.AuxInt = int64ToAuxInt(0)
3796 return true
3797 }
3798 return false
3799 }
3800 func rewriteValueARM64_OpARM64CSINC(v *Value) bool {
3801 v_2 := v.Args[2]
3802 v_1 := v.Args[1]
3803 v_0 := v.Args[0]
3804
3805
3806 for {
3807 cc := auxIntToOp(v.AuxInt)
3808 x := v_0
3809 y := v_1
3810 if v_2.Op != OpARM64InvertFlags {
3811 break
3812 }
3813 cmp := v_2.Args[0]
3814 v.reset(OpARM64CSINC)
3815 v.AuxInt = opToAuxInt(arm64Invert(cc))
3816 v.AddArg3(x, y, cmp)
3817 return true
3818 }
3819
3820
3821
3822 for {
3823 cc := auxIntToOp(v.AuxInt)
3824 x := v_0
3825 flag := v_2
3826 if !(ccARM64Eval(cc, flag) > 0) {
3827 break
3828 }
3829 v.copyOf(x)
3830 return true
3831 }
3832
3833
3834
3835 for {
3836 cc := auxIntToOp(v.AuxInt)
3837 y := v_1
3838 flag := v_2
3839 if !(ccARM64Eval(cc, flag) < 0) {
3840 break
3841 }
3842 v.reset(OpARM64ADDconst)
3843 v.AuxInt = int64ToAuxInt(1)
3844 v.AddArg(y)
3845 return true
3846 }
3847 return false
3848 }
3849 func rewriteValueARM64_OpARM64CSINV(v *Value) bool {
3850 v_2 := v.Args[2]
3851 v_1 := v.Args[1]
3852 v_0 := v.Args[0]
3853
3854
3855 for {
3856 cc := auxIntToOp(v.AuxInt)
3857 x := v_0
3858 y := v_1
3859 if v_2.Op != OpARM64InvertFlags {
3860 break
3861 }
3862 cmp := v_2.Args[0]
3863 v.reset(OpARM64CSINV)
3864 v.AuxInt = opToAuxInt(arm64Invert(cc))
3865 v.AddArg3(x, y, cmp)
3866 return true
3867 }
3868
3869
3870
3871 for {
3872 cc := auxIntToOp(v.AuxInt)
3873 x := v_0
3874 flag := v_2
3875 if !(ccARM64Eval(cc, flag) > 0) {
3876 break
3877 }
3878 v.copyOf(x)
3879 return true
3880 }
3881
3882
3883
3884 for {
3885 cc := auxIntToOp(v.AuxInt)
3886 y := v_1
3887 flag := v_2
3888 if !(ccARM64Eval(cc, flag) < 0) {
3889 break
3890 }
3891 v.reset(OpNot)
3892 v.AddArg(y)
3893 return true
3894 }
3895 return false
3896 }
3897 func rewriteValueARM64_OpARM64CSNEG(v *Value) bool {
3898 v_2 := v.Args[2]
3899 v_1 := v.Args[1]
3900 v_0 := v.Args[0]
3901
3902
3903 for {
3904 cc := auxIntToOp(v.AuxInt)
3905 x := v_0
3906 y := v_1
3907 if v_2.Op != OpARM64InvertFlags {
3908 break
3909 }
3910 cmp := v_2.Args[0]
3911 v.reset(OpARM64CSNEG)
3912 v.AuxInt = opToAuxInt(arm64Invert(cc))
3913 v.AddArg3(x, y, cmp)
3914 return true
3915 }
3916
3917
3918
3919 for {
3920 cc := auxIntToOp(v.AuxInt)
3921 x := v_0
3922 flag := v_2
3923 if !(ccARM64Eval(cc, flag) > 0) {
3924 break
3925 }
3926 v.copyOf(x)
3927 return true
3928 }
3929
3930
3931
3932 for {
3933 cc := auxIntToOp(v.AuxInt)
3934 y := v_1
3935 flag := v_2
3936 if !(ccARM64Eval(cc, flag) < 0) {
3937 break
3938 }
3939 v.reset(OpARM64NEG)
3940 v.AddArg(y)
3941 return true
3942 }
3943 return false
3944 }
3945 func rewriteValueARM64_OpARM64DIV(v *Value) bool {
3946 v_1 := v.Args[1]
3947 v_0 := v.Args[0]
3948
3949
3950
3951 for {
3952 if v_0.Op != OpARM64MOVDconst {
3953 break
3954 }
3955 c := auxIntToInt64(v_0.AuxInt)
3956 if v_1.Op != OpARM64MOVDconst {
3957 break
3958 }
3959 d := auxIntToInt64(v_1.AuxInt)
3960 if !(d != 0) {
3961 break
3962 }
3963 v.reset(OpARM64MOVDconst)
3964 v.AuxInt = int64ToAuxInt(c / d)
3965 return true
3966 }
3967 return false
3968 }
3969 func rewriteValueARM64_OpARM64DIVW(v *Value) bool {
3970 v_1 := v.Args[1]
3971 v_0 := v.Args[0]
3972
3973
3974
3975 for {
3976 if v_0.Op != OpARM64MOVDconst {
3977 break
3978 }
3979 c := auxIntToInt64(v_0.AuxInt)
3980 if v_1.Op != OpARM64MOVDconst {
3981 break
3982 }
3983 d := auxIntToInt64(v_1.AuxInt)
3984 if !(d != 0) {
3985 break
3986 }
3987 v.reset(OpARM64MOVDconst)
3988 v.AuxInt = int64ToAuxInt(int64(int32(c) / int32(d)))
3989 return true
3990 }
3991 return false
3992 }
3993 func rewriteValueARM64_OpARM64EON(v *Value) bool {
3994 v_1 := v.Args[1]
3995 v_0 := v.Args[0]
3996
3997
3998 for {
3999 x := v_0
4000 if v_1.Op != OpARM64MOVDconst {
4001 break
4002 }
4003 c := auxIntToInt64(v_1.AuxInt)
4004 v.reset(OpARM64XORconst)
4005 v.AuxInt = int64ToAuxInt(^c)
4006 v.AddArg(x)
4007 return true
4008 }
4009
4010
4011 for {
4012 x := v_0
4013 if x != v_1 {
4014 break
4015 }
4016 v.reset(OpARM64MOVDconst)
4017 v.AuxInt = int64ToAuxInt(-1)
4018 return true
4019 }
4020
4021
4022
4023 for {
4024 x0 := v_0
4025 x1 := v_1
4026 if x1.Op != OpARM64SLLconst {
4027 break
4028 }
4029 c := auxIntToInt64(x1.AuxInt)
4030 y := x1.Args[0]
4031 if !(clobberIfDead(x1)) {
4032 break
4033 }
4034 v.reset(OpARM64EONshiftLL)
4035 v.AuxInt = int64ToAuxInt(c)
4036 v.AddArg2(x0, y)
4037 return true
4038 }
4039
4040
4041
4042 for {
4043 x0 := v_0
4044 x1 := v_1
4045 if x1.Op != OpARM64SRLconst {
4046 break
4047 }
4048 c := auxIntToInt64(x1.AuxInt)
4049 y := x1.Args[0]
4050 if !(clobberIfDead(x1)) {
4051 break
4052 }
4053 v.reset(OpARM64EONshiftRL)
4054 v.AuxInt = int64ToAuxInt(c)
4055 v.AddArg2(x0, y)
4056 return true
4057 }
4058
4059
4060
4061 for {
4062 x0 := v_0
4063 x1 := v_1
4064 if x1.Op != OpARM64SRAconst {
4065 break
4066 }
4067 c := auxIntToInt64(x1.AuxInt)
4068 y := x1.Args[0]
4069 if !(clobberIfDead(x1)) {
4070 break
4071 }
4072 v.reset(OpARM64EONshiftRA)
4073 v.AuxInt = int64ToAuxInt(c)
4074 v.AddArg2(x0, y)
4075 return true
4076 }
4077
4078
4079
4080 for {
4081 x0 := v_0
4082 x1 := v_1
4083 if x1.Op != OpARM64RORconst {
4084 break
4085 }
4086 c := auxIntToInt64(x1.AuxInt)
4087 y := x1.Args[0]
4088 if !(clobberIfDead(x1)) {
4089 break
4090 }
4091 v.reset(OpARM64EONshiftRO)
4092 v.AuxInt = int64ToAuxInt(c)
4093 v.AddArg2(x0, y)
4094 return true
4095 }
4096 return false
4097 }
4098 func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool {
4099 v_1 := v.Args[1]
4100 v_0 := v.Args[0]
4101
4102
4103 for {
4104 d := auxIntToInt64(v.AuxInt)
4105 x := v_0
4106 if v_1.Op != OpARM64MOVDconst {
4107 break
4108 }
4109 c := auxIntToInt64(v_1.AuxInt)
4110 v.reset(OpARM64XORconst)
4111 v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
4112 v.AddArg(x)
4113 return true
4114 }
4115
4116
4117 for {
4118 c := auxIntToInt64(v.AuxInt)
4119 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
4120 break
4121 }
4122 x := v_0.Args[0]
4123 if x != v_1 {
4124 break
4125 }
4126 v.reset(OpARM64MOVDconst)
4127 v.AuxInt = int64ToAuxInt(-1)
4128 return true
4129 }
4130 return false
4131 }
4132 func rewriteValueARM64_OpARM64EONshiftRA(v *Value) bool {
4133 v_1 := v.Args[1]
4134 v_0 := v.Args[0]
4135
4136
4137 for {
4138 d := auxIntToInt64(v.AuxInt)
4139 x := v_0
4140 if v_1.Op != OpARM64MOVDconst {
4141 break
4142 }
4143 c := auxIntToInt64(v_1.AuxInt)
4144 v.reset(OpARM64XORconst)
4145 v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
4146 v.AddArg(x)
4147 return true
4148 }
4149
4150
4151 for {
4152 c := auxIntToInt64(v.AuxInt)
4153 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
4154 break
4155 }
4156 x := v_0.Args[0]
4157 if x != v_1 {
4158 break
4159 }
4160 v.reset(OpARM64MOVDconst)
4161 v.AuxInt = int64ToAuxInt(-1)
4162 return true
4163 }
4164 return false
4165 }
4166 func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool {
4167 v_1 := v.Args[1]
4168 v_0 := v.Args[0]
4169
4170
4171 for {
4172 d := auxIntToInt64(v.AuxInt)
4173 x := v_0
4174 if v_1.Op != OpARM64MOVDconst {
4175 break
4176 }
4177 c := auxIntToInt64(v_1.AuxInt)
4178 v.reset(OpARM64XORconst)
4179 v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
4180 v.AddArg(x)
4181 return true
4182 }
4183
4184
4185 for {
4186 c := auxIntToInt64(v.AuxInt)
4187 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
4188 break
4189 }
4190 x := v_0.Args[0]
4191 if x != v_1 {
4192 break
4193 }
4194 v.reset(OpARM64MOVDconst)
4195 v.AuxInt = int64ToAuxInt(-1)
4196 return true
4197 }
4198 return false
4199 }
4200 func rewriteValueARM64_OpARM64EONshiftRO(v *Value) bool {
4201 v_1 := v.Args[1]
4202 v_0 := v.Args[0]
4203
4204
4205 for {
4206 d := auxIntToInt64(v.AuxInt)
4207 x := v_0
4208 if v_1.Op != OpARM64MOVDconst {
4209 break
4210 }
4211 c := auxIntToInt64(v_1.AuxInt)
4212 v.reset(OpARM64XORconst)
4213 v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
4214 v.AddArg(x)
4215 return true
4216 }
4217
4218
4219 for {
4220 c := auxIntToInt64(v.AuxInt)
4221 if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
4222 break
4223 }
4224 x := v_0.Args[0]
4225 if x != v_1 {
4226 break
4227 }
4228 v.reset(OpARM64MOVDconst)
4229 v.AuxInt = int64ToAuxInt(-1)
4230 return true
4231 }
4232 return false
4233 }
4234 func rewriteValueARM64_OpARM64Equal(v *Value) bool {
4235 v_0 := v.Args[0]
4236
4237
4238 for {
4239 if v_0.Op != OpARM64FlagConstant {
4240 break
4241 }
4242 fc := auxIntToFlagConstant(v_0.AuxInt)
4243 v.reset(OpARM64MOVDconst)
4244 v.AuxInt = int64ToAuxInt(b2i(fc.eq()))
4245 return true
4246 }
4247
4248
4249 for {
4250 if v_0.Op != OpARM64InvertFlags {
4251 break
4252 }
4253 x := v_0.Args[0]
4254 v.reset(OpARM64Equal)
4255 v.AddArg(x)
4256 return true
4257 }
4258 return false
4259 }
4260 func rewriteValueARM64_OpARM64FADDD(v *Value) bool {
4261 v_1 := v.Args[1]
4262 v_0 := v.Args[0]
4263
4264
4265 for {
4266 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4267 a := v_0
4268 if v_1.Op != OpARM64FMULD {
4269 continue
4270 }
4271 y := v_1.Args[1]
4272 x := v_1.Args[0]
4273 v.reset(OpARM64FMADDD)
4274 v.AddArg3(a, x, y)
4275 return true
4276 }
4277 break
4278 }
4279
4280
4281 for {
4282 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4283 a := v_0
4284 if v_1.Op != OpARM64FNMULD {
4285 continue
4286 }
4287 y := v_1.Args[1]
4288 x := v_1.Args[0]
4289 v.reset(OpARM64FMSUBD)
4290 v.AddArg3(a, x, y)
4291 return true
4292 }
4293 break
4294 }
4295 return false
4296 }
4297 func rewriteValueARM64_OpARM64FADDS(v *Value) bool {
4298 v_1 := v.Args[1]
4299 v_0 := v.Args[0]
4300
4301
4302 for {
4303 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4304 a := v_0
4305 if v_1.Op != OpARM64FMULS {
4306 continue
4307 }
4308 y := v_1.Args[1]
4309 x := v_1.Args[0]
4310 v.reset(OpARM64FMADDS)
4311 v.AddArg3(a, x, y)
4312 return true
4313 }
4314 break
4315 }
4316
4317
4318 for {
4319 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4320 a := v_0
4321 if v_1.Op != OpARM64FNMULS {
4322 continue
4323 }
4324 y := v_1.Args[1]
4325 x := v_1.Args[0]
4326 v.reset(OpARM64FMSUBS)
4327 v.AddArg3(a, x, y)
4328 return true
4329 }
4330 break
4331 }
4332 return false
4333 }
4334 func rewriteValueARM64_OpARM64FCMPD(v *Value) bool {
4335 v_1 := v.Args[1]
4336 v_0 := v.Args[0]
4337 b := v.Block
4338
4339
4340 for {
4341 x := v_0
4342 if v_1.Op != OpARM64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != 0 {
4343 break
4344 }
4345 v.reset(OpARM64FCMPD0)
4346 v.AddArg(x)
4347 return true
4348 }
4349
4350
4351 for {
4352 if v_0.Op != OpARM64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != 0 {
4353 break
4354 }
4355 x := v_1
4356 v.reset(OpARM64InvertFlags)
4357 v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags)
4358 v0.AddArg(x)
4359 v.AddArg(v0)
4360 return true
4361 }
4362 return false
4363 }
4364 func rewriteValueARM64_OpARM64FCMPS(v *Value) bool {
4365 v_1 := v.Args[1]
4366 v_0 := v.Args[0]
4367 b := v.Block
4368
4369
4370 for {
4371 x := v_0
4372 if v_1.Op != OpARM64FMOVSconst || auxIntToFloat64(v_1.AuxInt) != 0 {
4373 break
4374 }
4375 v.reset(OpARM64FCMPS0)
4376 v.AddArg(x)
4377 return true
4378 }
4379
4380
4381 for {
4382 if v_0.Op != OpARM64FMOVSconst || auxIntToFloat64(v_0.AuxInt) != 0 {
4383 break
4384 }
4385 x := v_1
4386 v.reset(OpARM64InvertFlags)
4387 v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags)
4388 v0.AddArg(x)
4389 v.AddArg(v0)
4390 return true
4391 }
4392 return false
4393 }
4394 func rewriteValueARM64_OpARM64FMOVDfpgp(v *Value) bool {
4395 v_0 := v.Args[0]
4396 b := v.Block
4397
4398
4399 for {
4400 t := v.Type
4401 if v_0.Op != OpArg {
4402 break
4403 }
4404 off := auxIntToInt32(v_0.AuxInt)
4405 sym := auxToSym(v_0.Aux)
4406 b = b.Func.Entry
4407 v0 := b.NewValue0(v.Pos, OpArg, t)
4408 v.copyOf(v0)
4409 v0.AuxInt = int32ToAuxInt(off)
4410 v0.Aux = symToAux(sym)
4411 return true
4412 }
4413 return false
4414 }
4415 func rewriteValueARM64_OpARM64FMOVDgpfp(v *Value) bool {
4416 v_0 := v.Args[0]
4417 b := v.Block
4418
4419
4420 for {
4421 t := v.Type
4422 if v_0.Op != OpArg {
4423 break
4424 }
4425 off := auxIntToInt32(v_0.AuxInt)
4426 sym := auxToSym(v_0.Aux)
4427 b = b.Func.Entry
4428 v0 := b.NewValue0(v.Pos, OpArg, t)
4429 v.copyOf(v0)
4430 v0.AuxInt = int32ToAuxInt(off)
4431 v0.Aux = symToAux(sym)
4432 return true
4433 }
4434 return false
4435 }
4436 func rewriteValueARM64_OpARM64FMOVDload(v *Value) bool {
4437 v_1 := v.Args[1]
4438 v_0 := v.Args[0]
4439 b := v.Block
4440 config := b.Func.Config
4441
4442
4443 for {
4444 off := auxIntToInt32(v.AuxInt)
4445 sym := auxToSym(v.Aux)
4446 ptr := v_0
4447 if v_1.Op != OpARM64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4448 break
4449 }
4450 val := v_1.Args[1]
4451 if ptr != v_1.Args[0] {
4452 break
4453 }
4454 v.reset(OpARM64FMOVDgpfp)
4455 v.AddArg(val)
4456 return true
4457 }
4458
4459
4460
4461 for {
4462 off1 := auxIntToInt32(v.AuxInt)
4463 sym := auxToSym(v.Aux)
4464 if v_0.Op != OpARM64ADDconst {
4465 break
4466 }
4467 off2 := auxIntToInt64(v_0.AuxInt)
4468 ptr := v_0.Args[0]
4469 mem := v_1
4470 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4471 break
4472 }
4473 v.reset(OpARM64FMOVDload)
4474 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4475 v.Aux = symToAux(sym)
4476 v.AddArg2(ptr, mem)
4477 return true
4478 }
4479
4480
4481
4482 for {
4483 off := auxIntToInt32(v.AuxInt)
4484 sym := auxToSym(v.Aux)
4485 if v_0.Op != OpARM64ADD {
4486 break
4487 }
4488 idx := v_0.Args[1]
4489 ptr := v_0.Args[0]
4490 mem := v_1
4491 if !(off == 0 && sym == nil) {
4492 break
4493 }
4494 v.reset(OpARM64FMOVDloadidx)
4495 v.AddArg3(ptr, idx, mem)
4496 return true
4497 }
4498
4499
4500
4501 for {
4502 off := auxIntToInt32(v.AuxInt)
4503 sym := auxToSym(v.Aux)
4504 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
4505 break
4506 }
4507 idx := v_0.Args[1]
4508 ptr := v_0.Args[0]
4509 mem := v_1
4510 if !(off == 0 && sym == nil) {
4511 break
4512 }
4513 v.reset(OpARM64FMOVDloadidx8)
4514 v.AddArg3(ptr, idx, mem)
4515 return true
4516 }
4517
4518
4519
4520 for {
4521 off1 := auxIntToInt32(v.AuxInt)
4522 sym1 := auxToSym(v.Aux)
4523 if v_0.Op != OpARM64MOVDaddr {
4524 break
4525 }
4526 off2 := auxIntToInt32(v_0.AuxInt)
4527 sym2 := auxToSym(v_0.Aux)
4528 ptr := v_0.Args[0]
4529 mem := v_1
4530 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4531 break
4532 }
4533 v.reset(OpARM64FMOVDload)
4534 v.AuxInt = int32ToAuxInt(off1 + off2)
4535 v.Aux = symToAux(mergeSym(sym1, sym2))
4536 v.AddArg2(ptr, mem)
4537 return true
4538 }
4539 return false
4540 }
4541 func rewriteValueARM64_OpARM64FMOVDloadidx(v *Value) bool {
4542 v_2 := v.Args[2]
4543 v_1 := v.Args[1]
4544 v_0 := v.Args[0]
4545
4546
4547
4548 for {
4549 ptr := v_0
4550 if v_1.Op != OpARM64MOVDconst {
4551 break
4552 }
4553 c := auxIntToInt64(v_1.AuxInt)
4554 mem := v_2
4555 if !(is32Bit(c)) {
4556 break
4557 }
4558 v.reset(OpARM64FMOVDload)
4559 v.AuxInt = int32ToAuxInt(int32(c))
4560 v.AddArg2(ptr, mem)
4561 return true
4562 }
4563
4564
4565
4566 for {
4567 if v_0.Op != OpARM64MOVDconst {
4568 break
4569 }
4570 c := auxIntToInt64(v_0.AuxInt)
4571 ptr := v_1
4572 mem := v_2
4573 if !(is32Bit(c)) {
4574 break
4575 }
4576 v.reset(OpARM64FMOVDload)
4577 v.AuxInt = int32ToAuxInt(int32(c))
4578 v.AddArg2(ptr, mem)
4579 return true
4580 }
4581
4582
4583 for {
4584 ptr := v_0
4585 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
4586 break
4587 }
4588 idx := v_1.Args[0]
4589 mem := v_2
4590 v.reset(OpARM64FMOVDloadidx8)
4591 v.AddArg3(ptr, idx, mem)
4592 return true
4593 }
4594
4595
4596 for {
4597 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
4598 break
4599 }
4600 idx := v_0.Args[0]
4601 ptr := v_1
4602 mem := v_2
4603 v.reset(OpARM64FMOVDloadidx8)
4604 v.AddArg3(ptr, idx, mem)
4605 return true
4606 }
4607 return false
4608 }
4609 func rewriteValueARM64_OpARM64FMOVDloadidx8(v *Value) bool {
4610 v_2 := v.Args[2]
4611 v_1 := v.Args[1]
4612 v_0 := v.Args[0]
4613
4614
4615
4616 for {
4617 ptr := v_0
4618 if v_1.Op != OpARM64MOVDconst {
4619 break
4620 }
4621 c := auxIntToInt64(v_1.AuxInt)
4622 mem := v_2
4623 if !(is32Bit(c << 3)) {
4624 break
4625 }
4626 v.reset(OpARM64FMOVDload)
4627 v.AuxInt = int32ToAuxInt(int32(c) << 3)
4628 v.AddArg2(ptr, mem)
4629 return true
4630 }
4631 return false
4632 }
4633 func rewriteValueARM64_OpARM64FMOVDstore(v *Value) bool {
4634 v_2 := v.Args[2]
4635 v_1 := v.Args[1]
4636 v_0 := v.Args[0]
4637 b := v.Block
4638 config := b.Func.Config
4639
4640
4641 for {
4642 off := auxIntToInt32(v.AuxInt)
4643 sym := auxToSym(v.Aux)
4644 ptr := v_0
4645 if v_1.Op != OpARM64FMOVDgpfp {
4646 break
4647 }
4648 val := v_1.Args[0]
4649 mem := v_2
4650 v.reset(OpARM64MOVDstore)
4651 v.AuxInt = int32ToAuxInt(off)
4652 v.Aux = symToAux(sym)
4653 v.AddArg3(ptr, val, mem)
4654 return true
4655 }
4656
4657
4658
4659 for {
4660 off1 := auxIntToInt32(v.AuxInt)
4661 sym := auxToSym(v.Aux)
4662 if v_0.Op != OpARM64ADDconst {
4663 break
4664 }
4665 off2 := auxIntToInt64(v_0.AuxInt)
4666 ptr := v_0.Args[0]
4667 val := v_1
4668 mem := v_2
4669 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4670 break
4671 }
4672 v.reset(OpARM64FMOVDstore)
4673 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4674 v.Aux = symToAux(sym)
4675 v.AddArg3(ptr, val, mem)
4676 return true
4677 }
4678
4679
4680
4681 for {
4682 off := auxIntToInt32(v.AuxInt)
4683 sym := auxToSym(v.Aux)
4684 if v_0.Op != OpARM64ADD {
4685 break
4686 }
4687 idx := v_0.Args[1]
4688 ptr := v_0.Args[0]
4689 val := v_1
4690 mem := v_2
4691 if !(off == 0 && sym == nil) {
4692 break
4693 }
4694 v.reset(OpARM64FMOVDstoreidx)
4695 v.AddArg4(ptr, idx, val, mem)
4696 return true
4697 }
4698
4699
4700
4701 for {
4702 off := auxIntToInt32(v.AuxInt)
4703 sym := auxToSym(v.Aux)
4704 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
4705 break
4706 }
4707 idx := v_0.Args[1]
4708 ptr := v_0.Args[0]
4709 val := v_1
4710 mem := v_2
4711 if !(off == 0 && sym == nil) {
4712 break
4713 }
4714 v.reset(OpARM64FMOVDstoreidx8)
4715 v.AddArg4(ptr, idx, val, mem)
4716 return true
4717 }
4718
4719
4720
4721 for {
4722 off1 := auxIntToInt32(v.AuxInt)
4723 sym1 := auxToSym(v.Aux)
4724 if v_0.Op != OpARM64MOVDaddr {
4725 break
4726 }
4727 off2 := auxIntToInt32(v_0.AuxInt)
4728 sym2 := auxToSym(v_0.Aux)
4729 ptr := v_0.Args[0]
4730 val := v_1
4731 mem := v_2
4732 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4733 break
4734 }
4735 v.reset(OpARM64FMOVDstore)
4736 v.AuxInt = int32ToAuxInt(off1 + off2)
4737 v.Aux = symToAux(mergeSym(sym1, sym2))
4738 v.AddArg3(ptr, val, mem)
4739 return true
4740 }
4741 return false
4742 }
4743 func rewriteValueARM64_OpARM64FMOVDstoreidx(v *Value) bool {
4744 v_3 := v.Args[3]
4745 v_2 := v.Args[2]
4746 v_1 := v.Args[1]
4747 v_0 := v.Args[0]
4748
4749
4750
4751 for {
4752 ptr := v_0
4753 if v_1.Op != OpARM64MOVDconst {
4754 break
4755 }
4756 c := auxIntToInt64(v_1.AuxInt)
4757 val := v_2
4758 mem := v_3
4759 if !(is32Bit(c)) {
4760 break
4761 }
4762 v.reset(OpARM64FMOVDstore)
4763 v.AuxInt = int32ToAuxInt(int32(c))
4764 v.AddArg3(ptr, val, mem)
4765 return true
4766 }
4767
4768
4769
4770 for {
4771 if v_0.Op != OpARM64MOVDconst {
4772 break
4773 }
4774 c := auxIntToInt64(v_0.AuxInt)
4775 idx := v_1
4776 val := v_2
4777 mem := v_3
4778 if !(is32Bit(c)) {
4779 break
4780 }
4781 v.reset(OpARM64FMOVDstore)
4782 v.AuxInt = int32ToAuxInt(int32(c))
4783 v.AddArg3(idx, val, mem)
4784 return true
4785 }
4786
4787
4788 for {
4789 ptr := v_0
4790 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
4791 break
4792 }
4793 idx := v_1.Args[0]
4794 val := v_2
4795 mem := v_3
4796 v.reset(OpARM64FMOVDstoreidx8)
4797 v.AddArg4(ptr, idx, val, mem)
4798 return true
4799 }
4800
4801
4802 for {
4803 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
4804 break
4805 }
4806 idx := v_0.Args[0]
4807 ptr := v_1
4808 val := v_2
4809 mem := v_3
4810 v.reset(OpARM64FMOVDstoreidx8)
4811 v.AddArg4(ptr, idx, val, mem)
4812 return true
4813 }
4814 return false
4815 }
4816 func rewriteValueARM64_OpARM64FMOVDstoreidx8(v *Value) bool {
4817 v_3 := v.Args[3]
4818 v_2 := v.Args[2]
4819 v_1 := v.Args[1]
4820 v_0 := v.Args[0]
4821
4822
4823
4824 for {
4825 ptr := v_0
4826 if v_1.Op != OpARM64MOVDconst {
4827 break
4828 }
4829 c := auxIntToInt64(v_1.AuxInt)
4830 val := v_2
4831 mem := v_3
4832 if !(is32Bit(c << 3)) {
4833 break
4834 }
4835 v.reset(OpARM64FMOVDstore)
4836 v.AuxInt = int32ToAuxInt(int32(c) << 3)
4837 v.AddArg3(ptr, val, mem)
4838 return true
4839 }
4840 return false
4841 }
4842 func rewriteValueARM64_OpARM64FMOVSload(v *Value) bool {
4843 v_1 := v.Args[1]
4844 v_0 := v.Args[0]
4845 b := v.Block
4846 config := b.Func.Config
4847
4848
4849 for {
4850 off := auxIntToInt32(v.AuxInt)
4851 sym := auxToSym(v.Aux)
4852 ptr := v_0
4853 if v_1.Op != OpARM64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4854 break
4855 }
4856 val := v_1.Args[1]
4857 if ptr != v_1.Args[0] {
4858 break
4859 }
4860 v.reset(OpARM64FMOVSgpfp)
4861 v.AddArg(val)
4862 return true
4863 }
4864
4865
4866
4867 for {
4868 off1 := auxIntToInt32(v.AuxInt)
4869 sym := auxToSym(v.Aux)
4870 if v_0.Op != OpARM64ADDconst {
4871 break
4872 }
4873 off2 := auxIntToInt64(v_0.AuxInt)
4874 ptr := v_0.Args[0]
4875 mem := v_1
4876 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4877 break
4878 }
4879 v.reset(OpARM64FMOVSload)
4880 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4881 v.Aux = symToAux(sym)
4882 v.AddArg2(ptr, mem)
4883 return true
4884 }
4885
4886
4887
4888 for {
4889 off := auxIntToInt32(v.AuxInt)
4890 sym := auxToSym(v.Aux)
4891 if v_0.Op != OpARM64ADD {
4892 break
4893 }
4894 idx := v_0.Args[1]
4895 ptr := v_0.Args[0]
4896 mem := v_1
4897 if !(off == 0 && sym == nil) {
4898 break
4899 }
4900 v.reset(OpARM64FMOVSloadidx)
4901 v.AddArg3(ptr, idx, mem)
4902 return true
4903 }
4904
4905
4906
4907 for {
4908 off := auxIntToInt32(v.AuxInt)
4909 sym := auxToSym(v.Aux)
4910 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
4911 break
4912 }
4913 idx := v_0.Args[1]
4914 ptr := v_0.Args[0]
4915 mem := v_1
4916 if !(off == 0 && sym == nil) {
4917 break
4918 }
4919 v.reset(OpARM64FMOVSloadidx4)
4920 v.AddArg3(ptr, idx, mem)
4921 return true
4922 }
4923
4924
4925
4926 for {
4927 off1 := auxIntToInt32(v.AuxInt)
4928 sym1 := auxToSym(v.Aux)
4929 if v_0.Op != OpARM64MOVDaddr {
4930 break
4931 }
4932 off2 := auxIntToInt32(v_0.AuxInt)
4933 sym2 := auxToSym(v_0.Aux)
4934 ptr := v_0.Args[0]
4935 mem := v_1
4936 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4937 break
4938 }
4939 v.reset(OpARM64FMOVSload)
4940 v.AuxInt = int32ToAuxInt(off1 + off2)
4941 v.Aux = symToAux(mergeSym(sym1, sym2))
4942 v.AddArg2(ptr, mem)
4943 return true
4944 }
4945 return false
4946 }
4947 func rewriteValueARM64_OpARM64FMOVSloadidx(v *Value) bool {
4948 v_2 := v.Args[2]
4949 v_1 := v.Args[1]
4950 v_0 := v.Args[0]
4951
4952
4953
4954 for {
4955 ptr := v_0
4956 if v_1.Op != OpARM64MOVDconst {
4957 break
4958 }
4959 c := auxIntToInt64(v_1.AuxInt)
4960 mem := v_2
4961 if !(is32Bit(c)) {
4962 break
4963 }
4964 v.reset(OpARM64FMOVSload)
4965 v.AuxInt = int32ToAuxInt(int32(c))
4966 v.AddArg2(ptr, mem)
4967 return true
4968 }
4969
4970
4971
4972 for {
4973 if v_0.Op != OpARM64MOVDconst {
4974 break
4975 }
4976 c := auxIntToInt64(v_0.AuxInt)
4977 ptr := v_1
4978 mem := v_2
4979 if !(is32Bit(c)) {
4980 break
4981 }
4982 v.reset(OpARM64FMOVSload)
4983 v.AuxInt = int32ToAuxInt(int32(c))
4984 v.AddArg2(ptr, mem)
4985 return true
4986 }
4987
4988
4989 for {
4990 ptr := v_0
4991 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
4992 break
4993 }
4994 idx := v_1.Args[0]
4995 mem := v_2
4996 v.reset(OpARM64FMOVSloadidx4)
4997 v.AddArg3(ptr, idx, mem)
4998 return true
4999 }
5000
5001
5002 for {
5003 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
5004 break
5005 }
5006 idx := v_0.Args[0]
5007 ptr := v_1
5008 mem := v_2
5009 v.reset(OpARM64FMOVSloadidx4)
5010 v.AddArg3(ptr, idx, mem)
5011 return true
5012 }
5013 return false
5014 }
5015 func rewriteValueARM64_OpARM64FMOVSloadidx4(v *Value) bool {
5016 v_2 := v.Args[2]
5017 v_1 := v.Args[1]
5018 v_0 := v.Args[0]
5019
5020
5021
5022 for {
5023 ptr := v_0
5024 if v_1.Op != OpARM64MOVDconst {
5025 break
5026 }
5027 c := auxIntToInt64(v_1.AuxInt)
5028 mem := v_2
5029 if !(is32Bit(c << 2)) {
5030 break
5031 }
5032 v.reset(OpARM64FMOVSload)
5033 v.AuxInt = int32ToAuxInt(int32(c) << 2)
5034 v.AddArg2(ptr, mem)
5035 return true
5036 }
5037 return false
5038 }
5039 func rewriteValueARM64_OpARM64FMOVSstore(v *Value) bool {
5040 v_2 := v.Args[2]
5041 v_1 := v.Args[1]
5042 v_0 := v.Args[0]
5043 b := v.Block
5044 config := b.Func.Config
5045
5046
5047 for {
5048 off := auxIntToInt32(v.AuxInt)
5049 sym := auxToSym(v.Aux)
5050 ptr := v_0
5051 if v_1.Op != OpARM64FMOVSgpfp {
5052 break
5053 }
5054 val := v_1.Args[0]
5055 mem := v_2
5056 v.reset(OpARM64MOVWstore)
5057 v.AuxInt = int32ToAuxInt(off)
5058 v.Aux = symToAux(sym)
5059 v.AddArg3(ptr, val, mem)
5060 return true
5061 }
5062
5063
5064
5065 for {
5066 off1 := auxIntToInt32(v.AuxInt)
5067 sym := auxToSym(v.Aux)
5068 if v_0.Op != OpARM64ADDconst {
5069 break
5070 }
5071 off2 := auxIntToInt64(v_0.AuxInt)
5072 ptr := v_0.Args[0]
5073 val := v_1
5074 mem := v_2
5075 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5076 break
5077 }
5078 v.reset(OpARM64FMOVSstore)
5079 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5080 v.Aux = symToAux(sym)
5081 v.AddArg3(ptr, val, mem)
5082 return true
5083 }
5084
5085
5086
5087 for {
5088 off := auxIntToInt32(v.AuxInt)
5089 sym := auxToSym(v.Aux)
5090 if v_0.Op != OpARM64ADD {
5091 break
5092 }
5093 idx := v_0.Args[1]
5094 ptr := v_0.Args[0]
5095 val := v_1
5096 mem := v_2
5097 if !(off == 0 && sym == nil) {
5098 break
5099 }
5100 v.reset(OpARM64FMOVSstoreidx)
5101 v.AddArg4(ptr, idx, val, mem)
5102 return true
5103 }
5104
5105
5106
5107 for {
5108 off := auxIntToInt32(v.AuxInt)
5109 sym := auxToSym(v.Aux)
5110 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
5111 break
5112 }
5113 idx := v_0.Args[1]
5114 ptr := v_0.Args[0]
5115 val := v_1
5116 mem := v_2
5117 if !(off == 0 && sym == nil) {
5118 break
5119 }
5120 v.reset(OpARM64FMOVSstoreidx4)
5121 v.AddArg4(ptr, idx, val, mem)
5122 return true
5123 }
5124
5125
5126
5127 for {
5128 off1 := auxIntToInt32(v.AuxInt)
5129 sym1 := auxToSym(v.Aux)
5130 if v_0.Op != OpARM64MOVDaddr {
5131 break
5132 }
5133 off2 := auxIntToInt32(v_0.AuxInt)
5134 sym2 := auxToSym(v_0.Aux)
5135 ptr := v_0.Args[0]
5136 val := v_1
5137 mem := v_2
5138 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5139 break
5140 }
5141 v.reset(OpARM64FMOVSstore)
5142 v.AuxInt = int32ToAuxInt(off1 + off2)
5143 v.Aux = symToAux(mergeSym(sym1, sym2))
5144 v.AddArg3(ptr, val, mem)
5145 return true
5146 }
5147 return false
5148 }
5149 func rewriteValueARM64_OpARM64FMOVSstoreidx(v *Value) bool {
5150 v_3 := v.Args[3]
5151 v_2 := v.Args[2]
5152 v_1 := v.Args[1]
5153 v_0 := v.Args[0]
5154
5155
5156
5157 for {
5158 ptr := v_0
5159 if v_1.Op != OpARM64MOVDconst {
5160 break
5161 }
5162 c := auxIntToInt64(v_1.AuxInt)
5163 val := v_2
5164 mem := v_3
5165 if !(is32Bit(c)) {
5166 break
5167 }
5168 v.reset(OpARM64FMOVSstore)
5169 v.AuxInt = int32ToAuxInt(int32(c))
5170 v.AddArg3(ptr, val, mem)
5171 return true
5172 }
5173
5174
5175
5176 for {
5177 if v_0.Op != OpARM64MOVDconst {
5178 break
5179 }
5180 c := auxIntToInt64(v_0.AuxInt)
5181 idx := v_1
5182 val := v_2
5183 mem := v_3
5184 if !(is32Bit(c)) {
5185 break
5186 }
5187 v.reset(OpARM64FMOVSstore)
5188 v.AuxInt = int32ToAuxInt(int32(c))
5189 v.AddArg3(idx, val, mem)
5190 return true
5191 }
5192
5193
5194 for {
5195 ptr := v_0
5196 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
5197 break
5198 }
5199 idx := v_1.Args[0]
5200 val := v_2
5201 mem := v_3
5202 v.reset(OpARM64FMOVSstoreidx4)
5203 v.AddArg4(ptr, idx, val, mem)
5204 return true
5205 }
5206
5207
5208 for {
5209 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
5210 break
5211 }
5212 idx := v_0.Args[0]
5213 ptr := v_1
5214 val := v_2
5215 mem := v_3
5216 v.reset(OpARM64FMOVSstoreidx4)
5217 v.AddArg4(ptr, idx, val, mem)
5218 return true
5219 }
5220 return false
5221 }
5222 func rewriteValueARM64_OpARM64FMOVSstoreidx4(v *Value) bool {
5223 v_3 := v.Args[3]
5224 v_2 := v.Args[2]
5225 v_1 := v.Args[1]
5226 v_0 := v.Args[0]
5227
5228
5229
5230 for {
5231 ptr := v_0
5232 if v_1.Op != OpARM64MOVDconst {
5233 break
5234 }
5235 c := auxIntToInt64(v_1.AuxInt)
5236 val := v_2
5237 mem := v_3
5238 if !(is32Bit(c << 2)) {
5239 break
5240 }
5241 v.reset(OpARM64FMOVSstore)
5242 v.AuxInt = int32ToAuxInt(int32(c) << 2)
5243 v.AddArg3(ptr, val, mem)
5244 return true
5245 }
5246 return false
5247 }
5248 func rewriteValueARM64_OpARM64FMULD(v *Value) bool {
5249 v_1 := v.Args[1]
5250 v_0 := v.Args[0]
5251
5252
5253 for {
5254 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5255 if v_0.Op != OpARM64FNEGD {
5256 continue
5257 }
5258 x := v_0.Args[0]
5259 y := v_1
5260 v.reset(OpARM64FNMULD)
5261 v.AddArg2(x, y)
5262 return true
5263 }
5264 break
5265 }
5266 return false
5267 }
5268 func rewriteValueARM64_OpARM64FMULS(v *Value) bool {
5269 v_1 := v.Args[1]
5270 v_0 := v.Args[0]
5271
5272
5273 for {
5274 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5275 if v_0.Op != OpARM64FNEGS {
5276 continue
5277 }
5278 x := v_0.Args[0]
5279 y := v_1
5280 v.reset(OpARM64FNMULS)
5281 v.AddArg2(x, y)
5282 return true
5283 }
5284 break
5285 }
5286 return false
5287 }
5288 func rewriteValueARM64_OpARM64FNEGD(v *Value) bool {
5289 v_0 := v.Args[0]
5290
5291
5292 for {
5293 if v_0.Op != OpARM64FMULD {
5294 break
5295 }
5296 y := v_0.Args[1]
5297 x := v_0.Args[0]
5298 v.reset(OpARM64FNMULD)
5299 v.AddArg2(x, y)
5300 return true
5301 }
5302
5303
5304 for {
5305 if v_0.Op != OpARM64FNMULD {
5306 break
5307 }
5308 y := v_0.Args[1]
5309 x := v_0.Args[0]
5310 v.reset(OpARM64FMULD)
5311 v.AddArg2(x, y)
5312 return true
5313 }
5314 return false
5315 }
5316 func rewriteValueARM64_OpARM64FNEGS(v *Value) bool {
5317 v_0 := v.Args[0]
5318
5319
5320 for {
5321 if v_0.Op != OpARM64FMULS {
5322 break
5323 }
5324 y := v_0.Args[1]
5325 x := v_0.Args[0]
5326 v.reset(OpARM64FNMULS)
5327 v.AddArg2(x, y)
5328 return true
5329 }
5330
5331
5332 for {
5333 if v_0.Op != OpARM64FNMULS {
5334 break
5335 }
5336 y := v_0.Args[1]
5337 x := v_0.Args[0]
5338 v.reset(OpARM64FMULS)
5339 v.AddArg2(x, y)
5340 return true
5341 }
5342 return false
5343 }
5344 func rewriteValueARM64_OpARM64FNMULD(v *Value) bool {
5345 v_1 := v.Args[1]
5346 v_0 := v.Args[0]
5347
5348
5349 for {
5350 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5351 if v_0.Op != OpARM64FNEGD {
5352 continue
5353 }
5354 x := v_0.Args[0]
5355 y := v_1
5356 v.reset(OpARM64FMULD)
5357 v.AddArg2(x, y)
5358 return true
5359 }
5360 break
5361 }
5362 return false
5363 }
5364 func rewriteValueARM64_OpARM64FNMULS(v *Value) bool {
5365 v_1 := v.Args[1]
5366 v_0 := v.Args[0]
5367
5368
5369 for {
5370 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5371 if v_0.Op != OpARM64FNEGS {
5372 continue
5373 }
5374 x := v_0.Args[0]
5375 y := v_1
5376 v.reset(OpARM64FMULS)
5377 v.AddArg2(x, y)
5378 return true
5379 }
5380 break
5381 }
5382 return false
5383 }
5384 func rewriteValueARM64_OpARM64FSUBD(v *Value) bool {
5385 v_1 := v.Args[1]
5386 v_0 := v.Args[0]
5387
5388
5389 for {
5390 a := v_0
5391 if v_1.Op != OpARM64FMULD {
5392 break
5393 }
5394 y := v_1.Args[1]
5395 x := v_1.Args[0]
5396 v.reset(OpARM64FMSUBD)
5397 v.AddArg3(a, x, y)
5398 return true
5399 }
5400
5401
5402 for {
5403 if v_0.Op != OpARM64FMULD {
5404 break
5405 }
5406 y := v_0.Args[1]
5407 x := v_0.Args[0]
5408 a := v_1
5409 v.reset(OpARM64FNMSUBD)
5410 v.AddArg3(a, x, y)
5411 return true
5412 }
5413
5414
5415 for {
5416 a := v_0
5417 if v_1.Op != OpARM64FNMULD {
5418 break
5419 }
5420 y := v_1.Args[1]
5421 x := v_1.Args[0]
5422 v.reset(OpARM64FMADDD)
5423 v.AddArg3(a, x, y)
5424 return true
5425 }
5426
5427
5428 for {
5429 if v_0.Op != OpARM64FNMULD {
5430 break
5431 }
5432 y := v_0.Args[1]
5433 x := v_0.Args[0]
5434 a := v_1
5435 v.reset(OpARM64FNMADDD)
5436 v.AddArg3(a, x, y)
5437 return true
5438 }
5439 return false
5440 }
5441 func rewriteValueARM64_OpARM64FSUBS(v *Value) bool {
5442 v_1 := v.Args[1]
5443 v_0 := v.Args[0]
5444
5445
5446 for {
5447 a := v_0
5448 if v_1.Op != OpARM64FMULS {
5449 break
5450 }
5451 y := v_1.Args[1]
5452 x := v_1.Args[0]
5453 v.reset(OpARM64FMSUBS)
5454 v.AddArg3(a, x, y)
5455 return true
5456 }
5457
5458
5459 for {
5460 if v_0.Op != OpARM64FMULS {
5461 break
5462 }
5463 y := v_0.Args[1]
5464 x := v_0.Args[0]
5465 a := v_1
5466 v.reset(OpARM64FNMSUBS)
5467 v.AddArg3(a, x, y)
5468 return true
5469 }
5470
5471
5472 for {
5473 a := v_0
5474 if v_1.Op != OpARM64FNMULS {
5475 break
5476 }
5477 y := v_1.Args[1]
5478 x := v_1.Args[0]
5479 v.reset(OpARM64FMADDS)
5480 v.AddArg3(a, x, y)
5481 return true
5482 }
5483
5484
5485 for {
5486 if v_0.Op != OpARM64FNMULS {
5487 break
5488 }
5489 y := v_0.Args[1]
5490 x := v_0.Args[0]
5491 a := v_1
5492 v.reset(OpARM64FNMADDS)
5493 v.AddArg3(a, x, y)
5494 return true
5495 }
5496 return false
5497 }
5498 func rewriteValueARM64_OpARM64GreaterEqual(v *Value) bool {
5499 v_0 := v.Args[0]
5500
5501
5502 for {
5503 if v_0.Op != OpARM64FlagConstant {
5504 break
5505 }
5506 fc := auxIntToFlagConstant(v_0.AuxInt)
5507 v.reset(OpARM64MOVDconst)
5508 v.AuxInt = int64ToAuxInt(b2i(fc.ge()))
5509 return true
5510 }
5511
5512
5513 for {
5514 if v_0.Op != OpARM64InvertFlags {
5515 break
5516 }
5517 x := v_0.Args[0]
5518 v.reset(OpARM64LessEqual)
5519 v.AddArg(x)
5520 return true
5521 }
5522 return false
5523 }
5524 func rewriteValueARM64_OpARM64GreaterEqualF(v *Value) bool {
5525 v_0 := v.Args[0]
5526
5527
5528 for {
5529 if v_0.Op != OpARM64InvertFlags {
5530 break
5531 }
5532 x := v_0.Args[0]
5533 v.reset(OpARM64LessEqualF)
5534 v.AddArg(x)
5535 return true
5536 }
5537 return false
5538 }
5539 func rewriteValueARM64_OpARM64GreaterEqualU(v *Value) bool {
5540 v_0 := v.Args[0]
5541
5542
5543 for {
5544 if v_0.Op != OpARM64FlagConstant {
5545 break
5546 }
5547 fc := auxIntToFlagConstant(v_0.AuxInt)
5548 v.reset(OpARM64MOVDconst)
5549 v.AuxInt = int64ToAuxInt(b2i(fc.uge()))
5550 return true
5551 }
5552
5553
5554 for {
5555 if v_0.Op != OpARM64InvertFlags {
5556 break
5557 }
5558 x := v_0.Args[0]
5559 v.reset(OpARM64LessEqualU)
5560 v.AddArg(x)
5561 return true
5562 }
5563 return false
5564 }
5565 func rewriteValueARM64_OpARM64GreaterThan(v *Value) bool {
5566 v_0 := v.Args[0]
5567
5568
5569 for {
5570 if v_0.Op != OpARM64FlagConstant {
5571 break
5572 }
5573 fc := auxIntToFlagConstant(v_0.AuxInt)
5574 v.reset(OpARM64MOVDconst)
5575 v.AuxInt = int64ToAuxInt(b2i(fc.gt()))
5576 return true
5577 }
5578
5579
5580 for {
5581 if v_0.Op != OpARM64InvertFlags {
5582 break
5583 }
5584 x := v_0.Args[0]
5585 v.reset(OpARM64LessThan)
5586 v.AddArg(x)
5587 return true
5588 }
5589 return false
5590 }
5591 func rewriteValueARM64_OpARM64GreaterThanF(v *Value) bool {
5592 v_0 := v.Args[0]
5593
5594
5595 for {
5596 if v_0.Op != OpARM64InvertFlags {
5597 break
5598 }
5599 x := v_0.Args[0]
5600 v.reset(OpARM64LessThanF)
5601 v.AddArg(x)
5602 return true
5603 }
5604 return false
5605 }
5606 func rewriteValueARM64_OpARM64GreaterThanU(v *Value) bool {
5607 v_0 := v.Args[0]
5608
5609
5610 for {
5611 if v_0.Op != OpARM64FlagConstant {
5612 break
5613 }
5614 fc := auxIntToFlagConstant(v_0.AuxInt)
5615 v.reset(OpARM64MOVDconst)
5616 v.AuxInt = int64ToAuxInt(b2i(fc.ugt()))
5617 return true
5618 }
5619
5620
5621 for {
5622 if v_0.Op != OpARM64InvertFlags {
5623 break
5624 }
5625 x := v_0.Args[0]
5626 v.reset(OpARM64LessThanU)
5627 v.AddArg(x)
5628 return true
5629 }
5630 return false
5631 }
5632 func rewriteValueARM64_OpARM64LessEqual(v *Value) bool {
5633 v_0 := v.Args[0]
5634
5635
5636 for {
5637 if v_0.Op != OpARM64FlagConstant {
5638 break
5639 }
5640 fc := auxIntToFlagConstant(v_0.AuxInt)
5641 v.reset(OpARM64MOVDconst)
5642 v.AuxInt = int64ToAuxInt(b2i(fc.le()))
5643 return true
5644 }
5645
5646
5647 for {
5648 if v_0.Op != OpARM64InvertFlags {
5649 break
5650 }
5651 x := v_0.Args[0]
5652 v.reset(OpARM64GreaterEqual)
5653 v.AddArg(x)
5654 return true
5655 }
5656 return false
5657 }
5658 func rewriteValueARM64_OpARM64LessEqualF(v *Value) bool {
5659 v_0 := v.Args[0]
5660
5661
5662 for {
5663 if v_0.Op != OpARM64InvertFlags {
5664 break
5665 }
5666 x := v_0.Args[0]
5667 v.reset(OpARM64GreaterEqualF)
5668 v.AddArg(x)
5669 return true
5670 }
5671 return false
5672 }
5673 func rewriteValueARM64_OpARM64LessEqualU(v *Value) bool {
5674 v_0 := v.Args[0]
5675
5676
5677 for {
5678 if v_0.Op != OpARM64FlagConstant {
5679 break
5680 }
5681 fc := auxIntToFlagConstant(v_0.AuxInt)
5682 v.reset(OpARM64MOVDconst)
5683 v.AuxInt = int64ToAuxInt(b2i(fc.ule()))
5684 return true
5685 }
5686
5687
5688 for {
5689 if v_0.Op != OpARM64InvertFlags {
5690 break
5691 }
5692 x := v_0.Args[0]
5693 v.reset(OpARM64GreaterEqualU)
5694 v.AddArg(x)
5695 return true
5696 }
5697 return false
5698 }
5699 func rewriteValueARM64_OpARM64LessThan(v *Value) bool {
5700 v_0 := v.Args[0]
5701
5702
5703 for {
5704 if v_0.Op != OpARM64FlagConstant {
5705 break
5706 }
5707 fc := auxIntToFlagConstant(v_0.AuxInt)
5708 v.reset(OpARM64MOVDconst)
5709 v.AuxInt = int64ToAuxInt(b2i(fc.lt()))
5710 return true
5711 }
5712
5713
5714 for {
5715 if v_0.Op != OpARM64InvertFlags {
5716 break
5717 }
5718 x := v_0.Args[0]
5719 v.reset(OpARM64GreaterThan)
5720 v.AddArg(x)
5721 return true
5722 }
5723 return false
5724 }
5725 func rewriteValueARM64_OpARM64LessThanF(v *Value) bool {
5726 v_0 := v.Args[0]
5727
5728
5729 for {
5730 if v_0.Op != OpARM64InvertFlags {
5731 break
5732 }
5733 x := v_0.Args[0]
5734 v.reset(OpARM64GreaterThanF)
5735 v.AddArg(x)
5736 return true
5737 }
5738 return false
5739 }
5740 func rewriteValueARM64_OpARM64LessThanU(v *Value) bool {
5741 v_0 := v.Args[0]
5742
5743
5744 for {
5745 if v_0.Op != OpARM64FlagConstant {
5746 break
5747 }
5748 fc := auxIntToFlagConstant(v_0.AuxInt)
5749 v.reset(OpARM64MOVDconst)
5750 v.AuxInt = int64ToAuxInt(b2i(fc.ult()))
5751 return true
5752 }
5753
5754
5755 for {
5756 if v_0.Op != OpARM64InvertFlags {
5757 break
5758 }
5759 x := v_0.Args[0]
5760 v.reset(OpARM64GreaterThanU)
5761 v.AddArg(x)
5762 return true
5763 }
5764 return false
5765 }
5766 func rewriteValueARM64_OpARM64MADD(v *Value) bool {
5767 v_2 := v.Args[2]
5768 v_1 := v.Args[1]
5769 v_0 := v.Args[0]
5770 b := v.Block
5771
5772
5773 for {
5774 a := v_0
5775 x := v_1
5776 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
5777 break
5778 }
5779 v.reset(OpARM64SUB)
5780 v.AddArg2(a, x)
5781 return true
5782 }
5783
5784
5785 for {
5786 a := v_0
5787 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
5788 break
5789 }
5790 v.copyOf(a)
5791 return true
5792 }
5793
5794
5795 for {
5796 a := v_0
5797 x := v_1
5798 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
5799 break
5800 }
5801 v.reset(OpARM64ADD)
5802 v.AddArg2(a, x)
5803 return true
5804 }
5805
5806
5807
5808 for {
5809 a := v_0
5810 x := v_1
5811 if v_2.Op != OpARM64MOVDconst {
5812 break
5813 }
5814 c := auxIntToInt64(v_2.AuxInt)
5815 if !(isPowerOfTwo64(c)) {
5816 break
5817 }
5818 v.reset(OpARM64ADDshiftLL)
5819 v.AuxInt = int64ToAuxInt(log64(c))
5820 v.AddArg2(a, x)
5821 return true
5822 }
5823
5824
5825
5826 for {
5827 a := v_0
5828 x := v_1
5829 if v_2.Op != OpARM64MOVDconst {
5830 break
5831 }
5832 c := auxIntToInt64(v_2.AuxInt)
5833 if !(isPowerOfTwo64(c-1) && c >= 3) {
5834 break
5835 }
5836 v.reset(OpARM64ADD)
5837 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
5838 v0.AuxInt = int64ToAuxInt(log64(c - 1))
5839 v0.AddArg2(x, x)
5840 v.AddArg2(a, v0)
5841 return true
5842 }
5843
5844
5845
5846 for {
5847 a := v_0
5848 x := v_1
5849 if v_2.Op != OpARM64MOVDconst {
5850 break
5851 }
5852 c := auxIntToInt64(v_2.AuxInt)
5853 if !(isPowerOfTwo64(c+1) && c >= 7) {
5854 break
5855 }
5856 v.reset(OpARM64SUB)
5857 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
5858 v0.AuxInt = int64ToAuxInt(log64(c + 1))
5859 v0.AddArg2(x, x)
5860 v.AddArg2(a, v0)
5861 return true
5862 }
5863
5864
5865
5866 for {
5867 a := v_0
5868 x := v_1
5869 if v_2.Op != OpARM64MOVDconst {
5870 break
5871 }
5872 c := auxIntToInt64(v_2.AuxInt)
5873 if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
5874 break
5875 }
5876 v.reset(OpARM64SUBshiftLL)
5877 v.AuxInt = int64ToAuxInt(log64(c / 3))
5878 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
5879 v0.AuxInt = int64ToAuxInt(2)
5880 v0.AddArg2(x, x)
5881 v.AddArg2(a, v0)
5882 return true
5883 }
5884
5885
5886
5887 for {
5888 a := v_0
5889 x := v_1
5890 if v_2.Op != OpARM64MOVDconst {
5891 break
5892 }
5893 c := auxIntToInt64(v_2.AuxInt)
5894 if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
5895 break
5896 }
5897 v.reset(OpARM64ADDshiftLL)
5898 v.AuxInt = int64ToAuxInt(log64(c / 5))
5899 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
5900 v0.AuxInt = int64ToAuxInt(2)
5901 v0.AddArg2(x, x)
5902 v.AddArg2(a, v0)
5903 return true
5904 }
5905
5906
5907
5908 for {
5909 a := v_0
5910 x := v_1
5911 if v_2.Op != OpARM64MOVDconst {
5912 break
5913 }
5914 c := auxIntToInt64(v_2.AuxInt)
5915 if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
5916 break
5917 }
5918 v.reset(OpARM64SUBshiftLL)
5919 v.AuxInt = int64ToAuxInt(log64(c / 7))
5920 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
5921 v0.AuxInt = int64ToAuxInt(3)
5922 v0.AddArg2(x, x)
5923 v.AddArg2(a, v0)
5924 return true
5925 }
5926
5927
5928
5929 for {
5930 a := v_0
5931 x := v_1
5932 if v_2.Op != OpARM64MOVDconst {
5933 break
5934 }
5935 c := auxIntToInt64(v_2.AuxInt)
5936 if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
5937 break
5938 }
5939 v.reset(OpARM64ADDshiftLL)
5940 v.AuxInt = int64ToAuxInt(log64(c / 9))
5941 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
5942 v0.AuxInt = int64ToAuxInt(3)
5943 v0.AddArg2(x, x)
5944 v.AddArg2(a, v0)
5945 return true
5946 }
5947
5948
5949 for {
5950 a := v_0
5951 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
5952 break
5953 }
5954 x := v_2
5955 v.reset(OpARM64SUB)
5956 v.AddArg2(a, x)
5957 return true
5958 }
5959
5960
5961 for {
5962 a := v_0
5963 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
5964 break
5965 }
5966 v.copyOf(a)
5967 return true
5968 }
5969
5970
5971 for {
5972 a := v_0
5973 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
5974 break
5975 }
5976 x := v_2
5977 v.reset(OpARM64ADD)
5978 v.AddArg2(a, x)
5979 return true
5980 }
5981
5982
5983
5984 for {
5985 a := v_0
5986 if v_1.Op != OpARM64MOVDconst {
5987 break
5988 }
5989 c := auxIntToInt64(v_1.AuxInt)
5990 x := v_2
5991 if !(isPowerOfTwo64(c)) {
5992 break
5993 }
5994 v.reset(OpARM64ADDshiftLL)
5995 v.AuxInt = int64ToAuxInt(log64(c))
5996 v.AddArg2(a, x)
5997 return true
5998 }
5999
6000
6001
6002 for {
6003 a := v_0
6004 if v_1.Op != OpARM64MOVDconst {
6005 break
6006 }
6007 c := auxIntToInt64(v_1.AuxInt)
6008 x := v_2
6009 if !(isPowerOfTwo64(c-1) && c >= 3) {
6010 break
6011 }
6012 v.reset(OpARM64ADD)
6013 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6014 v0.AuxInt = int64ToAuxInt(log64(c - 1))
6015 v0.AddArg2(x, x)
6016 v.AddArg2(a, v0)
6017 return true
6018 }
6019
6020
6021
6022 for {
6023 a := v_0
6024 if v_1.Op != OpARM64MOVDconst {
6025 break
6026 }
6027 c := auxIntToInt64(v_1.AuxInt)
6028 x := v_2
6029 if !(isPowerOfTwo64(c+1) && c >= 7) {
6030 break
6031 }
6032 v.reset(OpARM64SUB)
6033 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6034 v0.AuxInt = int64ToAuxInt(log64(c + 1))
6035 v0.AddArg2(x, x)
6036 v.AddArg2(a, v0)
6037 return true
6038 }
6039
6040
6041
6042 for {
6043 a := v_0
6044 if v_1.Op != OpARM64MOVDconst {
6045 break
6046 }
6047 c := auxIntToInt64(v_1.AuxInt)
6048 x := v_2
6049 if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
6050 break
6051 }
6052 v.reset(OpARM64SUBshiftLL)
6053 v.AuxInt = int64ToAuxInt(log64(c / 3))
6054 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6055 v0.AuxInt = int64ToAuxInt(2)
6056 v0.AddArg2(x, x)
6057 v.AddArg2(a, v0)
6058 return true
6059 }
6060
6061
6062
6063 for {
6064 a := v_0
6065 if v_1.Op != OpARM64MOVDconst {
6066 break
6067 }
6068 c := auxIntToInt64(v_1.AuxInt)
6069 x := v_2
6070 if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
6071 break
6072 }
6073 v.reset(OpARM64ADDshiftLL)
6074 v.AuxInt = int64ToAuxInt(log64(c / 5))
6075 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6076 v0.AuxInt = int64ToAuxInt(2)
6077 v0.AddArg2(x, x)
6078 v.AddArg2(a, v0)
6079 return true
6080 }
6081
6082
6083
6084 for {
6085 a := v_0
6086 if v_1.Op != OpARM64MOVDconst {
6087 break
6088 }
6089 c := auxIntToInt64(v_1.AuxInt)
6090 x := v_2
6091 if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
6092 break
6093 }
6094 v.reset(OpARM64SUBshiftLL)
6095 v.AuxInt = int64ToAuxInt(log64(c / 7))
6096 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6097 v0.AuxInt = int64ToAuxInt(3)
6098 v0.AddArg2(x, x)
6099 v.AddArg2(a, v0)
6100 return true
6101 }
6102
6103
6104
6105 for {
6106 a := v_0
6107 if v_1.Op != OpARM64MOVDconst {
6108 break
6109 }
6110 c := auxIntToInt64(v_1.AuxInt)
6111 x := v_2
6112 if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
6113 break
6114 }
6115 v.reset(OpARM64ADDshiftLL)
6116 v.AuxInt = int64ToAuxInt(log64(c / 9))
6117 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6118 v0.AuxInt = int64ToAuxInt(3)
6119 v0.AddArg2(x, x)
6120 v.AddArg2(a, v0)
6121 return true
6122 }
6123
6124
6125 for {
6126 if v_0.Op != OpARM64MOVDconst {
6127 break
6128 }
6129 c := auxIntToInt64(v_0.AuxInt)
6130 x := v_1
6131 y := v_2
6132 v.reset(OpARM64ADDconst)
6133 v.AuxInt = int64ToAuxInt(c)
6134 v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
6135 v0.AddArg2(x, y)
6136 v.AddArg(v0)
6137 return true
6138 }
6139
6140
6141 for {
6142 a := v_0
6143 if v_1.Op != OpARM64MOVDconst {
6144 break
6145 }
6146 c := auxIntToInt64(v_1.AuxInt)
6147 if v_2.Op != OpARM64MOVDconst {
6148 break
6149 }
6150 d := auxIntToInt64(v_2.AuxInt)
6151 v.reset(OpARM64ADDconst)
6152 v.AuxInt = int64ToAuxInt(c * d)
6153 v.AddArg(a)
6154 return true
6155 }
6156 return false
6157 }
6158 func rewriteValueARM64_OpARM64MADDW(v *Value) bool {
6159 v_2 := v.Args[2]
6160 v_1 := v.Args[1]
6161 v_0 := v.Args[0]
6162 b := v.Block
6163
6164
6165
6166 for {
6167 a := v_0
6168 x := v_1
6169 if v_2.Op != OpARM64MOVDconst {
6170 break
6171 }
6172 c := auxIntToInt64(v_2.AuxInt)
6173 if !(int32(c) == -1) {
6174 break
6175 }
6176 v.reset(OpARM64SUB)
6177 v.AddArg2(a, x)
6178 return true
6179 }
6180
6181
6182
6183 for {
6184 a := v_0
6185 if v_2.Op != OpARM64MOVDconst {
6186 break
6187 }
6188 c := auxIntToInt64(v_2.AuxInt)
6189 if !(int32(c) == 0) {
6190 break
6191 }
6192 v.copyOf(a)
6193 return true
6194 }
6195
6196
6197
6198 for {
6199 a := v_0
6200 x := v_1
6201 if v_2.Op != OpARM64MOVDconst {
6202 break
6203 }
6204 c := auxIntToInt64(v_2.AuxInt)
6205 if !(int32(c) == 1) {
6206 break
6207 }
6208 v.reset(OpARM64ADD)
6209 v.AddArg2(a, x)
6210 return true
6211 }
6212
6213
6214
6215 for {
6216 a := v_0
6217 x := v_1
6218 if v_2.Op != OpARM64MOVDconst {
6219 break
6220 }
6221 c := auxIntToInt64(v_2.AuxInt)
6222 if !(isPowerOfTwo64(c)) {
6223 break
6224 }
6225 v.reset(OpARM64ADDshiftLL)
6226 v.AuxInt = int64ToAuxInt(log64(c))
6227 v.AddArg2(a, x)
6228 return true
6229 }
6230
6231
6232
6233 for {
6234 a := v_0
6235 x := v_1
6236 if v_2.Op != OpARM64MOVDconst {
6237 break
6238 }
6239 c := auxIntToInt64(v_2.AuxInt)
6240 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
6241 break
6242 }
6243 v.reset(OpARM64ADD)
6244 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6245 v0.AuxInt = int64ToAuxInt(log64(c - 1))
6246 v0.AddArg2(x, x)
6247 v.AddArg2(a, v0)
6248 return true
6249 }
6250
6251
6252
6253 for {
6254 a := v_0
6255 x := v_1
6256 if v_2.Op != OpARM64MOVDconst {
6257 break
6258 }
6259 c := auxIntToInt64(v_2.AuxInt)
6260 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
6261 break
6262 }
6263 v.reset(OpARM64SUB)
6264 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6265 v0.AuxInt = int64ToAuxInt(log64(c + 1))
6266 v0.AddArg2(x, x)
6267 v.AddArg2(a, v0)
6268 return true
6269 }
6270
6271
6272
6273 for {
6274 a := v_0
6275 x := v_1
6276 if v_2.Op != OpARM64MOVDconst {
6277 break
6278 }
6279 c := auxIntToInt64(v_2.AuxInt)
6280 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
6281 break
6282 }
6283 v.reset(OpARM64SUBshiftLL)
6284 v.AuxInt = int64ToAuxInt(log64(c / 3))
6285 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6286 v0.AuxInt = int64ToAuxInt(2)
6287 v0.AddArg2(x, x)
6288 v.AddArg2(a, v0)
6289 return true
6290 }
6291
6292
6293
6294 for {
6295 a := v_0
6296 x := v_1
6297 if v_2.Op != OpARM64MOVDconst {
6298 break
6299 }
6300 c := auxIntToInt64(v_2.AuxInt)
6301 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
6302 break
6303 }
6304 v.reset(OpARM64ADDshiftLL)
6305 v.AuxInt = int64ToAuxInt(log64(c / 5))
6306 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6307 v0.AuxInt = int64ToAuxInt(2)
6308 v0.AddArg2(x, x)
6309 v.AddArg2(a, v0)
6310 return true
6311 }
6312
6313
6314
6315 for {
6316 a := v_0
6317 x := v_1
6318 if v_2.Op != OpARM64MOVDconst {
6319 break
6320 }
6321 c := auxIntToInt64(v_2.AuxInt)
6322 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
6323 break
6324 }
6325 v.reset(OpARM64SUBshiftLL)
6326 v.AuxInt = int64ToAuxInt(log64(c / 7))
6327 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6328 v0.AuxInt = int64ToAuxInt(3)
6329 v0.AddArg2(x, x)
6330 v.AddArg2(a, v0)
6331 return true
6332 }
6333
6334
6335
6336 for {
6337 a := v_0
6338 x := v_1
6339 if v_2.Op != OpARM64MOVDconst {
6340 break
6341 }
6342 c := auxIntToInt64(v_2.AuxInt)
6343 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
6344 break
6345 }
6346 v.reset(OpARM64ADDshiftLL)
6347 v.AuxInt = int64ToAuxInt(log64(c / 9))
6348 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6349 v0.AuxInt = int64ToAuxInt(3)
6350 v0.AddArg2(x, x)
6351 v.AddArg2(a, v0)
6352 return true
6353 }
6354
6355
6356
6357 for {
6358 a := v_0
6359 if v_1.Op != OpARM64MOVDconst {
6360 break
6361 }
6362 c := auxIntToInt64(v_1.AuxInt)
6363 x := v_2
6364 if !(int32(c) == -1) {
6365 break
6366 }
6367 v.reset(OpARM64SUB)
6368 v.AddArg2(a, x)
6369 return true
6370 }
6371
6372
6373
6374 for {
6375 a := v_0
6376 if v_1.Op != OpARM64MOVDconst {
6377 break
6378 }
6379 c := auxIntToInt64(v_1.AuxInt)
6380 if !(int32(c) == 0) {
6381 break
6382 }
6383 v.copyOf(a)
6384 return true
6385 }
6386
6387
6388
6389 for {
6390 a := v_0
6391 if v_1.Op != OpARM64MOVDconst {
6392 break
6393 }
6394 c := auxIntToInt64(v_1.AuxInt)
6395 x := v_2
6396 if !(int32(c) == 1) {
6397 break
6398 }
6399 v.reset(OpARM64ADD)
6400 v.AddArg2(a, x)
6401 return true
6402 }
6403
6404
6405
6406 for {
6407 a := v_0
6408 if v_1.Op != OpARM64MOVDconst {
6409 break
6410 }
6411 c := auxIntToInt64(v_1.AuxInt)
6412 x := v_2
6413 if !(isPowerOfTwo64(c)) {
6414 break
6415 }
6416 v.reset(OpARM64ADDshiftLL)
6417 v.AuxInt = int64ToAuxInt(log64(c))
6418 v.AddArg2(a, x)
6419 return true
6420 }
6421
6422
6423
6424 for {
6425 a := v_0
6426 if v_1.Op != OpARM64MOVDconst {
6427 break
6428 }
6429 c := auxIntToInt64(v_1.AuxInt)
6430 x := v_2
6431 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
6432 break
6433 }
6434 v.reset(OpARM64ADD)
6435 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6436 v0.AuxInt = int64ToAuxInt(log64(c - 1))
6437 v0.AddArg2(x, x)
6438 v.AddArg2(a, v0)
6439 return true
6440 }
6441
6442
6443
6444 for {
6445 a := v_0
6446 if v_1.Op != OpARM64MOVDconst {
6447 break
6448 }
6449 c := auxIntToInt64(v_1.AuxInt)
6450 x := v_2
6451 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
6452 break
6453 }
6454 v.reset(OpARM64SUB)
6455 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6456 v0.AuxInt = int64ToAuxInt(log64(c + 1))
6457 v0.AddArg2(x, x)
6458 v.AddArg2(a, v0)
6459 return true
6460 }
6461
6462
6463
6464 for {
6465 a := v_0
6466 if v_1.Op != OpARM64MOVDconst {
6467 break
6468 }
6469 c := auxIntToInt64(v_1.AuxInt)
6470 x := v_2
6471 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
6472 break
6473 }
6474 v.reset(OpARM64SUBshiftLL)
6475 v.AuxInt = int64ToAuxInt(log64(c / 3))
6476 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6477 v0.AuxInt = int64ToAuxInt(2)
6478 v0.AddArg2(x, x)
6479 v.AddArg2(a, v0)
6480 return true
6481 }
6482
6483
6484
6485 for {
6486 a := v_0
6487 if v_1.Op != OpARM64MOVDconst {
6488 break
6489 }
6490 c := auxIntToInt64(v_1.AuxInt)
6491 x := v_2
6492 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
6493 break
6494 }
6495 v.reset(OpARM64ADDshiftLL)
6496 v.AuxInt = int64ToAuxInt(log64(c / 5))
6497 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6498 v0.AuxInt = int64ToAuxInt(2)
6499 v0.AddArg2(x, x)
6500 v.AddArg2(a, v0)
6501 return true
6502 }
6503
6504
6505
6506 for {
6507 a := v_0
6508 if v_1.Op != OpARM64MOVDconst {
6509 break
6510 }
6511 c := auxIntToInt64(v_1.AuxInt)
6512 x := v_2
6513 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
6514 break
6515 }
6516 v.reset(OpARM64SUBshiftLL)
6517 v.AuxInt = int64ToAuxInt(log64(c / 7))
6518 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6519 v0.AuxInt = int64ToAuxInt(3)
6520 v0.AddArg2(x, x)
6521 v.AddArg2(a, v0)
6522 return true
6523 }
6524
6525
6526
6527 for {
6528 a := v_0
6529 if v_1.Op != OpARM64MOVDconst {
6530 break
6531 }
6532 c := auxIntToInt64(v_1.AuxInt)
6533 x := v_2
6534 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
6535 break
6536 }
6537 v.reset(OpARM64ADDshiftLL)
6538 v.AuxInt = int64ToAuxInt(log64(c / 9))
6539 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6540 v0.AuxInt = int64ToAuxInt(3)
6541 v0.AddArg2(x, x)
6542 v.AddArg2(a, v0)
6543 return true
6544 }
6545
6546
6547 for {
6548 if v_0.Op != OpARM64MOVDconst {
6549 break
6550 }
6551 c := auxIntToInt64(v_0.AuxInt)
6552 x := v_1
6553 y := v_2
6554 v.reset(OpARM64ADDconst)
6555 v.AuxInt = int64ToAuxInt(c)
6556 v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
6557 v0.AddArg2(x, y)
6558 v.AddArg(v0)
6559 return true
6560 }
6561
6562
6563 for {
6564 a := v_0
6565 if v_1.Op != OpARM64MOVDconst {
6566 break
6567 }
6568 c := auxIntToInt64(v_1.AuxInt)
6569 if v_2.Op != OpARM64MOVDconst {
6570 break
6571 }
6572 d := auxIntToInt64(v_2.AuxInt)
6573 v.reset(OpARM64ADDconst)
6574 v.AuxInt = int64ToAuxInt(int64(int32(c) * int32(d)))
6575 v.AddArg(a)
6576 return true
6577 }
6578 return false
6579 }
6580 func rewriteValueARM64_OpARM64MNEG(v *Value) bool {
6581 v_1 := v.Args[1]
6582 v_0 := v.Args[0]
6583 b := v.Block
6584
6585
6586 for {
6587 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6588 x := v_0
6589 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
6590 continue
6591 }
6592 v.copyOf(x)
6593 return true
6594 }
6595 break
6596 }
6597
6598
6599 for {
6600 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6601 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6602 continue
6603 }
6604 v.reset(OpARM64MOVDconst)
6605 v.AuxInt = int64ToAuxInt(0)
6606 return true
6607 }
6608 break
6609 }
6610
6611
6612 for {
6613 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6614 x := v_0
6615 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
6616 continue
6617 }
6618 v.reset(OpARM64NEG)
6619 v.AddArg(x)
6620 return true
6621 }
6622 break
6623 }
6624
6625
6626
6627 for {
6628 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6629 x := v_0
6630 if v_1.Op != OpARM64MOVDconst {
6631 continue
6632 }
6633 c := auxIntToInt64(v_1.AuxInt)
6634 if !(isPowerOfTwo64(c)) {
6635 continue
6636 }
6637 v.reset(OpARM64NEG)
6638 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
6639 v0.AuxInt = int64ToAuxInt(log64(c))
6640 v0.AddArg(x)
6641 v.AddArg(v0)
6642 return true
6643 }
6644 break
6645 }
6646
6647
6648
6649 for {
6650 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6651 x := v_0
6652 if v_1.Op != OpARM64MOVDconst {
6653 continue
6654 }
6655 c := auxIntToInt64(v_1.AuxInt)
6656 if !(isPowerOfTwo64(c-1) && c >= 3) {
6657 continue
6658 }
6659 v.reset(OpARM64NEG)
6660 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6661 v0.AuxInt = int64ToAuxInt(log64(c - 1))
6662 v0.AddArg2(x, x)
6663 v.AddArg(v0)
6664 return true
6665 }
6666 break
6667 }
6668
6669
6670
6671 for {
6672 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6673 x := v_0
6674 if v_1.Op != OpARM64MOVDconst {
6675 continue
6676 }
6677 c := auxIntToInt64(v_1.AuxInt)
6678 if !(isPowerOfTwo64(c+1) && c >= 7) {
6679 continue
6680 }
6681 v.reset(OpARM64NEG)
6682 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6683 v0.AuxInt = int64ToAuxInt(log64(c + 1))
6684 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
6685 v1.AddArg(x)
6686 v0.AddArg2(v1, x)
6687 v.AddArg(v0)
6688 return true
6689 }
6690 break
6691 }
6692
6693
6694
6695 for {
6696 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6697 x := v_0
6698 if v_1.Op != OpARM64MOVDconst {
6699 continue
6700 }
6701 c := auxIntToInt64(v_1.AuxInt)
6702 if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
6703 continue
6704 }
6705 v.reset(OpARM64SLLconst)
6706 v.Type = x.Type
6707 v.AuxInt = int64ToAuxInt(log64(c / 3))
6708 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6709 v0.AuxInt = int64ToAuxInt(2)
6710 v0.AddArg2(x, x)
6711 v.AddArg(v0)
6712 return true
6713 }
6714 break
6715 }
6716
6717
6718
6719 for {
6720 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6721 x := v_0
6722 if v_1.Op != OpARM64MOVDconst {
6723 continue
6724 }
6725 c := auxIntToInt64(v_1.AuxInt)
6726 if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
6727 continue
6728 }
6729 v.reset(OpARM64NEG)
6730 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
6731 v0.AuxInt = int64ToAuxInt(log64(c / 5))
6732 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6733 v1.AuxInt = int64ToAuxInt(2)
6734 v1.AddArg2(x, x)
6735 v0.AddArg(v1)
6736 v.AddArg(v0)
6737 return true
6738 }
6739 break
6740 }
6741
6742
6743
6744 for {
6745 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6746 x := v_0
6747 if v_1.Op != OpARM64MOVDconst {
6748 continue
6749 }
6750 c := auxIntToInt64(v_1.AuxInt)
6751 if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
6752 continue
6753 }
6754 v.reset(OpARM64SLLconst)
6755 v.Type = x.Type
6756 v.AuxInt = int64ToAuxInt(log64(c / 7))
6757 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6758 v0.AuxInt = int64ToAuxInt(3)
6759 v0.AddArg2(x, x)
6760 v.AddArg(v0)
6761 return true
6762 }
6763 break
6764 }
6765
6766
6767
6768 for {
6769 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6770 x := v_0
6771 if v_1.Op != OpARM64MOVDconst {
6772 continue
6773 }
6774 c := auxIntToInt64(v_1.AuxInt)
6775 if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
6776 continue
6777 }
6778 v.reset(OpARM64NEG)
6779 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
6780 v0.AuxInt = int64ToAuxInt(log64(c / 9))
6781 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6782 v1.AuxInt = int64ToAuxInt(3)
6783 v1.AddArg2(x, x)
6784 v0.AddArg(v1)
6785 v.AddArg(v0)
6786 return true
6787 }
6788 break
6789 }
6790
6791
6792 for {
6793 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6794 if v_0.Op != OpARM64MOVDconst {
6795 continue
6796 }
6797 c := auxIntToInt64(v_0.AuxInt)
6798 if v_1.Op != OpARM64MOVDconst {
6799 continue
6800 }
6801 d := auxIntToInt64(v_1.AuxInt)
6802 v.reset(OpARM64MOVDconst)
6803 v.AuxInt = int64ToAuxInt(-c * d)
6804 return true
6805 }
6806 break
6807 }
6808 return false
6809 }
6810 func rewriteValueARM64_OpARM64MNEGW(v *Value) bool {
6811 v_1 := v.Args[1]
6812 v_0 := v.Args[0]
6813 b := v.Block
6814
6815
6816
6817 for {
6818 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6819 x := v_0
6820 if v_1.Op != OpARM64MOVDconst {
6821 continue
6822 }
6823 c := auxIntToInt64(v_1.AuxInt)
6824 if !(int32(c) == -1) {
6825 continue
6826 }
6827 v.copyOf(x)
6828 return true
6829 }
6830 break
6831 }
6832
6833
6834
6835 for {
6836 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6837 if v_1.Op != OpARM64MOVDconst {
6838 continue
6839 }
6840 c := auxIntToInt64(v_1.AuxInt)
6841 if !(int32(c) == 0) {
6842 continue
6843 }
6844 v.reset(OpARM64MOVDconst)
6845 v.AuxInt = int64ToAuxInt(0)
6846 return true
6847 }
6848 break
6849 }
6850
6851
6852
6853 for {
6854 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6855 x := v_0
6856 if v_1.Op != OpARM64MOVDconst {
6857 continue
6858 }
6859 c := auxIntToInt64(v_1.AuxInt)
6860 if !(int32(c) == 1) {
6861 continue
6862 }
6863 v.reset(OpARM64NEG)
6864 v.AddArg(x)
6865 return true
6866 }
6867 break
6868 }
6869
6870
6871
6872 for {
6873 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6874 x := v_0
6875 if v_1.Op != OpARM64MOVDconst {
6876 continue
6877 }
6878 c := auxIntToInt64(v_1.AuxInt)
6879 if !(isPowerOfTwo64(c)) {
6880 continue
6881 }
6882 v.reset(OpARM64NEG)
6883 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
6884 v0.AuxInt = int64ToAuxInt(log64(c))
6885 v0.AddArg(x)
6886 v.AddArg(v0)
6887 return true
6888 }
6889 break
6890 }
6891
6892
6893
6894 for {
6895 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6896 x := v_0
6897 if v_1.Op != OpARM64MOVDconst {
6898 continue
6899 }
6900 c := auxIntToInt64(v_1.AuxInt)
6901 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
6902 continue
6903 }
6904 v.reset(OpARM64NEG)
6905 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6906 v0.AuxInt = int64ToAuxInt(log64(c - 1))
6907 v0.AddArg2(x, x)
6908 v.AddArg(v0)
6909 return true
6910 }
6911 break
6912 }
6913
6914
6915
6916 for {
6917 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6918 x := v_0
6919 if v_1.Op != OpARM64MOVDconst {
6920 continue
6921 }
6922 c := auxIntToInt64(v_1.AuxInt)
6923 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
6924 continue
6925 }
6926 v.reset(OpARM64NEG)
6927 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6928 v0.AuxInt = int64ToAuxInt(log64(c + 1))
6929 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
6930 v1.AddArg(x)
6931 v0.AddArg2(v1, x)
6932 v.AddArg(v0)
6933 return true
6934 }
6935 break
6936 }
6937
6938
6939
6940 for {
6941 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6942 x := v_0
6943 if v_1.Op != OpARM64MOVDconst {
6944 continue
6945 }
6946 c := auxIntToInt64(v_1.AuxInt)
6947 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
6948 continue
6949 }
6950 v.reset(OpARM64SLLconst)
6951 v.Type = x.Type
6952 v.AuxInt = int64ToAuxInt(log64(c / 3))
6953 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
6954 v0.AuxInt = int64ToAuxInt(2)
6955 v0.AddArg2(x, x)
6956 v.AddArg(v0)
6957 return true
6958 }
6959 break
6960 }
6961
6962
6963
6964 for {
6965 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6966 x := v_0
6967 if v_1.Op != OpARM64MOVDconst {
6968 continue
6969 }
6970 c := auxIntToInt64(v_1.AuxInt)
6971 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
6972 continue
6973 }
6974 v.reset(OpARM64NEG)
6975 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
6976 v0.AuxInt = int64ToAuxInt(log64(c / 5))
6977 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
6978 v1.AuxInt = int64ToAuxInt(2)
6979 v1.AddArg2(x, x)
6980 v0.AddArg(v1)
6981 v.AddArg(v0)
6982 return true
6983 }
6984 break
6985 }
6986
6987
6988
6989 for {
6990 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6991 x := v_0
6992 if v_1.Op != OpARM64MOVDconst {
6993 continue
6994 }
6995 c := auxIntToInt64(v_1.AuxInt)
6996 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
6997 continue
6998 }
6999 v.reset(OpARM64SLLconst)
7000 v.Type = x.Type
7001 v.AuxInt = int64ToAuxInt(log64(c / 7))
7002 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7003 v0.AuxInt = int64ToAuxInt(3)
7004 v0.AddArg2(x, x)
7005 v.AddArg(v0)
7006 return true
7007 }
7008 break
7009 }
7010
7011
7012
7013 for {
7014 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7015 x := v_0
7016 if v_1.Op != OpARM64MOVDconst {
7017 continue
7018 }
7019 c := auxIntToInt64(v_1.AuxInt)
7020 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
7021 continue
7022 }
7023 v.reset(OpARM64NEG)
7024 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
7025 v0.AuxInt = int64ToAuxInt(log64(c / 9))
7026 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7027 v1.AuxInt = int64ToAuxInt(3)
7028 v1.AddArg2(x, x)
7029 v0.AddArg(v1)
7030 v.AddArg(v0)
7031 return true
7032 }
7033 break
7034 }
7035
7036
7037 for {
7038 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7039 if v_0.Op != OpARM64MOVDconst {
7040 continue
7041 }
7042 c := auxIntToInt64(v_0.AuxInt)
7043 if v_1.Op != OpARM64MOVDconst {
7044 continue
7045 }
7046 d := auxIntToInt64(v_1.AuxInt)
7047 v.reset(OpARM64MOVDconst)
7048 v.AuxInt = int64ToAuxInt(-int64(int32(c) * int32(d)))
7049 return true
7050 }
7051 break
7052 }
7053 return false
7054 }
7055 func rewriteValueARM64_OpARM64MOD(v *Value) bool {
7056 v_1 := v.Args[1]
7057 v_0 := v.Args[0]
7058
7059
7060
7061 for {
7062 if v_0.Op != OpARM64MOVDconst {
7063 break
7064 }
7065 c := auxIntToInt64(v_0.AuxInt)
7066 if v_1.Op != OpARM64MOVDconst {
7067 break
7068 }
7069 d := auxIntToInt64(v_1.AuxInt)
7070 if !(d != 0) {
7071 break
7072 }
7073 v.reset(OpARM64MOVDconst)
7074 v.AuxInt = int64ToAuxInt(c % d)
7075 return true
7076 }
7077 return false
7078 }
7079 func rewriteValueARM64_OpARM64MODW(v *Value) bool {
7080 v_1 := v.Args[1]
7081 v_0 := v.Args[0]
7082
7083
7084
7085 for {
7086 if v_0.Op != OpARM64MOVDconst {
7087 break
7088 }
7089 c := auxIntToInt64(v_0.AuxInt)
7090 if v_1.Op != OpARM64MOVDconst {
7091 break
7092 }
7093 d := auxIntToInt64(v_1.AuxInt)
7094 if !(d != 0) {
7095 break
7096 }
7097 v.reset(OpARM64MOVDconst)
7098 v.AuxInt = int64ToAuxInt(int64(int32(c) % int32(d)))
7099 return true
7100 }
7101 return false
7102 }
7103 func rewriteValueARM64_OpARM64MOVBUload(v *Value) bool {
7104 v_1 := v.Args[1]
7105 v_0 := v.Args[0]
7106 b := v.Block
7107 config := b.Func.Config
7108
7109
7110
7111 for {
7112 off1 := auxIntToInt32(v.AuxInt)
7113 sym := auxToSym(v.Aux)
7114 if v_0.Op != OpARM64ADDconst {
7115 break
7116 }
7117 off2 := auxIntToInt64(v_0.AuxInt)
7118 ptr := v_0.Args[0]
7119 mem := v_1
7120 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
7121 break
7122 }
7123 v.reset(OpARM64MOVBUload)
7124 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
7125 v.Aux = symToAux(sym)
7126 v.AddArg2(ptr, mem)
7127 return true
7128 }
7129
7130
7131
7132 for {
7133 off := auxIntToInt32(v.AuxInt)
7134 sym := auxToSym(v.Aux)
7135 if v_0.Op != OpARM64ADD {
7136 break
7137 }
7138 idx := v_0.Args[1]
7139 ptr := v_0.Args[0]
7140 mem := v_1
7141 if !(off == 0 && sym == nil) {
7142 break
7143 }
7144 v.reset(OpARM64MOVBUloadidx)
7145 v.AddArg3(ptr, idx, mem)
7146 return true
7147 }
7148
7149
7150
7151 for {
7152 off1 := auxIntToInt32(v.AuxInt)
7153 sym1 := auxToSym(v.Aux)
7154 if v_0.Op != OpARM64MOVDaddr {
7155 break
7156 }
7157 off2 := auxIntToInt32(v_0.AuxInt)
7158 sym2 := auxToSym(v_0.Aux)
7159 ptr := v_0.Args[0]
7160 mem := v_1
7161 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
7162 break
7163 }
7164 v.reset(OpARM64MOVBUload)
7165 v.AuxInt = int32ToAuxInt(off1 + off2)
7166 v.Aux = symToAux(mergeSym(sym1, sym2))
7167 v.AddArg2(ptr, mem)
7168 return true
7169 }
7170
7171
7172
7173 for {
7174 off := auxIntToInt32(v.AuxInt)
7175 sym := auxToSym(v.Aux)
7176 ptr := v_0
7177 if v_1.Op != OpARM64MOVBstorezero {
7178 break
7179 }
7180 off2 := auxIntToInt32(v_1.AuxInt)
7181 sym2 := auxToSym(v_1.Aux)
7182 ptr2 := v_1.Args[0]
7183 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
7184 break
7185 }
7186 v.reset(OpARM64MOVDconst)
7187 v.AuxInt = int64ToAuxInt(0)
7188 return true
7189 }
7190
7191
7192
7193 for {
7194 off := auxIntToInt32(v.AuxInt)
7195 sym := auxToSym(v.Aux)
7196 if v_0.Op != OpSB || !(symIsRO(sym)) {
7197 break
7198 }
7199 v.reset(OpARM64MOVDconst)
7200 v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
7201 return true
7202 }
7203 return false
7204 }
7205 func rewriteValueARM64_OpARM64MOVBUloadidx(v *Value) bool {
7206 v_2 := v.Args[2]
7207 v_1 := v.Args[1]
7208 v_0 := v.Args[0]
7209
7210
7211
7212 for {
7213 ptr := v_0
7214 if v_1.Op != OpARM64MOVDconst {
7215 break
7216 }
7217 c := auxIntToInt64(v_1.AuxInt)
7218 mem := v_2
7219 if !(is32Bit(c)) {
7220 break
7221 }
7222 v.reset(OpARM64MOVBUload)
7223 v.AuxInt = int32ToAuxInt(int32(c))
7224 v.AddArg2(ptr, mem)
7225 return true
7226 }
7227
7228
7229
7230 for {
7231 if v_0.Op != OpARM64MOVDconst {
7232 break
7233 }
7234 c := auxIntToInt64(v_0.AuxInt)
7235 ptr := v_1
7236 mem := v_2
7237 if !(is32Bit(c)) {
7238 break
7239 }
7240 v.reset(OpARM64MOVBUload)
7241 v.AuxInt = int32ToAuxInt(int32(c))
7242 v.AddArg2(ptr, mem)
7243 return true
7244 }
7245
7246
7247
7248 for {
7249 ptr := v_0
7250 idx := v_1
7251 if v_2.Op != OpARM64MOVBstorezeroidx {
7252 break
7253 }
7254 idx2 := v_2.Args[1]
7255 ptr2 := v_2.Args[0]
7256 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
7257 break
7258 }
7259 v.reset(OpARM64MOVDconst)
7260 v.AuxInt = int64ToAuxInt(0)
7261 return true
7262 }
7263 return false
7264 }
7265 func rewriteValueARM64_OpARM64MOVBUreg(v *Value) bool {
7266 v_0 := v.Args[0]
7267
7268
7269 for {
7270 x := v_0
7271 if x.Op != OpARM64MOVBUload {
7272 break
7273 }
7274 v.reset(OpARM64MOVDreg)
7275 v.AddArg(x)
7276 return true
7277 }
7278
7279
7280 for {
7281 x := v_0
7282 if x.Op != OpARM64MOVBUloadidx {
7283 break
7284 }
7285 v.reset(OpARM64MOVDreg)
7286 v.AddArg(x)
7287 return true
7288 }
7289
7290
7291 for {
7292 x := v_0
7293 if x.Op != OpARM64MOVBUreg {
7294 break
7295 }
7296 v.reset(OpARM64MOVDreg)
7297 v.AddArg(x)
7298 return true
7299 }
7300
7301
7302 for {
7303 if v_0.Op != OpARM64ANDconst {
7304 break
7305 }
7306 c := auxIntToInt64(v_0.AuxInt)
7307 x := v_0.Args[0]
7308 v.reset(OpARM64ANDconst)
7309 v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
7310 v.AddArg(x)
7311 return true
7312 }
7313
7314
7315 for {
7316 if v_0.Op != OpARM64MOVDconst {
7317 break
7318 }
7319 c := auxIntToInt64(v_0.AuxInt)
7320 v.reset(OpARM64MOVDconst)
7321 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
7322 return true
7323 }
7324
7325
7326
7327 for {
7328 x := v_0
7329 if !(x.Type.IsBoolean()) {
7330 break
7331 }
7332 v.reset(OpARM64MOVDreg)
7333 v.AddArg(x)
7334 return true
7335 }
7336
7337
7338
7339 for {
7340 if v_0.Op != OpARM64SLLconst {
7341 break
7342 }
7343 lc := auxIntToInt64(v_0.AuxInt)
7344 if !(lc >= 8) {
7345 break
7346 }
7347 v.reset(OpARM64MOVDconst)
7348 v.AuxInt = int64ToAuxInt(0)
7349 return true
7350 }
7351
7352
7353
7354 for {
7355 if v_0.Op != OpARM64SLLconst {
7356 break
7357 }
7358 lc := auxIntToInt64(v_0.AuxInt)
7359 x := v_0.Args[0]
7360 if !(lc < 8) {
7361 break
7362 }
7363 v.reset(OpARM64UBFIZ)
7364 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc))
7365 v.AddArg(x)
7366 return true
7367 }
7368
7369
7370
7371 for {
7372 if v_0.Op != OpARM64SRLconst {
7373 break
7374 }
7375 rc := auxIntToInt64(v_0.AuxInt)
7376 x := v_0.Args[0]
7377 if !(rc < 8) {
7378 break
7379 }
7380 v.reset(OpARM64UBFX)
7381 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8))
7382 v.AddArg(x)
7383 return true
7384 }
7385
7386
7387
7388 for {
7389 if v_0.Op != OpARM64UBFX {
7390 break
7391 }
7392 bfc := auxIntToArm64BitField(v_0.AuxInt)
7393 x := v_0.Args[0]
7394 if !(bfc.getARM64BFwidth() <= 8) {
7395 break
7396 }
7397 v.reset(OpARM64UBFX)
7398 v.AuxInt = arm64BitFieldToAuxInt(bfc)
7399 v.AddArg(x)
7400 return true
7401 }
7402 return false
7403 }
7404 func rewriteValueARM64_OpARM64MOVBload(v *Value) bool {
7405 v_1 := v.Args[1]
7406 v_0 := v.Args[0]
7407 b := v.Block
7408 config := b.Func.Config
7409
7410
7411
7412 for {
7413 off1 := auxIntToInt32(v.AuxInt)
7414 sym := auxToSym(v.Aux)
7415 if v_0.Op != OpARM64ADDconst {
7416 break
7417 }
7418 off2 := auxIntToInt64(v_0.AuxInt)
7419 ptr := v_0.Args[0]
7420 mem := v_1
7421 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
7422 break
7423 }
7424 v.reset(OpARM64MOVBload)
7425 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
7426 v.Aux = symToAux(sym)
7427 v.AddArg2(ptr, mem)
7428 return true
7429 }
7430
7431
7432
7433 for {
7434 off := auxIntToInt32(v.AuxInt)
7435 sym := auxToSym(v.Aux)
7436 if v_0.Op != OpARM64ADD {
7437 break
7438 }
7439 idx := v_0.Args[1]
7440 ptr := v_0.Args[0]
7441 mem := v_1
7442 if !(off == 0 && sym == nil) {
7443 break
7444 }
7445 v.reset(OpARM64MOVBloadidx)
7446 v.AddArg3(ptr, idx, mem)
7447 return true
7448 }
7449
7450
7451
7452 for {
7453 off1 := auxIntToInt32(v.AuxInt)
7454 sym1 := auxToSym(v.Aux)
7455 if v_0.Op != OpARM64MOVDaddr {
7456 break
7457 }
7458 off2 := auxIntToInt32(v_0.AuxInt)
7459 sym2 := auxToSym(v_0.Aux)
7460 ptr := v_0.Args[0]
7461 mem := v_1
7462 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
7463 break
7464 }
7465 v.reset(OpARM64MOVBload)
7466 v.AuxInt = int32ToAuxInt(off1 + off2)
7467 v.Aux = symToAux(mergeSym(sym1, sym2))
7468 v.AddArg2(ptr, mem)
7469 return true
7470 }
7471
7472
7473
7474 for {
7475 off := auxIntToInt32(v.AuxInt)
7476 sym := auxToSym(v.Aux)
7477 ptr := v_0
7478 if v_1.Op != OpARM64MOVBstorezero {
7479 break
7480 }
7481 off2 := auxIntToInt32(v_1.AuxInt)
7482 sym2 := auxToSym(v_1.Aux)
7483 ptr2 := v_1.Args[0]
7484 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
7485 break
7486 }
7487 v.reset(OpARM64MOVDconst)
7488 v.AuxInt = int64ToAuxInt(0)
7489 return true
7490 }
7491 return false
7492 }
7493 func rewriteValueARM64_OpARM64MOVBloadidx(v *Value) bool {
7494 v_2 := v.Args[2]
7495 v_1 := v.Args[1]
7496 v_0 := v.Args[0]
7497
7498
7499
7500 for {
7501 ptr := v_0
7502 if v_1.Op != OpARM64MOVDconst {
7503 break
7504 }
7505 c := auxIntToInt64(v_1.AuxInt)
7506 mem := v_2
7507 if !(is32Bit(c)) {
7508 break
7509 }
7510 v.reset(OpARM64MOVBload)
7511 v.AuxInt = int32ToAuxInt(int32(c))
7512 v.AddArg2(ptr, mem)
7513 return true
7514 }
7515
7516
7517
7518 for {
7519 if v_0.Op != OpARM64MOVDconst {
7520 break
7521 }
7522 c := auxIntToInt64(v_0.AuxInt)
7523 ptr := v_1
7524 mem := v_2
7525 if !(is32Bit(c)) {
7526 break
7527 }
7528 v.reset(OpARM64MOVBload)
7529 v.AuxInt = int32ToAuxInt(int32(c))
7530 v.AddArg2(ptr, mem)
7531 return true
7532 }
7533
7534
7535
7536 for {
7537 ptr := v_0
7538 idx := v_1
7539 if v_2.Op != OpARM64MOVBstorezeroidx {
7540 break
7541 }
7542 idx2 := v_2.Args[1]
7543 ptr2 := v_2.Args[0]
7544 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
7545 break
7546 }
7547 v.reset(OpARM64MOVDconst)
7548 v.AuxInt = int64ToAuxInt(0)
7549 return true
7550 }
7551 return false
7552 }
7553 func rewriteValueARM64_OpARM64MOVBreg(v *Value) bool {
7554 v_0 := v.Args[0]
7555
7556
7557 for {
7558 x := v_0
7559 if x.Op != OpARM64MOVBload {
7560 break
7561 }
7562 v.reset(OpARM64MOVDreg)
7563 v.AddArg(x)
7564 return true
7565 }
7566
7567
7568 for {
7569 x := v_0
7570 if x.Op != OpARM64MOVBloadidx {
7571 break
7572 }
7573 v.reset(OpARM64MOVDreg)
7574 v.AddArg(x)
7575 return true
7576 }
7577
7578
7579 for {
7580 x := v_0
7581 if x.Op != OpARM64MOVBreg {
7582 break
7583 }
7584 v.reset(OpARM64MOVDreg)
7585 v.AddArg(x)
7586 return true
7587 }
7588
7589
7590 for {
7591 if v_0.Op != OpARM64MOVDconst {
7592 break
7593 }
7594 c := auxIntToInt64(v_0.AuxInt)
7595 v.reset(OpARM64MOVDconst)
7596 v.AuxInt = int64ToAuxInt(int64(int8(c)))
7597 return true
7598 }
7599
7600
7601
7602 for {
7603 if v_0.Op != OpARM64SLLconst {
7604 break
7605 }
7606 lc := auxIntToInt64(v_0.AuxInt)
7607 x := v_0.Args[0]
7608 if !(lc < 8) {
7609 break
7610 }
7611 v.reset(OpARM64SBFIZ)
7612 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 8-lc))
7613 v.AddArg(x)
7614 return true
7615 }
7616
7617
7618
7619 for {
7620 if v_0.Op != OpARM64SBFX {
7621 break
7622 }
7623 bfc := auxIntToArm64BitField(v_0.AuxInt)
7624 x := v_0.Args[0]
7625 if !(bfc.getARM64BFwidth() <= 8) {
7626 break
7627 }
7628 v.reset(OpARM64SBFX)
7629 v.AuxInt = arm64BitFieldToAuxInt(bfc)
7630 v.AddArg(x)
7631 return true
7632 }
7633 return false
7634 }
7635 func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
7636 v_2 := v.Args[2]
7637 v_1 := v.Args[1]
7638 v_0 := v.Args[0]
7639 b := v.Block
7640 config := b.Func.Config
7641
7642
7643
7644 for {
7645 off1 := auxIntToInt32(v.AuxInt)
7646 sym := auxToSym(v.Aux)
7647 if v_0.Op != OpARM64ADDconst {
7648 break
7649 }
7650 off2 := auxIntToInt64(v_0.AuxInt)
7651 ptr := v_0.Args[0]
7652 val := v_1
7653 mem := v_2
7654 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
7655 break
7656 }
7657 v.reset(OpARM64MOVBstore)
7658 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
7659 v.Aux = symToAux(sym)
7660 v.AddArg3(ptr, val, mem)
7661 return true
7662 }
7663
7664
7665
7666 for {
7667 off := auxIntToInt32(v.AuxInt)
7668 sym := auxToSym(v.Aux)
7669 if v_0.Op != OpARM64ADD {
7670 break
7671 }
7672 idx := v_0.Args[1]
7673 ptr := v_0.Args[0]
7674 val := v_1
7675 mem := v_2
7676 if !(off == 0 && sym == nil) {
7677 break
7678 }
7679 v.reset(OpARM64MOVBstoreidx)
7680 v.AddArg4(ptr, idx, val, mem)
7681 return true
7682 }
7683
7684
7685
7686 for {
7687 off1 := auxIntToInt32(v.AuxInt)
7688 sym1 := auxToSym(v.Aux)
7689 if v_0.Op != OpARM64MOVDaddr {
7690 break
7691 }
7692 off2 := auxIntToInt32(v_0.AuxInt)
7693 sym2 := auxToSym(v_0.Aux)
7694 ptr := v_0.Args[0]
7695 val := v_1
7696 mem := v_2
7697 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
7698 break
7699 }
7700 v.reset(OpARM64MOVBstore)
7701 v.AuxInt = int32ToAuxInt(off1 + off2)
7702 v.Aux = symToAux(mergeSym(sym1, sym2))
7703 v.AddArg3(ptr, val, mem)
7704 return true
7705 }
7706
7707
7708 for {
7709 off := auxIntToInt32(v.AuxInt)
7710 sym := auxToSym(v.Aux)
7711 ptr := v_0
7712 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
7713 break
7714 }
7715 mem := v_2
7716 v.reset(OpARM64MOVBstorezero)
7717 v.AuxInt = int32ToAuxInt(off)
7718 v.Aux = symToAux(sym)
7719 v.AddArg2(ptr, mem)
7720 return true
7721 }
7722
7723
7724 for {
7725 off := auxIntToInt32(v.AuxInt)
7726 sym := auxToSym(v.Aux)
7727 ptr := v_0
7728 if v_1.Op != OpARM64MOVBreg {
7729 break
7730 }
7731 x := v_1.Args[0]
7732 mem := v_2
7733 v.reset(OpARM64MOVBstore)
7734 v.AuxInt = int32ToAuxInt(off)
7735 v.Aux = symToAux(sym)
7736 v.AddArg3(ptr, x, mem)
7737 return true
7738 }
7739
7740
7741 for {
7742 off := auxIntToInt32(v.AuxInt)
7743 sym := auxToSym(v.Aux)
7744 ptr := v_0
7745 if v_1.Op != OpARM64MOVBUreg {
7746 break
7747 }
7748 x := v_1.Args[0]
7749 mem := v_2
7750 v.reset(OpARM64MOVBstore)
7751 v.AuxInt = int32ToAuxInt(off)
7752 v.Aux = symToAux(sym)
7753 v.AddArg3(ptr, x, mem)
7754 return true
7755 }
7756
7757
7758 for {
7759 off := auxIntToInt32(v.AuxInt)
7760 sym := auxToSym(v.Aux)
7761 ptr := v_0
7762 if v_1.Op != OpARM64MOVHreg {
7763 break
7764 }
7765 x := v_1.Args[0]
7766 mem := v_2
7767 v.reset(OpARM64MOVBstore)
7768 v.AuxInt = int32ToAuxInt(off)
7769 v.Aux = symToAux(sym)
7770 v.AddArg3(ptr, x, mem)
7771 return true
7772 }
7773
7774
7775 for {
7776 off := auxIntToInt32(v.AuxInt)
7777 sym := auxToSym(v.Aux)
7778 ptr := v_0
7779 if v_1.Op != OpARM64MOVHUreg {
7780 break
7781 }
7782 x := v_1.Args[0]
7783 mem := v_2
7784 v.reset(OpARM64MOVBstore)
7785 v.AuxInt = int32ToAuxInt(off)
7786 v.Aux = symToAux(sym)
7787 v.AddArg3(ptr, x, mem)
7788 return true
7789 }
7790
7791
7792 for {
7793 off := auxIntToInt32(v.AuxInt)
7794 sym := auxToSym(v.Aux)
7795 ptr := v_0
7796 if v_1.Op != OpARM64MOVWreg {
7797 break
7798 }
7799 x := v_1.Args[0]
7800 mem := v_2
7801 v.reset(OpARM64MOVBstore)
7802 v.AuxInt = int32ToAuxInt(off)
7803 v.Aux = symToAux(sym)
7804 v.AddArg3(ptr, x, mem)
7805 return true
7806 }
7807
7808
7809 for {
7810 off := auxIntToInt32(v.AuxInt)
7811 sym := auxToSym(v.Aux)
7812 ptr := v_0
7813 if v_1.Op != OpARM64MOVWUreg {
7814 break
7815 }
7816 x := v_1.Args[0]
7817 mem := v_2
7818 v.reset(OpARM64MOVBstore)
7819 v.AuxInt = int32ToAuxInt(off)
7820 v.Aux = symToAux(sym)
7821 v.AddArg3(ptr, x, mem)
7822 return true
7823 }
7824
7825
7826
7827 for {
7828 i := auxIntToInt32(v.AuxInt)
7829 s := auxToSym(v.Aux)
7830 ptr0 := v_0
7831 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
7832 break
7833 }
7834 w := v_1.Args[0]
7835 x := v_2
7836 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
7837 break
7838 }
7839 mem := x.Args[2]
7840 ptr1 := x.Args[0]
7841 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
7842 break
7843 }
7844 v.reset(OpARM64MOVHstore)
7845 v.AuxInt = int32ToAuxInt(i - 1)
7846 v.Aux = symToAux(s)
7847 v.AddArg3(ptr0, w, mem)
7848 return true
7849 }
7850
7851
7852
7853 for {
7854 if auxIntToInt32(v.AuxInt) != 1 {
7855 break
7856 }
7857 s := auxToSym(v.Aux)
7858 if v_0.Op != OpARM64ADD {
7859 break
7860 }
7861 _ = v_0.Args[1]
7862 v_0_0 := v_0.Args[0]
7863 v_0_1 := v_0.Args[1]
7864 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7865 ptr0 := v_0_0
7866 idx0 := v_0_1
7867 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
7868 continue
7869 }
7870 w := v_1.Args[0]
7871 x := v_2
7872 if x.Op != OpARM64MOVBstoreidx {
7873 continue
7874 }
7875 mem := x.Args[3]
7876 ptr1 := x.Args[0]
7877 idx1 := x.Args[1]
7878 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
7879 continue
7880 }
7881 v.reset(OpARM64MOVHstoreidx)
7882 v.AddArg4(ptr1, idx1, w, mem)
7883 return true
7884 }
7885 break
7886 }
7887
7888
7889
7890 for {
7891 i := auxIntToInt32(v.AuxInt)
7892 s := auxToSym(v.Aux)
7893 ptr0 := v_0
7894 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 8) {
7895 break
7896 }
7897 w := v_1.Args[0]
7898 x := v_2
7899 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
7900 break
7901 }
7902 mem := x.Args[2]
7903 ptr1 := x.Args[0]
7904 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
7905 break
7906 }
7907 v.reset(OpARM64MOVHstore)
7908 v.AuxInt = int32ToAuxInt(i - 1)
7909 v.Aux = symToAux(s)
7910 v.AddArg3(ptr0, w, mem)
7911 return true
7912 }
7913
7914
7915
7916 for {
7917 if auxIntToInt32(v.AuxInt) != 1 {
7918 break
7919 }
7920 s := auxToSym(v.Aux)
7921 if v_0.Op != OpARM64ADD {
7922 break
7923 }
7924 _ = v_0.Args[1]
7925 v_0_0 := v_0.Args[0]
7926 v_0_1 := v_0.Args[1]
7927 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7928 ptr0 := v_0_0
7929 idx0 := v_0_1
7930 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 8) {
7931 continue
7932 }
7933 w := v_1.Args[0]
7934 x := v_2
7935 if x.Op != OpARM64MOVBstoreidx {
7936 continue
7937 }
7938 mem := x.Args[3]
7939 ptr1 := x.Args[0]
7940 idx1 := x.Args[1]
7941 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
7942 continue
7943 }
7944 v.reset(OpARM64MOVHstoreidx)
7945 v.AddArg4(ptr1, idx1, w, mem)
7946 return true
7947 }
7948 break
7949 }
7950
7951
7952
7953 for {
7954 i := auxIntToInt32(v.AuxInt)
7955 s := auxToSym(v.Aux)
7956 ptr0 := v_0
7957 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 24) {
7958 break
7959 }
7960 w := v_1.Args[0]
7961 x := v_2
7962 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
7963 break
7964 }
7965 mem := x.Args[2]
7966 ptr1 := x.Args[0]
7967 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
7968 break
7969 }
7970 v.reset(OpARM64MOVHstore)
7971 v.AuxInt = int32ToAuxInt(i - 1)
7972 v.Aux = symToAux(s)
7973 v.AddArg3(ptr0, w, mem)
7974 return true
7975 }
7976
7977
7978
7979 for {
7980 if auxIntToInt32(v.AuxInt) != 1 {
7981 break
7982 }
7983 s := auxToSym(v.Aux)
7984 if v_0.Op != OpARM64ADD {
7985 break
7986 }
7987 _ = v_0.Args[1]
7988 v_0_0 := v_0.Args[0]
7989 v_0_1 := v_0.Args[1]
7990 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7991 ptr0 := v_0_0
7992 idx0 := v_0_1
7993 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 24) {
7994 continue
7995 }
7996 w := v_1.Args[0]
7997 x := v_2
7998 if x.Op != OpARM64MOVBstoreidx {
7999 continue
8000 }
8001 mem := x.Args[3]
8002 ptr1 := x.Args[0]
8003 idx1 := x.Args[1]
8004 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
8005 continue
8006 }
8007 v.reset(OpARM64MOVHstoreidx)
8008 v.AddArg4(ptr1, idx1, w, mem)
8009 return true
8010 }
8011 break
8012 }
8013
8014
8015
8016 for {
8017 i := auxIntToInt32(v.AuxInt)
8018 s := auxToSym(v.Aux)
8019 ptr0 := v_0
8020 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
8021 break
8022 }
8023 v_1_0 := v_1.Args[0]
8024 if v_1_0.Op != OpARM64MOVDreg {
8025 break
8026 }
8027 w := v_1_0.Args[0]
8028 x := v_2
8029 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
8030 break
8031 }
8032 mem := x.Args[2]
8033 ptr1 := x.Args[0]
8034 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
8035 break
8036 }
8037 v.reset(OpARM64MOVHstore)
8038 v.AuxInt = int32ToAuxInt(i - 1)
8039 v.Aux = symToAux(s)
8040 v.AddArg3(ptr0, w, mem)
8041 return true
8042 }
8043
8044
8045
8046 for {
8047 if auxIntToInt32(v.AuxInt) != 1 {
8048 break
8049 }
8050 s := auxToSym(v.Aux)
8051 if v_0.Op != OpARM64ADD {
8052 break
8053 }
8054 _ = v_0.Args[1]
8055 v_0_0 := v_0.Args[0]
8056 v_0_1 := v_0.Args[1]
8057 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8058 ptr0 := v_0_0
8059 idx0 := v_0_1
8060 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
8061 continue
8062 }
8063 v_1_0 := v_1.Args[0]
8064 if v_1_0.Op != OpARM64MOVDreg {
8065 continue
8066 }
8067 w := v_1_0.Args[0]
8068 x := v_2
8069 if x.Op != OpARM64MOVBstoreidx {
8070 continue
8071 }
8072 mem := x.Args[3]
8073 ptr1 := x.Args[0]
8074 idx1 := x.Args[1]
8075 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
8076 continue
8077 }
8078 v.reset(OpARM64MOVHstoreidx)
8079 v.AddArg4(ptr1, idx1, w, mem)
8080 return true
8081 }
8082 break
8083 }
8084
8085
8086
8087 for {
8088 i := auxIntToInt32(v.AuxInt)
8089 s := auxToSym(v.Aux)
8090 ptr0 := v_0
8091 if v_1.Op != OpARM64SRLconst {
8092 break
8093 }
8094 j := auxIntToInt64(v_1.AuxInt)
8095 w := v_1.Args[0]
8096 x := v_2
8097 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
8098 break
8099 }
8100 mem := x.Args[2]
8101 ptr1 := x.Args[0]
8102 w0 := x.Args[1]
8103 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
8104 break
8105 }
8106 v.reset(OpARM64MOVHstore)
8107 v.AuxInt = int32ToAuxInt(i - 1)
8108 v.Aux = symToAux(s)
8109 v.AddArg3(ptr0, w0, mem)
8110 return true
8111 }
8112
8113
8114
8115 for {
8116 if auxIntToInt32(v.AuxInt) != 1 {
8117 break
8118 }
8119 s := auxToSym(v.Aux)
8120 if v_0.Op != OpARM64ADD {
8121 break
8122 }
8123 _ = v_0.Args[1]
8124 v_0_0 := v_0.Args[0]
8125 v_0_1 := v_0.Args[1]
8126 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8127 ptr0 := v_0_0
8128 idx0 := v_0_1
8129 if v_1.Op != OpARM64SRLconst {
8130 continue
8131 }
8132 j := auxIntToInt64(v_1.AuxInt)
8133 w := v_1.Args[0]
8134 x := v_2
8135 if x.Op != OpARM64MOVBstoreidx {
8136 continue
8137 }
8138 mem := x.Args[3]
8139 ptr1 := x.Args[0]
8140 idx1 := x.Args[1]
8141 w0 := x.Args[2]
8142 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
8143 continue
8144 }
8145 v.reset(OpARM64MOVHstoreidx)
8146 v.AddArg4(ptr1, idx1, w0, mem)
8147 return true
8148 }
8149 break
8150 }
8151
8152
8153
8154 for {
8155 i := auxIntToInt32(v.AuxInt)
8156 s := auxToSym(v.Aux)
8157 ptr0 := v_0
8158 if v_1.Op != OpARM64UBFX {
8159 break
8160 }
8161 bfc := auxIntToArm64BitField(v_1.AuxInt)
8162 w := v_1.Args[0]
8163 x := v_2
8164 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
8165 break
8166 }
8167 mem := x.Args[2]
8168 ptr1 := x.Args[0]
8169 w0 := x.Args[1]
8170 if w0.Op != OpARM64UBFX {
8171 break
8172 }
8173 bfc2 := auxIntToArm64BitField(w0.AuxInt)
8174 if w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && bfc.getARM64BFwidth() == 32-bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32-bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb()-8 && clobber(x)) {
8175 break
8176 }
8177 v.reset(OpARM64MOVHstore)
8178 v.AuxInt = int32ToAuxInt(i - 1)
8179 v.Aux = symToAux(s)
8180 v.AddArg3(ptr0, w0, mem)
8181 return true
8182 }
8183
8184
8185
8186 for {
8187 if auxIntToInt32(v.AuxInt) != 1 {
8188 break
8189 }
8190 s := auxToSym(v.Aux)
8191 if v_0.Op != OpARM64ADD {
8192 break
8193 }
8194 _ = v_0.Args[1]
8195 v_0_0 := v_0.Args[0]
8196 v_0_1 := v_0.Args[1]
8197 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8198 ptr0 := v_0_0
8199 idx0 := v_0_1
8200 if v_1.Op != OpARM64UBFX {
8201 continue
8202 }
8203 bfc := auxIntToArm64BitField(v_1.AuxInt)
8204 w := v_1.Args[0]
8205 x := v_2
8206 if x.Op != OpARM64MOVBstoreidx {
8207 continue
8208 }
8209 mem := x.Args[3]
8210 ptr1 := x.Args[0]
8211 idx1 := x.Args[1]
8212 w0 := x.Args[2]
8213 if w0.Op != OpARM64UBFX {
8214 continue
8215 }
8216 bfc2 := auxIntToArm64BitField(w0.AuxInt)
8217 if w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && bfc.getARM64BFwidth() == 32-bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32-bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb()-8 && clobber(x)) {
8218 continue
8219 }
8220 v.reset(OpARM64MOVHstoreidx)
8221 v.AddArg4(ptr1, idx1, w0, mem)
8222 return true
8223 }
8224 break
8225 }
8226
8227
8228
8229 for {
8230 i := auxIntToInt32(v.AuxInt)
8231 s := auxToSym(v.Aux)
8232 ptr0 := v_0
8233 if v_1.Op != OpARM64SRLconst {
8234 break
8235 }
8236 j := auxIntToInt64(v_1.AuxInt)
8237 v_1_0 := v_1.Args[0]
8238 if v_1_0.Op != OpARM64MOVDreg {
8239 break
8240 }
8241 w := v_1_0.Args[0]
8242 x := v_2
8243 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
8244 break
8245 }
8246 mem := x.Args[2]
8247 ptr1 := x.Args[0]
8248 w0 := x.Args[1]
8249 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 {
8250 break
8251 }
8252 w0_0 := w0.Args[0]
8253 if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
8254 break
8255 }
8256 v.reset(OpARM64MOVHstore)
8257 v.AuxInt = int32ToAuxInt(i - 1)
8258 v.Aux = symToAux(s)
8259 v.AddArg3(ptr0, w0, mem)
8260 return true
8261 }
8262
8263
8264
8265 for {
8266 if auxIntToInt32(v.AuxInt) != 1 {
8267 break
8268 }
8269 s := auxToSym(v.Aux)
8270 if v_0.Op != OpARM64ADD {
8271 break
8272 }
8273 _ = v_0.Args[1]
8274 v_0_0 := v_0.Args[0]
8275 v_0_1 := v_0.Args[1]
8276 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8277 ptr0 := v_0_0
8278 idx0 := v_0_1
8279 if v_1.Op != OpARM64SRLconst {
8280 continue
8281 }
8282 j := auxIntToInt64(v_1.AuxInt)
8283 v_1_0 := v_1.Args[0]
8284 if v_1_0.Op != OpARM64MOVDreg {
8285 continue
8286 }
8287 w := v_1_0.Args[0]
8288 x := v_2
8289 if x.Op != OpARM64MOVBstoreidx {
8290 continue
8291 }
8292 mem := x.Args[3]
8293 ptr1 := x.Args[0]
8294 idx1 := x.Args[1]
8295 w0 := x.Args[2]
8296 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 {
8297 continue
8298 }
8299 w0_0 := w0.Args[0]
8300 if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
8301 continue
8302 }
8303 v.reset(OpARM64MOVHstoreidx)
8304 v.AddArg4(ptr1, idx1, w0, mem)
8305 return true
8306 }
8307 break
8308 }
8309
8310
8311
8312 for {
8313 i := auxIntToInt32(v.AuxInt)
8314 s := auxToSym(v.Aux)
8315 ptr := v_0
8316 w := v_1
8317 x0 := v_2
8318 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
8319 break
8320 }
8321 _ = x0.Args[2]
8322 if ptr != x0.Args[0] {
8323 break
8324 }
8325 x0_1 := x0.Args[1]
8326 if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
8327 break
8328 }
8329 x1 := x0.Args[2]
8330 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
8331 break
8332 }
8333 _ = x1.Args[2]
8334 if ptr != x1.Args[0] {
8335 break
8336 }
8337 x1_1 := x1.Args[1]
8338 if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
8339 break
8340 }
8341 x2 := x1.Args[2]
8342 if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
8343 break
8344 }
8345 _ = x2.Args[2]
8346 if ptr != x2.Args[0] {
8347 break
8348 }
8349 x2_1 := x2.Args[1]
8350 if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] {
8351 break
8352 }
8353 x3 := x2.Args[2]
8354 if x3.Op != OpARM64MOVBstore || auxIntToInt32(x3.AuxInt) != i-4 || auxToSym(x3.Aux) != s {
8355 break
8356 }
8357 _ = x3.Args[2]
8358 if ptr != x3.Args[0] {
8359 break
8360 }
8361 x3_1 := x3.Args[1]
8362 if x3_1.Op != OpARM64SRLconst || auxIntToInt64(x3_1.AuxInt) != 32 || w != x3_1.Args[0] {
8363 break
8364 }
8365 x4 := x3.Args[2]
8366 if x4.Op != OpARM64MOVBstore || auxIntToInt32(x4.AuxInt) != i-5 || auxToSym(x4.Aux) != s {
8367 break
8368 }
8369 _ = x4.Args[2]
8370 if ptr != x4.Args[0] {
8371 break
8372 }
8373 x4_1 := x4.Args[1]
8374 if x4_1.Op != OpARM64SRLconst || auxIntToInt64(x4_1.AuxInt) != 40 || w != x4_1.Args[0] {
8375 break
8376 }
8377 x5 := x4.Args[2]
8378 if x5.Op != OpARM64MOVBstore || auxIntToInt32(x5.AuxInt) != i-6 || auxToSym(x5.Aux) != s {
8379 break
8380 }
8381 _ = x5.Args[2]
8382 if ptr != x5.Args[0] {
8383 break
8384 }
8385 x5_1 := x5.Args[1]
8386 if x5_1.Op != OpARM64SRLconst || auxIntToInt64(x5_1.AuxInt) != 48 || w != x5_1.Args[0] {
8387 break
8388 }
8389 x6 := x5.Args[2]
8390 if x6.Op != OpARM64MOVBstore || auxIntToInt32(x6.AuxInt) != i-7 || auxToSym(x6.Aux) != s {
8391 break
8392 }
8393 mem := x6.Args[2]
8394 if ptr != x6.Args[0] {
8395 break
8396 }
8397 x6_1 := x6.Args[1]
8398 if x6_1.Op != OpARM64SRLconst || auxIntToInt64(x6_1.AuxInt) != 56 || w != x6_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)) {
8399 break
8400 }
8401 v.reset(OpARM64MOVDstore)
8402 v.AuxInt = int32ToAuxInt(i - 7)
8403 v.Aux = symToAux(s)
8404 v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type)
8405 v0.AddArg(w)
8406 v.AddArg3(ptr, v0, mem)
8407 return true
8408 }
8409
8410
8411
8412 for {
8413 if auxIntToInt32(v.AuxInt) != 7 {
8414 break
8415 }
8416 s := auxToSym(v.Aux)
8417 p := v_0
8418 w := v_1
8419 x0 := v_2
8420 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 6 || auxToSym(x0.Aux) != s {
8421 break
8422 }
8423 _ = x0.Args[2]
8424 if p != x0.Args[0] {
8425 break
8426 }
8427 x0_1 := x0.Args[1]
8428 if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
8429 break
8430 }
8431 x1 := x0.Args[2]
8432 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 5 || auxToSym(x1.Aux) != s {
8433 break
8434 }
8435 _ = x1.Args[2]
8436 if p != x1.Args[0] {
8437 break
8438 }
8439 x1_1 := x1.Args[1]
8440 if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
8441 break
8442 }
8443 x2 := x1.Args[2]
8444 if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != 4 || auxToSym(x2.Aux) != s {
8445 break
8446 }
8447 _ = x2.Args[2]
8448 if p != x2.Args[0] {
8449 break
8450 }
8451 x2_1 := x2.Args[1]
8452 if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] {
8453 break
8454 }
8455 x3 := x2.Args[2]
8456 if x3.Op != OpARM64MOVBstore || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
8457 break
8458 }
8459 _ = x3.Args[2]
8460 if p != x3.Args[0] {
8461 break
8462 }
8463 x3_1 := x3.Args[1]
8464 if x3_1.Op != OpARM64SRLconst || auxIntToInt64(x3_1.AuxInt) != 32 || w != x3_1.Args[0] {
8465 break
8466 }
8467 x4 := x3.Args[2]
8468 if x4.Op != OpARM64MOVBstore || auxIntToInt32(x4.AuxInt) != 2 || auxToSym(x4.Aux) != s {
8469 break
8470 }
8471 _ = x4.Args[2]
8472 if p != x4.Args[0] {
8473 break
8474 }
8475 x4_1 := x4.Args[1]
8476 if x4_1.Op != OpARM64SRLconst || auxIntToInt64(x4_1.AuxInt) != 40 || w != x4_1.Args[0] {
8477 break
8478 }
8479 x5 := x4.Args[2]
8480 if x5.Op != OpARM64MOVBstore || auxIntToInt32(x5.AuxInt) != 1 || auxToSym(x5.Aux) != s {
8481 break
8482 }
8483 _ = x5.Args[2]
8484 p1 := x5.Args[0]
8485 if p1.Op != OpARM64ADD {
8486 break
8487 }
8488 _ = p1.Args[1]
8489 p1_0 := p1.Args[0]
8490 p1_1 := p1.Args[1]
8491 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
8492 ptr1 := p1_0
8493 idx1 := p1_1
8494 x5_1 := x5.Args[1]
8495 if x5_1.Op != OpARM64SRLconst || auxIntToInt64(x5_1.AuxInt) != 48 || w != x5_1.Args[0] {
8496 continue
8497 }
8498 x6 := x5.Args[2]
8499 if x6.Op != OpARM64MOVBstoreidx {
8500 continue
8501 }
8502 mem := x6.Args[3]
8503 ptr0 := x6.Args[0]
8504 idx0 := x6.Args[1]
8505 x6_2 := x6.Args[2]
8506 if x6_2.Op != OpARM64SRLconst || auxIntToInt64(x6_2.AuxInt) != 56 || w != x6_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6)) {
8507 continue
8508 }
8509 v.reset(OpARM64MOVDstoreidx)
8510 v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type)
8511 v0.AddArg(w)
8512 v.AddArg4(ptr0, idx0, v0, mem)
8513 return true
8514 }
8515 break
8516 }
8517
8518
8519
8520 for {
8521 i := auxIntToInt32(v.AuxInt)
8522 s := auxToSym(v.Aux)
8523 ptr := v_0
8524 w := v_1
8525 x0 := v_2
8526 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
8527 break
8528 }
8529 _ = x0.Args[2]
8530 if ptr != x0.Args[0] {
8531 break
8532 }
8533 x0_1 := x0.Args[1]
8534 if x0_1.Op != OpARM64UBFX || auxIntToArm64BitField(x0_1.AuxInt) != armBFAuxInt(8, 24) || w != x0_1.Args[0] {
8535 break
8536 }
8537 x1 := x0.Args[2]
8538 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
8539 break
8540 }
8541 _ = x1.Args[2]
8542 if ptr != x1.Args[0] {
8543 break
8544 }
8545 x1_1 := x1.Args[1]
8546 if x1_1.Op != OpARM64UBFX || auxIntToArm64BitField(x1_1.AuxInt) != armBFAuxInt(16, 16) || w != x1_1.Args[0] {
8547 break
8548 }
8549 x2 := x1.Args[2]
8550 if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
8551 break
8552 }
8553 mem := x2.Args[2]
8554 if ptr != x2.Args[0] {
8555 break
8556 }
8557 x2_1 := x2.Args[1]
8558 if x2_1.Op != OpARM64UBFX || auxIntToArm64BitField(x2_1.AuxInt) != armBFAuxInt(24, 8) || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
8559 break
8560 }
8561 v.reset(OpARM64MOVWstore)
8562 v.AuxInt = int32ToAuxInt(i - 3)
8563 v.Aux = symToAux(s)
8564 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
8565 v0.AddArg(w)
8566 v.AddArg3(ptr, v0, mem)
8567 return true
8568 }
8569
8570
8571
8572 for {
8573 if auxIntToInt32(v.AuxInt) != 3 {
8574 break
8575 }
8576 s := auxToSym(v.Aux)
8577 p := v_0
8578 w := v_1
8579 x0 := v_2
8580 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
8581 break
8582 }
8583 _ = x0.Args[2]
8584 if p != x0.Args[0] {
8585 break
8586 }
8587 x0_1 := x0.Args[1]
8588 if x0_1.Op != OpARM64UBFX || auxIntToArm64BitField(x0_1.AuxInt) != armBFAuxInt(8, 24) || w != x0_1.Args[0] {
8589 break
8590 }
8591 x1 := x0.Args[2]
8592 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
8593 break
8594 }
8595 _ = x1.Args[2]
8596 p1 := x1.Args[0]
8597 if p1.Op != OpARM64ADD {
8598 break
8599 }
8600 _ = p1.Args[1]
8601 p1_0 := p1.Args[0]
8602 p1_1 := p1.Args[1]
8603 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
8604 ptr1 := p1_0
8605 idx1 := p1_1
8606 x1_1 := x1.Args[1]
8607 if x1_1.Op != OpARM64UBFX || auxIntToArm64BitField(x1_1.AuxInt) != armBFAuxInt(16, 16) || w != x1_1.Args[0] {
8608 continue
8609 }
8610 x2 := x1.Args[2]
8611 if x2.Op != OpARM64MOVBstoreidx {
8612 continue
8613 }
8614 mem := x2.Args[3]
8615 ptr0 := x2.Args[0]
8616 idx0 := x2.Args[1]
8617 x2_2 := x2.Args[2]
8618 if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
8619 continue
8620 }
8621 v.reset(OpARM64MOVWstoreidx)
8622 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
8623 v0.AddArg(w)
8624 v.AddArg4(ptr0, idx0, v0, mem)
8625 return true
8626 }
8627 break
8628 }
8629
8630
8631
8632 for {
8633 i := auxIntToInt32(v.AuxInt)
8634 s := auxToSym(v.Aux)
8635 ptr := v_0
8636 w := v_1
8637 x0 := v_2
8638 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
8639 break
8640 }
8641 _ = x0.Args[2]
8642 if ptr != x0.Args[0] {
8643 break
8644 }
8645 x0_1 := x0.Args[1]
8646 if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 {
8647 break
8648 }
8649 x0_1_0 := x0_1.Args[0]
8650 if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] {
8651 break
8652 }
8653 x1 := x0.Args[2]
8654 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
8655 break
8656 }
8657 _ = x1.Args[2]
8658 if ptr != x1.Args[0] {
8659 break
8660 }
8661 x1_1 := x1.Args[1]
8662 if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 {
8663 break
8664 }
8665 x1_1_0 := x1_1.Args[0]
8666 if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] {
8667 break
8668 }
8669 x2 := x1.Args[2]
8670 if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
8671 break
8672 }
8673 mem := x2.Args[2]
8674 if ptr != x2.Args[0] {
8675 break
8676 }
8677 x2_1 := x2.Args[1]
8678 if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 {
8679 break
8680 }
8681 x2_1_0 := x2_1.Args[0]
8682 if x2_1_0.Op != OpARM64MOVDreg || w != x2_1_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
8683 break
8684 }
8685 v.reset(OpARM64MOVWstore)
8686 v.AuxInt = int32ToAuxInt(i - 3)
8687 v.Aux = symToAux(s)
8688 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
8689 v0.AddArg(w)
8690 v.AddArg3(ptr, v0, mem)
8691 return true
8692 }
8693
8694
8695
8696 for {
8697 if auxIntToInt32(v.AuxInt) != 3 {
8698 break
8699 }
8700 s := auxToSym(v.Aux)
8701 p := v_0
8702 w := v_1
8703 x0 := v_2
8704 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
8705 break
8706 }
8707 _ = x0.Args[2]
8708 if p != x0.Args[0] {
8709 break
8710 }
8711 x0_1 := x0.Args[1]
8712 if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 {
8713 break
8714 }
8715 x0_1_0 := x0_1.Args[0]
8716 if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] {
8717 break
8718 }
8719 x1 := x0.Args[2]
8720 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
8721 break
8722 }
8723 _ = x1.Args[2]
8724 p1 := x1.Args[0]
8725 if p1.Op != OpARM64ADD {
8726 break
8727 }
8728 _ = p1.Args[1]
8729 p1_0 := p1.Args[0]
8730 p1_1 := p1.Args[1]
8731 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
8732 ptr1 := p1_0
8733 idx1 := p1_1
8734 x1_1 := x1.Args[1]
8735 if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 {
8736 continue
8737 }
8738 x1_1_0 := x1_1.Args[0]
8739 if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] {
8740 continue
8741 }
8742 x2 := x1.Args[2]
8743 if x2.Op != OpARM64MOVBstoreidx {
8744 continue
8745 }
8746 mem := x2.Args[3]
8747 ptr0 := x2.Args[0]
8748 idx0 := x2.Args[1]
8749 x2_2 := x2.Args[2]
8750 if x2_2.Op != OpARM64SRLconst || auxIntToInt64(x2_2.AuxInt) != 24 {
8751 continue
8752 }
8753 x2_2_0 := x2_2.Args[0]
8754 if x2_2_0.Op != OpARM64MOVDreg || w != x2_2_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
8755 continue
8756 }
8757 v.reset(OpARM64MOVWstoreidx)
8758 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
8759 v0.AddArg(w)
8760 v.AddArg4(ptr0, idx0, v0, mem)
8761 return true
8762 }
8763 break
8764 }
8765
8766
8767
8768 for {
8769 i := auxIntToInt32(v.AuxInt)
8770 s := auxToSym(v.Aux)
8771 ptr := v_0
8772 w := v_1
8773 x0 := v_2
8774 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
8775 break
8776 }
8777 _ = x0.Args[2]
8778 if ptr != x0.Args[0] {
8779 break
8780 }
8781 x0_1 := x0.Args[1]
8782 if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
8783 break
8784 }
8785 x1 := x0.Args[2]
8786 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
8787 break
8788 }
8789 _ = x1.Args[2]
8790 if ptr != x1.Args[0] {
8791 break
8792 }
8793 x1_1 := x1.Args[1]
8794 if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
8795 break
8796 }
8797 x2 := x1.Args[2]
8798 if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
8799 break
8800 }
8801 mem := x2.Args[2]
8802 if ptr != x2.Args[0] {
8803 break
8804 }
8805 x2_1 := x2.Args[1]
8806 if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
8807 break
8808 }
8809 v.reset(OpARM64MOVWstore)
8810 v.AuxInt = int32ToAuxInt(i - 3)
8811 v.Aux = symToAux(s)
8812 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
8813 v0.AddArg(w)
8814 v.AddArg3(ptr, v0, mem)
8815 return true
8816 }
8817
8818
8819
8820 for {
8821 if auxIntToInt32(v.AuxInt) != 3 {
8822 break
8823 }
8824 s := auxToSym(v.Aux)
8825 p := v_0
8826 w := v_1
8827 x0 := v_2
8828 if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
8829 break
8830 }
8831 _ = x0.Args[2]
8832 if p != x0.Args[0] {
8833 break
8834 }
8835 x0_1 := x0.Args[1]
8836 if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
8837 break
8838 }
8839 x1 := x0.Args[2]
8840 if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
8841 break
8842 }
8843 _ = x1.Args[2]
8844 p1 := x1.Args[0]
8845 if p1.Op != OpARM64ADD {
8846 break
8847 }
8848 _ = p1.Args[1]
8849 p1_0 := p1.Args[0]
8850 p1_1 := p1.Args[1]
8851 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
8852 ptr1 := p1_0
8853 idx1 := p1_1
8854 x1_1 := x1.Args[1]
8855 if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
8856 continue
8857 }
8858 x2 := x1.Args[2]
8859 if x2.Op != OpARM64MOVBstoreidx {
8860 continue
8861 }
8862 mem := x2.Args[3]
8863 ptr0 := x2.Args[0]
8864 idx0 := x2.Args[1]
8865 x2_2 := x2.Args[2]
8866 if x2_2.Op != OpARM64SRLconst || auxIntToInt64(x2_2.AuxInt) != 24 || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
8867 continue
8868 }
8869 v.reset(OpARM64MOVWstoreidx)
8870 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
8871 v0.AddArg(w)
8872 v.AddArg4(ptr0, idx0, v0, mem)
8873 return true
8874 }
8875 break
8876 }
8877
8878
8879
8880 for {
8881 i := auxIntToInt32(v.AuxInt)
8882 s := auxToSym(v.Aux)
8883 ptr := v_0
8884 w := v_1
8885 x := v_2
8886 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
8887 break
8888 }
8889 mem := x.Args[2]
8890 if ptr != x.Args[0] {
8891 break
8892 }
8893 x_1 := x.Args[1]
8894 if x_1.Op != OpARM64SRLconst || auxIntToInt64(x_1.AuxInt) != 8 || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
8895 break
8896 }
8897 v.reset(OpARM64MOVHstore)
8898 v.AuxInt = int32ToAuxInt(i - 1)
8899 v.Aux = symToAux(s)
8900 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
8901 v0.AddArg(w)
8902 v.AddArg3(ptr, v0, mem)
8903 return true
8904 }
8905
8906
8907
8908 for {
8909 if auxIntToInt32(v.AuxInt) != 1 {
8910 break
8911 }
8912 s := auxToSym(v.Aux)
8913 if v_0.Op != OpARM64ADD {
8914 break
8915 }
8916 _ = v_0.Args[1]
8917 v_0_0 := v_0.Args[0]
8918 v_0_1 := v_0.Args[1]
8919 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8920 ptr1 := v_0_0
8921 idx1 := v_0_1
8922 w := v_1
8923 x := v_2
8924 if x.Op != OpARM64MOVBstoreidx {
8925 continue
8926 }
8927 mem := x.Args[3]
8928 ptr0 := x.Args[0]
8929 idx0 := x.Args[1]
8930 x_2 := x.Args[2]
8931 if x_2.Op != OpARM64SRLconst || auxIntToInt64(x_2.AuxInt) != 8 || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
8932 continue
8933 }
8934 v.reset(OpARM64MOVHstoreidx)
8935 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
8936 v0.AddArg(w)
8937 v.AddArg4(ptr0, idx0, v0, mem)
8938 return true
8939 }
8940 break
8941 }
8942
8943
8944
8945 for {
8946 i := auxIntToInt32(v.AuxInt)
8947 s := auxToSym(v.Aux)
8948 ptr := v_0
8949 w := v_1
8950 x := v_2
8951 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
8952 break
8953 }
8954 mem := x.Args[2]
8955 if ptr != x.Args[0] {
8956 break
8957 }
8958 x_1 := x.Args[1]
8959 if x_1.Op != OpARM64UBFX || auxIntToArm64BitField(x_1.AuxInt) != armBFAuxInt(8, 8) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
8960 break
8961 }
8962 v.reset(OpARM64MOVHstore)
8963 v.AuxInt = int32ToAuxInt(i - 1)
8964 v.Aux = symToAux(s)
8965 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
8966 v0.AddArg(w)
8967 v.AddArg3(ptr, v0, mem)
8968 return true
8969 }
8970
8971
8972
8973 for {
8974 if auxIntToInt32(v.AuxInt) != 1 {
8975 break
8976 }
8977 s := auxToSym(v.Aux)
8978 if v_0.Op != OpARM64ADD {
8979 break
8980 }
8981 _ = v_0.Args[1]
8982 v_0_0 := v_0.Args[0]
8983 v_0_1 := v_0.Args[1]
8984 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8985 ptr1 := v_0_0
8986 idx1 := v_0_1
8987 w := v_1
8988 x := v_2
8989 if x.Op != OpARM64MOVBstoreidx {
8990 continue
8991 }
8992 mem := x.Args[3]
8993 ptr0 := x.Args[0]
8994 idx0 := x.Args[1]
8995 x_2 := x.Args[2]
8996 if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
8997 continue
8998 }
8999 v.reset(OpARM64MOVHstoreidx)
9000 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
9001 v0.AddArg(w)
9002 v.AddArg4(ptr0, idx0, v0, mem)
9003 return true
9004 }
9005 break
9006 }
9007
9008
9009
9010 for {
9011 i := auxIntToInt32(v.AuxInt)
9012 s := auxToSym(v.Aux)
9013 ptr := v_0
9014 w := v_1
9015 x := v_2
9016 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
9017 break
9018 }
9019 mem := x.Args[2]
9020 if ptr != x.Args[0] {
9021 break
9022 }
9023 x_1 := x.Args[1]
9024 if x_1.Op != OpARM64SRLconst || auxIntToInt64(x_1.AuxInt) != 8 {
9025 break
9026 }
9027 x_1_0 := x_1.Args[0]
9028 if x_1_0.Op != OpARM64MOVDreg || w != x_1_0.Args[0] || !(x.Uses == 1 && clobber(x)) {
9029 break
9030 }
9031 v.reset(OpARM64MOVHstore)
9032 v.AuxInt = int32ToAuxInt(i - 1)
9033 v.Aux = symToAux(s)
9034 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
9035 v0.AddArg(w)
9036 v.AddArg3(ptr, v0, mem)
9037 return true
9038 }
9039
9040
9041
9042 for {
9043 if auxIntToInt32(v.AuxInt) != 1 {
9044 break
9045 }
9046 s := auxToSym(v.Aux)
9047 if v_0.Op != OpARM64ADD {
9048 break
9049 }
9050 _ = v_0.Args[1]
9051 v_0_0 := v_0.Args[0]
9052 v_0_1 := v_0.Args[1]
9053 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
9054 ptr1 := v_0_0
9055 idx1 := v_0_1
9056 w := v_1
9057 x := v_2
9058 if x.Op != OpARM64MOVBstoreidx {
9059 continue
9060 }
9061 mem := x.Args[3]
9062 ptr0 := x.Args[0]
9063 idx0 := x.Args[1]
9064 x_2 := x.Args[2]
9065 if x_2.Op != OpARM64SRLconst || auxIntToInt64(x_2.AuxInt) != 8 {
9066 continue
9067 }
9068 x_2_0 := x_2.Args[0]
9069 if x_2_0.Op != OpARM64MOVDreg || w != x_2_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
9070 continue
9071 }
9072 v.reset(OpARM64MOVHstoreidx)
9073 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
9074 v0.AddArg(w)
9075 v.AddArg4(ptr0, idx0, v0, mem)
9076 return true
9077 }
9078 break
9079 }
9080
9081
9082
9083 for {
9084 i := auxIntToInt32(v.AuxInt)
9085 s := auxToSym(v.Aux)
9086 ptr := v_0
9087 w := v_1
9088 x := v_2
9089 if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
9090 break
9091 }
9092 mem := x.Args[2]
9093 if ptr != x.Args[0] {
9094 break
9095 }
9096 x_1 := x.Args[1]
9097 if x_1.Op != OpARM64UBFX || auxIntToArm64BitField(x_1.AuxInt) != armBFAuxInt(8, 24) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
9098 break
9099 }
9100 v.reset(OpARM64MOVHstore)
9101 v.AuxInt = int32ToAuxInt(i - 1)
9102 v.Aux = symToAux(s)
9103 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
9104 v0.AddArg(w)
9105 v.AddArg3(ptr, v0, mem)
9106 return true
9107 }
9108
9109
9110
9111 for {
9112 if auxIntToInt32(v.AuxInt) != 1 {
9113 break
9114 }
9115 s := auxToSym(v.Aux)
9116 if v_0.Op != OpARM64ADD {
9117 break
9118 }
9119 _ = v_0.Args[1]
9120 v_0_0 := v_0.Args[0]
9121 v_0_1 := v_0.Args[1]
9122 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
9123 ptr1 := v_0_0
9124 idx1 := v_0_1
9125 w := v_1
9126 x := v_2
9127 if x.Op != OpARM64MOVBstoreidx {
9128 continue
9129 }
9130 mem := x.Args[3]
9131 ptr0 := x.Args[0]
9132 idx0 := x.Args[1]
9133 x_2 := x.Args[2]
9134 if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 24) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
9135 continue
9136 }
9137 v.reset(OpARM64MOVHstoreidx)
9138 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
9139 v0.AddArg(w)
9140 v.AddArg4(ptr0, idx0, v0, mem)
9141 return true
9142 }
9143 break
9144 }
9145 return false
9146 }
9147 func rewriteValueARM64_OpARM64MOVBstoreidx(v *Value) bool {
9148 v_3 := v.Args[3]
9149 v_2 := v.Args[2]
9150 v_1 := v.Args[1]
9151 v_0 := v.Args[0]
9152 b := v.Block
9153
9154
9155
9156 for {
9157 ptr := v_0
9158 if v_1.Op != OpARM64MOVDconst {
9159 break
9160 }
9161 c := auxIntToInt64(v_1.AuxInt)
9162 val := v_2
9163 mem := v_3
9164 if !(is32Bit(c)) {
9165 break
9166 }
9167 v.reset(OpARM64MOVBstore)
9168 v.AuxInt = int32ToAuxInt(int32(c))
9169 v.AddArg3(ptr, val, mem)
9170 return true
9171 }
9172
9173
9174
9175 for {
9176 if v_0.Op != OpARM64MOVDconst {
9177 break
9178 }
9179 c := auxIntToInt64(v_0.AuxInt)
9180 idx := v_1
9181 val := v_2
9182 mem := v_3
9183 if !(is32Bit(c)) {
9184 break
9185 }
9186 v.reset(OpARM64MOVBstore)
9187 v.AuxInt = int32ToAuxInt(int32(c))
9188 v.AddArg3(idx, val, mem)
9189 return true
9190 }
9191
9192
9193 for {
9194 ptr := v_0
9195 idx := v_1
9196 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
9197 break
9198 }
9199 mem := v_3
9200 v.reset(OpARM64MOVBstorezeroidx)
9201 v.AddArg3(ptr, idx, mem)
9202 return true
9203 }
9204
9205
9206 for {
9207 ptr := v_0
9208 idx := v_1
9209 if v_2.Op != OpARM64MOVBreg {
9210 break
9211 }
9212 x := v_2.Args[0]
9213 mem := v_3
9214 v.reset(OpARM64MOVBstoreidx)
9215 v.AddArg4(ptr, idx, x, mem)
9216 return true
9217 }
9218
9219
9220 for {
9221 ptr := v_0
9222 idx := v_1
9223 if v_2.Op != OpARM64MOVBUreg {
9224 break
9225 }
9226 x := v_2.Args[0]
9227 mem := v_3
9228 v.reset(OpARM64MOVBstoreidx)
9229 v.AddArg4(ptr, idx, x, mem)
9230 return true
9231 }
9232
9233
9234 for {
9235 ptr := v_0
9236 idx := v_1
9237 if v_2.Op != OpARM64MOVHreg {
9238 break
9239 }
9240 x := v_2.Args[0]
9241 mem := v_3
9242 v.reset(OpARM64MOVBstoreidx)
9243 v.AddArg4(ptr, idx, x, mem)
9244 return true
9245 }
9246
9247
9248 for {
9249 ptr := v_0
9250 idx := v_1
9251 if v_2.Op != OpARM64MOVHUreg {
9252 break
9253 }
9254 x := v_2.Args[0]
9255 mem := v_3
9256 v.reset(OpARM64MOVBstoreidx)
9257 v.AddArg4(ptr, idx, x, mem)
9258 return true
9259 }
9260
9261
9262 for {
9263 ptr := v_0
9264 idx := v_1
9265 if v_2.Op != OpARM64MOVWreg {
9266 break
9267 }
9268 x := v_2.Args[0]
9269 mem := v_3
9270 v.reset(OpARM64MOVBstoreidx)
9271 v.AddArg4(ptr, idx, x, mem)
9272 return true
9273 }
9274
9275
9276 for {
9277 ptr := v_0
9278 idx := v_1
9279 if v_2.Op != OpARM64MOVWUreg {
9280 break
9281 }
9282 x := v_2.Args[0]
9283 mem := v_3
9284 v.reset(OpARM64MOVBstoreidx)
9285 v.AddArg4(ptr, idx, x, mem)
9286 return true
9287 }
9288
9289
9290
9291 for {
9292 ptr := v_0
9293 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
9294 break
9295 }
9296 idx := v_1.Args[0]
9297 if v_2.Op != OpARM64SRLconst || auxIntToInt64(v_2.AuxInt) != 8 {
9298 break
9299 }
9300 w := v_2.Args[0]
9301 x := v_3
9302 if x.Op != OpARM64MOVBstoreidx {
9303 break
9304 }
9305 mem := x.Args[3]
9306 if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) {
9307 break
9308 }
9309 v.reset(OpARM64MOVHstoreidx)
9310 v.AddArg4(ptr, idx, w, mem)
9311 return true
9312 }
9313
9314
9315
9316 for {
9317 ptr := v_0
9318 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 3 {
9319 break
9320 }
9321 idx := v_1.Args[0]
9322 w := v_2
9323 x0 := v_3
9324 if x0.Op != OpARM64MOVBstoreidx {
9325 break
9326 }
9327 _ = x0.Args[3]
9328 if ptr != x0.Args[0] {
9329 break
9330 }
9331 x0_1 := x0.Args[1]
9332 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 2 || idx != x0_1.Args[0] {
9333 break
9334 }
9335 x0_2 := x0.Args[2]
9336 if x0_2.Op != OpARM64UBFX || auxIntToArm64BitField(x0_2.AuxInt) != armBFAuxInt(8, 24) || w != x0_2.Args[0] {
9337 break
9338 }
9339 x1 := x0.Args[3]
9340 if x1.Op != OpARM64MOVBstoreidx {
9341 break
9342 }
9343 _ = x1.Args[3]
9344 if ptr != x1.Args[0] {
9345 break
9346 }
9347 x1_1 := x1.Args[1]
9348 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] {
9349 break
9350 }
9351 x1_2 := x1.Args[2]
9352 if x1_2.Op != OpARM64UBFX || auxIntToArm64BitField(x1_2.AuxInt) != armBFAuxInt(16, 16) || w != x1_2.Args[0] {
9353 break
9354 }
9355 x2 := x1.Args[3]
9356 if x2.Op != OpARM64MOVBstoreidx {
9357 break
9358 }
9359 mem := x2.Args[3]
9360 if ptr != x2.Args[0] || idx != x2.Args[1] {
9361 break
9362 }
9363 x2_2 := x2.Args[2]
9364 if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
9365 break
9366 }
9367 v.reset(OpARM64MOVWstoreidx)
9368 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type)
9369 v0.AddArg(w)
9370 v.AddArg4(ptr, idx, v0, mem)
9371 return true
9372 }
9373
9374
9375
9376 for {
9377 ptr := v_0
9378 idx := v_1
9379 w := v_2
9380 x0 := v_3
9381 if x0.Op != OpARM64MOVBstoreidx {
9382 break
9383 }
9384 _ = x0.Args[3]
9385 if ptr != x0.Args[0] {
9386 break
9387 }
9388 x0_1 := x0.Args[1]
9389 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 1 || idx != x0_1.Args[0] {
9390 break
9391 }
9392 x0_2 := x0.Args[2]
9393 if x0_2.Op != OpARM64UBFX || auxIntToArm64BitField(x0_2.AuxInt) != armBFAuxInt(8, 24) || w != x0_2.Args[0] {
9394 break
9395 }
9396 x1 := x0.Args[3]
9397 if x1.Op != OpARM64MOVBstoreidx {
9398 break
9399 }
9400 _ = x1.Args[3]
9401 if ptr != x1.Args[0] {
9402 break
9403 }
9404 x1_1 := x1.Args[1]
9405 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 2 || idx != x1_1.Args[0] {
9406 break
9407 }
9408 x1_2 := x1.Args[2]
9409 if x1_2.Op != OpARM64UBFX || auxIntToArm64BitField(x1_2.AuxInt) != armBFAuxInt(16, 16) || w != x1_2.Args[0] {
9410 break
9411 }
9412 x2 := x1.Args[3]
9413 if x2.Op != OpARM64MOVBstoreidx {
9414 break
9415 }
9416 mem := x2.Args[3]
9417 if ptr != x2.Args[0] {
9418 break
9419 }
9420 x2_1 := x2.Args[1]
9421 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 3 || idx != x2_1.Args[0] {
9422 break
9423 }
9424 x2_2 := x2.Args[2]
9425 if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
9426 break
9427 }
9428 v.reset(OpARM64MOVWstoreidx)
9429 v.AddArg4(ptr, idx, w, mem)
9430 return true
9431 }
9432
9433
9434
9435 for {
9436 ptr := v_0
9437 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
9438 break
9439 }
9440 idx := v_1.Args[0]
9441 w := v_2
9442 x := v_3
9443 if x.Op != OpARM64MOVBstoreidx {
9444 break
9445 }
9446 mem := x.Args[3]
9447 if ptr != x.Args[0] || idx != x.Args[1] {
9448 break
9449 }
9450 x_2 := x.Args[2]
9451 if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && clobber(x)) {
9452 break
9453 }
9454 v.reset(OpARM64MOVHstoreidx)
9455 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
9456 v0.AddArg(w)
9457 v.AddArg4(ptr, idx, v0, mem)
9458 return true
9459 }
9460
9461
9462
9463 for {
9464 ptr := v_0
9465 idx := v_1
9466 w := v_2
9467 x := v_3
9468 if x.Op != OpARM64MOVBstoreidx {
9469 break
9470 }
9471 mem := x.Args[3]
9472 if ptr != x.Args[0] {
9473 break
9474 }
9475 x_1 := x.Args[1]
9476 if x_1.Op != OpARM64ADDconst || auxIntToInt64(x_1.AuxInt) != 1 || idx != x_1.Args[0] {
9477 break
9478 }
9479 x_2 := x.Args[2]
9480 if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && clobber(x)) {
9481 break
9482 }
9483 v.reset(OpARM64MOVHstoreidx)
9484 v.AddArg4(ptr, idx, w, mem)
9485 return true
9486 }
9487 return false
9488 }
9489 func rewriteValueARM64_OpARM64MOVBstorezero(v *Value) bool {
9490 v_1 := v.Args[1]
9491 v_0 := v.Args[0]
9492 b := v.Block
9493 config := b.Func.Config
9494
9495
9496
9497 for {
9498 off1 := auxIntToInt32(v.AuxInt)
9499 sym := auxToSym(v.Aux)
9500 if v_0.Op != OpARM64ADDconst {
9501 break
9502 }
9503 off2 := auxIntToInt64(v_0.AuxInt)
9504 ptr := v_0.Args[0]
9505 mem := v_1
9506 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9507 break
9508 }
9509 v.reset(OpARM64MOVBstorezero)
9510 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
9511 v.Aux = symToAux(sym)
9512 v.AddArg2(ptr, mem)
9513 return true
9514 }
9515
9516
9517
9518 for {
9519 off1 := auxIntToInt32(v.AuxInt)
9520 sym1 := auxToSym(v.Aux)
9521 if v_0.Op != OpARM64MOVDaddr {
9522 break
9523 }
9524 off2 := auxIntToInt32(v_0.AuxInt)
9525 sym2 := auxToSym(v_0.Aux)
9526 ptr := v_0.Args[0]
9527 mem := v_1
9528 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9529 break
9530 }
9531 v.reset(OpARM64MOVBstorezero)
9532 v.AuxInt = int32ToAuxInt(off1 + off2)
9533 v.Aux = symToAux(mergeSym(sym1, sym2))
9534 v.AddArg2(ptr, mem)
9535 return true
9536 }
9537
9538
9539
9540 for {
9541 off := auxIntToInt32(v.AuxInt)
9542 sym := auxToSym(v.Aux)
9543 if v_0.Op != OpARM64ADD {
9544 break
9545 }
9546 idx := v_0.Args[1]
9547 ptr := v_0.Args[0]
9548 mem := v_1
9549 if !(off == 0 && sym == nil) {
9550 break
9551 }
9552 v.reset(OpARM64MOVBstorezeroidx)
9553 v.AddArg3(ptr, idx, mem)
9554 return true
9555 }
9556
9557
9558
9559 for {
9560 i := auxIntToInt32(v.AuxInt)
9561 s := auxToSym(v.Aux)
9562 ptr0 := v_0
9563 x := v_1
9564 if x.Op != OpARM64MOVBstorezero {
9565 break
9566 }
9567 j := auxIntToInt32(x.AuxInt)
9568 if auxToSym(x.Aux) != s {
9569 break
9570 }
9571 mem := x.Args[1]
9572 ptr1 := x.Args[0]
9573 if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 1) && isSamePtr(ptr0, ptr1) && clobber(x)) {
9574 break
9575 }
9576 v.reset(OpARM64MOVHstorezero)
9577 v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
9578 v.Aux = symToAux(s)
9579 v.AddArg2(ptr0, mem)
9580 return true
9581 }
9582
9583
9584
9585 for {
9586 if auxIntToInt32(v.AuxInt) != 1 {
9587 break
9588 }
9589 s := auxToSym(v.Aux)
9590 if v_0.Op != OpARM64ADD {
9591 break
9592 }
9593 _ = v_0.Args[1]
9594 v_0_0 := v_0.Args[0]
9595 v_0_1 := v_0.Args[1]
9596 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
9597 ptr0 := v_0_0
9598 idx0 := v_0_1
9599 x := v_1
9600 if x.Op != OpARM64MOVBstorezeroidx {
9601 continue
9602 }
9603 mem := x.Args[2]
9604 ptr1 := x.Args[0]
9605 idx1 := x.Args[1]
9606 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
9607 continue
9608 }
9609 v.reset(OpARM64MOVHstorezeroidx)
9610 v.AddArg3(ptr1, idx1, mem)
9611 return true
9612 }
9613 break
9614 }
9615 return false
9616 }
9617 func rewriteValueARM64_OpARM64MOVBstorezeroidx(v *Value) bool {
9618 v_2 := v.Args[2]
9619 v_1 := v.Args[1]
9620 v_0 := v.Args[0]
9621
9622
9623
9624 for {
9625 ptr := v_0
9626 if v_1.Op != OpARM64MOVDconst {
9627 break
9628 }
9629 c := auxIntToInt64(v_1.AuxInt)
9630 mem := v_2
9631 if !(is32Bit(c)) {
9632 break
9633 }
9634 v.reset(OpARM64MOVBstorezero)
9635 v.AuxInt = int32ToAuxInt(int32(c))
9636 v.AddArg2(ptr, mem)
9637 return true
9638 }
9639
9640
9641
9642 for {
9643 if v_0.Op != OpARM64MOVDconst {
9644 break
9645 }
9646 c := auxIntToInt64(v_0.AuxInt)
9647 idx := v_1
9648 mem := v_2
9649 if !(is32Bit(c)) {
9650 break
9651 }
9652 v.reset(OpARM64MOVBstorezero)
9653 v.AuxInt = int32ToAuxInt(int32(c))
9654 v.AddArg2(idx, mem)
9655 return true
9656 }
9657
9658
9659
9660 for {
9661 ptr := v_0
9662 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
9663 break
9664 }
9665 idx := v_1.Args[0]
9666 x := v_2
9667 if x.Op != OpARM64MOVBstorezeroidx {
9668 break
9669 }
9670 mem := x.Args[2]
9671 if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
9672 break
9673 }
9674 v.reset(OpARM64MOVHstorezeroidx)
9675 v.AddArg3(ptr, idx, mem)
9676 return true
9677 }
9678 return false
9679 }
9680 func rewriteValueARM64_OpARM64MOVDload(v *Value) bool {
9681 v_1 := v.Args[1]
9682 v_0 := v.Args[0]
9683 b := v.Block
9684 config := b.Func.Config
9685
9686
9687 for {
9688 off := auxIntToInt32(v.AuxInt)
9689 sym := auxToSym(v.Aux)
9690 ptr := v_0
9691 if v_1.Op != OpARM64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
9692 break
9693 }
9694 val := v_1.Args[1]
9695 if ptr != v_1.Args[0] {
9696 break
9697 }
9698 v.reset(OpARM64FMOVDfpgp)
9699 v.AddArg(val)
9700 return true
9701 }
9702
9703
9704
9705 for {
9706 off1 := auxIntToInt32(v.AuxInt)
9707 sym := auxToSym(v.Aux)
9708 if v_0.Op != OpARM64ADDconst {
9709 break
9710 }
9711 off2 := auxIntToInt64(v_0.AuxInt)
9712 ptr := v_0.Args[0]
9713 mem := v_1
9714 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9715 break
9716 }
9717 v.reset(OpARM64MOVDload)
9718 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
9719 v.Aux = symToAux(sym)
9720 v.AddArg2(ptr, mem)
9721 return true
9722 }
9723
9724
9725
9726 for {
9727 off := auxIntToInt32(v.AuxInt)
9728 sym := auxToSym(v.Aux)
9729 if v_0.Op != OpARM64ADD {
9730 break
9731 }
9732 idx := v_0.Args[1]
9733 ptr := v_0.Args[0]
9734 mem := v_1
9735 if !(off == 0 && sym == nil) {
9736 break
9737 }
9738 v.reset(OpARM64MOVDloadidx)
9739 v.AddArg3(ptr, idx, mem)
9740 return true
9741 }
9742
9743
9744
9745 for {
9746 off := auxIntToInt32(v.AuxInt)
9747 sym := auxToSym(v.Aux)
9748 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
9749 break
9750 }
9751 idx := v_0.Args[1]
9752 ptr := v_0.Args[0]
9753 mem := v_1
9754 if !(off == 0 && sym == nil) {
9755 break
9756 }
9757 v.reset(OpARM64MOVDloadidx8)
9758 v.AddArg3(ptr, idx, mem)
9759 return true
9760 }
9761
9762
9763
9764 for {
9765 off1 := auxIntToInt32(v.AuxInt)
9766 sym1 := auxToSym(v.Aux)
9767 if v_0.Op != OpARM64MOVDaddr {
9768 break
9769 }
9770 off2 := auxIntToInt32(v_0.AuxInt)
9771 sym2 := auxToSym(v_0.Aux)
9772 ptr := v_0.Args[0]
9773 mem := v_1
9774 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9775 break
9776 }
9777 v.reset(OpARM64MOVDload)
9778 v.AuxInt = int32ToAuxInt(off1 + off2)
9779 v.Aux = symToAux(mergeSym(sym1, sym2))
9780 v.AddArg2(ptr, mem)
9781 return true
9782 }
9783
9784
9785
9786 for {
9787 off := auxIntToInt32(v.AuxInt)
9788 sym := auxToSym(v.Aux)
9789 ptr := v_0
9790 if v_1.Op != OpARM64MOVDstorezero {
9791 break
9792 }
9793 off2 := auxIntToInt32(v_1.AuxInt)
9794 sym2 := auxToSym(v_1.Aux)
9795 ptr2 := v_1.Args[0]
9796 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
9797 break
9798 }
9799 v.reset(OpARM64MOVDconst)
9800 v.AuxInt = int64ToAuxInt(0)
9801 return true
9802 }
9803
9804
9805
9806 for {
9807 off := auxIntToInt32(v.AuxInt)
9808 sym := auxToSym(v.Aux)
9809 if v_0.Op != OpSB || !(symIsRO(sym)) {
9810 break
9811 }
9812 v.reset(OpARM64MOVDconst)
9813 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
9814 return true
9815 }
9816 return false
9817 }
9818 func rewriteValueARM64_OpARM64MOVDloadidx(v *Value) bool {
9819 v_2 := v.Args[2]
9820 v_1 := v.Args[1]
9821 v_0 := v.Args[0]
9822
9823
9824
9825 for {
9826 ptr := v_0
9827 if v_1.Op != OpARM64MOVDconst {
9828 break
9829 }
9830 c := auxIntToInt64(v_1.AuxInt)
9831 mem := v_2
9832 if !(is32Bit(c)) {
9833 break
9834 }
9835 v.reset(OpARM64MOVDload)
9836 v.AuxInt = int32ToAuxInt(int32(c))
9837 v.AddArg2(ptr, mem)
9838 return true
9839 }
9840
9841
9842
9843 for {
9844 if v_0.Op != OpARM64MOVDconst {
9845 break
9846 }
9847 c := auxIntToInt64(v_0.AuxInt)
9848 ptr := v_1
9849 mem := v_2
9850 if !(is32Bit(c)) {
9851 break
9852 }
9853 v.reset(OpARM64MOVDload)
9854 v.AuxInt = int32ToAuxInt(int32(c))
9855 v.AddArg2(ptr, mem)
9856 return true
9857 }
9858
9859
9860 for {
9861 ptr := v_0
9862 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
9863 break
9864 }
9865 idx := v_1.Args[0]
9866 mem := v_2
9867 v.reset(OpARM64MOVDloadidx8)
9868 v.AddArg3(ptr, idx, mem)
9869 return true
9870 }
9871
9872
9873 for {
9874 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
9875 break
9876 }
9877 idx := v_0.Args[0]
9878 ptr := v_1
9879 mem := v_2
9880 v.reset(OpARM64MOVDloadidx8)
9881 v.AddArg3(ptr, idx, mem)
9882 return true
9883 }
9884
9885
9886
9887 for {
9888 ptr := v_0
9889 idx := v_1
9890 if v_2.Op != OpARM64MOVDstorezeroidx {
9891 break
9892 }
9893 idx2 := v_2.Args[1]
9894 ptr2 := v_2.Args[0]
9895 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
9896 break
9897 }
9898 v.reset(OpARM64MOVDconst)
9899 v.AuxInt = int64ToAuxInt(0)
9900 return true
9901 }
9902 return false
9903 }
9904 func rewriteValueARM64_OpARM64MOVDloadidx8(v *Value) bool {
9905 v_2 := v.Args[2]
9906 v_1 := v.Args[1]
9907 v_0 := v.Args[0]
9908
9909
9910
9911 for {
9912 ptr := v_0
9913 if v_1.Op != OpARM64MOVDconst {
9914 break
9915 }
9916 c := auxIntToInt64(v_1.AuxInt)
9917 mem := v_2
9918 if !(is32Bit(c << 3)) {
9919 break
9920 }
9921 v.reset(OpARM64MOVDload)
9922 v.AuxInt = int32ToAuxInt(int32(c) << 3)
9923 v.AddArg2(ptr, mem)
9924 return true
9925 }
9926
9927
9928
9929 for {
9930 ptr := v_0
9931 idx := v_1
9932 if v_2.Op != OpARM64MOVDstorezeroidx8 {
9933 break
9934 }
9935 idx2 := v_2.Args[1]
9936 ptr2 := v_2.Args[0]
9937 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
9938 break
9939 }
9940 v.reset(OpARM64MOVDconst)
9941 v.AuxInt = int64ToAuxInt(0)
9942 return true
9943 }
9944 return false
9945 }
9946 func rewriteValueARM64_OpARM64MOVDnop(v *Value) bool {
9947 v_0 := v.Args[0]
9948
9949
9950 for {
9951 if v_0.Op != OpARM64MOVDconst {
9952 break
9953 }
9954 c := auxIntToInt64(v_0.AuxInt)
9955 v.reset(OpARM64MOVDconst)
9956 v.AuxInt = int64ToAuxInt(c)
9957 return true
9958 }
9959 return false
9960 }
9961 func rewriteValueARM64_OpARM64MOVDreg(v *Value) bool {
9962 v_0 := v.Args[0]
9963
9964
9965
9966 for {
9967 x := v_0
9968 if !(x.Uses == 1) {
9969 break
9970 }
9971 v.reset(OpARM64MOVDnop)
9972 v.AddArg(x)
9973 return true
9974 }
9975
9976
9977 for {
9978 if v_0.Op != OpARM64MOVDconst {
9979 break
9980 }
9981 c := auxIntToInt64(v_0.AuxInt)
9982 v.reset(OpARM64MOVDconst)
9983 v.AuxInt = int64ToAuxInt(c)
9984 return true
9985 }
9986 return false
9987 }
9988 func rewriteValueARM64_OpARM64MOVDstore(v *Value) bool {
9989 v_2 := v.Args[2]
9990 v_1 := v.Args[1]
9991 v_0 := v.Args[0]
9992 b := v.Block
9993 config := b.Func.Config
9994
9995
9996 for {
9997 off := auxIntToInt32(v.AuxInt)
9998 sym := auxToSym(v.Aux)
9999 ptr := v_0
10000 if v_1.Op != OpARM64FMOVDfpgp {
10001 break
10002 }
10003 val := v_1.Args[0]
10004 mem := v_2
10005 v.reset(OpARM64FMOVDstore)
10006 v.AuxInt = int32ToAuxInt(off)
10007 v.Aux = symToAux(sym)
10008 v.AddArg3(ptr, val, mem)
10009 return true
10010 }
10011
10012
10013
10014 for {
10015 off1 := auxIntToInt32(v.AuxInt)
10016 sym := auxToSym(v.Aux)
10017 if v_0.Op != OpARM64ADDconst {
10018 break
10019 }
10020 off2 := auxIntToInt64(v_0.AuxInt)
10021 ptr := v_0.Args[0]
10022 val := v_1
10023 mem := v_2
10024 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10025 break
10026 }
10027 v.reset(OpARM64MOVDstore)
10028 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
10029 v.Aux = symToAux(sym)
10030 v.AddArg3(ptr, val, mem)
10031 return true
10032 }
10033
10034
10035
10036 for {
10037 off := auxIntToInt32(v.AuxInt)
10038 sym := auxToSym(v.Aux)
10039 if v_0.Op != OpARM64ADD {
10040 break
10041 }
10042 idx := v_0.Args[1]
10043 ptr := v_0.Args[0]
10044 val := v_1
10045 mem := v_2
10046 if !(off == 0 && sym == nil) {
10047 break
10048 }
10049 v.reset(OpARM64MOVDstoreidx)
10050 v.AddArg4(ptr, idx, val, mem)
10051 return true
10052 }
10053
10054
10055
10056 for {
10057 off := auxIntToInt32(v.AuxInt)
10058 sym := auxToSym(v.Aux)
10059 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
10060 break
10061 }
10062 idx := v_0.Args[1]
10063 ptr := v_0.Args[0]
10064 val := v_1
10065 mem := v_2
10066 if !(off == 0 && sym == nil) {
10067 break
10068 }
10069 v.reset(OpARM64MOVDstoreidx8)
10070 v.AddArg4(ptr, idx, val, mem)
10071 return true
10072 }
10073
10074
10075
10076 for {
10077 off1 := auxIntToInt32(v.AuxInt)
10078 sym1 := auxToSym(v.Aux)
10079 if v_0.Op != OpARM64MOVDaddr {
10080 break
10081 }
10082 off2 := auxIntToInt32(v_0.AuxInt)
10083 sym2 := auxToSym(v_0.Aux)
10084 ptr := v_0.Args[0]
10085 val := v_1
10086 mem := v_2
10087 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10088 break
10089 }
10090 v.reset(OpARM64MOVDstore)
10091 v.AuxInt = int32ToAuxInt(off1 + off2)
10092 v.Aux = symToAux(mergeSym(sym1, sym2))
10093 v.AddArg3(ptr, val, mem)
10094 return true
10095 }
10096
10097
10098 for {
10099 off := auxIntToInt32(v.AuxInt)
10100 sym := auxToSym(v.Aux)
10101 ptr := v_0
10102 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
10103 break
10104 }
10105 mem := v_2
10106 v.reset(OpARM64MOVDstorezero)
10107 v.AuxInt = int32ToAuxInt(off)
10108 v.Aux = symToAux(sym)
10109 v.AddArg2(ptr, mem)
10110 return true
10111 }
10112 return false
10113 }
10114 func rewriteValueARM64_OpARM64MOVDstoreidx(v *Value) bool {
10115 v_3 := v.Args[3]
10116 v_2 := v.Args[2]
10117 v_1 := v.Args[1]
10118 v_0 := v.Args[0]
10119
10120
10121
10122 for {
10123 ptr := v_0
10124 if v_1.Op != OpARM64MOVDconst {
10125 break
10126 }
10127 c := auxIntToInt64(v_1.AuxInt)
10128 val := v_2
10129 mem := v_3
10130 if !(is32Bit(c)) {
10131 break
10132 }
10133 v.reset(OpARM64MOVDstore)
10134 v.AuxInt = int32ToAuxInt(int32(c))
10135 v.AddArg3(ptr, val, mem)
10136 return true
10137 }
10138
10139
10140
10141 for {
10142 if v_0.Op != OpARM64MOVDconst {
10143 break
10144 }
10145 c := auxIntToInt64(v_0.AuxInt)
10146 idx := v_1
10147 val := v_2
10148 mem := v_3
10149 if !(is32Bit(c)) {
10150 break
10151 }
10152 v.reset(OpARM64MOVDstore)
10153 v.AuxInt = int32ToAuxInt(int32(c))
10154 v.AddArg3(idx, val, mem)
10155 return true
10156 }
10157
10158
10159 for {
10160 ptr := v_0
10161 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
10162 break
10163 }
10164 idx := v_1.Args[0]
10165 val := v_2
10166 mem := v_3
10167 v.reset(OpARM64MOVDstoreidx8)
10168 v.AddArg4(ptr, idx, val, mem)
10169 return true
10170 }
10171
10172
10173 for {
10174 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
10175 break
10176 }
10177 idx := v_0.Args[0]
10178 ptr := v_1
10179 val := v_2
10180 mem := v_3
10181 v.reset(OpARM64MOVDstoreidx8)
10182 v.AddArg4(ptr, idx, val, mem)
10183 return true
10184 }
10185
10186
10187 for {
10188 ptr := v_0
10189 idx := v_1
10190 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
10191 break
10192 }
10193 mem := v_3
10194 v.reset(OpARM64MOVDstorezeroidx)
10195 v.AddArg3(ptr, idx, mem)
10196 return true
10197 }
10198 return false
10199 }
10200 func rewriteValueARM64_OpARM64MOVDstoreidx8(v *Value) bool {
10201 v_3 := v.Args[3]
10202 v_2 := v.Args[2]
10203 v_1 := v.Args[1]
10204 v_0 := v.Args[0]
10205
10206
10207
10208 for {
10209 ptr := v_0
10210 if v_1.Op != OpARM64MOVDconst {
10211 break
10212 }
10213 c := auxIntToInt64(v_1.AuxInt)
10214 val := v_2
10215 mem := v_3
10216 if !(is32Bit(c << 3)) {
10217 break
10218 }
10219 v.reset(OpARM64MOVDstore)
10220 v.AuxInt = int32ToAuxInt(int32(c) << 3)
10221 v.AddArg3(ptr, val, mem)
10222 return true
10223 }
10224
10225
10226 for {
10227 ptr := v_0
10228 idx := v_1
10229 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
10230 break
10231 }
10232 mem := v_3
10233 v.reset(OpARM64MOVDstorezeroidx8)
10234 v.AddArg3(ptr, idx, mem)
10235 return true
10236 }
10237 return false
10238 }
10239 func rewriteValueARM64_OpARM64MOVDstorezero(v *Value) bool {
10240 v_1 := v.Args[1]
10241 v_0 := v.Args[0]
10242 b := v.Block
10243 config := b.Func.Config
10244
10245
10246
10247 for {
10248 off1 := auxIntToInt32(v.AuxInt)
10249 sym := auxToSym(v.Aux)
10250 if v_0.Op != OpARM64ADDconst {
10251 break
10252 }
10253 off2 := auxIntToInt64(v_0.AuxInt)
10254 ptr := v_0.Args[0]
10255 mem := v_1
10256 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10257 break
10258 }
10259 v.reset(OpARM64MOVDstorezero)
10260 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
10261 v.Aux = symToAux(sym)
10262 v.AddArg2(ptr, mem)
10263 return true
10264 }
10265
10266
10267
10268 for {
10269 off1 := auxIntToInt32(v.AuxInt)
10270 sym1 := auxToSym(v.Aux)
10271 if v_0.Op != OpARM64MOVDaddr {
10272 break
10273 }
10274 off2 := auxIntToInt32(v_0.AuxInt)
10275 sym2 := auxToSym(v_0.Aux)
10276 ptr := v_0.Args[0]
10277 mem := v_1
10278 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10279 break
10280 }
10281 v.reset(OpARM64MOVDstorezero)
10282 v.AuxInt = int32ToAuxInt(off1 + off2)
10283 v.Aux = symToAux(mergeSym(sym1, sym2))
10284 v.AddArg2(ptr, mem)
10285 return true
10286 }
10287
10288
10289
10290 for {
10291 off := auxIntToInt32(v.AuxInt)
10292 sym := auxToSym(v.Aux)
10293 if v_0.Op != OpARM64ADD {
10294 break
10295 }
10296 idx := v_0.Args[1]
10297 ptr := v_0.Args[0]
10298 mem := v_1
10299 if !(off == 0 && sym == nil) {
10300 break
10301 }
10302 v.reset(OpARM64MOVDstorezeroidx)
10303 v.AddArg3(ptr, idx, mem)
10304 return true
10305 }
10306
10307
10308
10309 for {
10310 off := auxIntToInt32(v.AuxInt)
10311 sym := auxToSym(v.Aux)
10312 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 3 {
10313 break
10314 }
10315 idx := v_0.Args[1]
10316 ptr := v_0.Args[0]
10317 mem := v_1
10318 if !(off == 0 && sym == nil) {
10319 break
10320 }
10321 v.reset(OpARM64MOVDstorezeroidx8)
10322 v.AddArg3(ptr, idx, mem)
10323 return true
10324 }
10325
10326
10327
10328 for {
10329 i := auxIntToInt32(v.AuxInt)
10330 s := auxToSym(v.Aux)
10331 ptr0 := v_0
10332 x := v_1
10333 if x.Op != OpARM64MOVDstorezero {
10334 break
10335 }
10336 j := auxIntToInt32(x.AuxInt)
10337 if auxToSym(x.Aux) != s {
10338 break
10339 }
10340 mem := x.Args[1]
10341 ptr1 := x.Args[0]
10342 if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 8) && isSamePtr(ptr0, ptr1) && clobber(x)) {
10343 break
10344 }
10345 v.reset(OpARM64MOVQstorezero)
10346 v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
10347 v.Aux = symToAux(s)
10348 v.AddArg2(ptr0, mem)
10349 return true
10350 }
10351
10352
10353
10354 for {
10355 if auxIntToInt32(v.AuxInt) != 8 {
10356 break
10357 }
10358 s := auxToSym(v.Aux)
10359 p0 := v_0
10360 if p0.Op != OpARM64ADD {
10361 break
10362 }
10363 _ = p0.Args[1]
10364 p0_0 := p0.Args[0]
10365 p0_1 := p0.Args[1]
10366 for _i0 := 0; _i0 <= 1; _i0, p0_0, p0_1 = _i0+1, p0_1, p0_0 {
10367 ptr0 := p0_0
10368 idx0 := p0_1
10369 x := v_1
10370 if x.Op != OpARM64MOVDstorezeroidx {
10371 continue
10372 }
10373 mem := x.Args[2]
10374 ptr1 := x.Args[0]
10375 idx1 := x.Args[1]
10376 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
10377 continue
10378 }
10379 v.reset(OpARM64MOVQstorezero)
10380 v.AuxInt = int32ToAuxInt(0)
10381 v.Aux = symToAux(s)
10382 v.AddArg2(p0, mem)
10383 return true
10384 }
10385 break
10386 }
10387
10388
10389
10390 for {
10391 if auxIntToInt32(v.AuxInt) != 8 {
10392 break
10393 }
10394 s := auxToSym(v.Aux)
10395 p0 := v_0
10396 if p0.Op != OpARM64ADDshiftLL || auxIntToInt64(p0.AuxInt) != 3 {
10397 break
10398 }
10399 idx0 := p0.Args[1]
10400 ptr0 := p0.Args[0]
10401 x := v_1
10402 if x.Op != OpARM64MOVDstorezeroidx8 {
10403 break
10404 }
10405 mem := x.Args[2]
10406 ptr1 := x.Args[0]
10407 idx1 := x.Args[1]
10408 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
10409 break
10410 }
10411 v.reset(OpARM64MOVQstorezero)
10412 v.AuxInt = int32ToAuxInt(0)
10413 v.Aux = symToAux(s)
10414 v.AddArg2(p0, mem)
10415 return true
10416 }
10417 return false
10418 }
10419 func rewriteValueARM64_OpARM64MOVDstorezeroidx(v *Value) bool {
10420 v_2 := v.Args[2]
10421 v_1 := v.Args[1]
10422 v_0 := v.Args[0]
10423
10424
10425
10426 for {
10427 ptr := v_0
10428 if v_1.Op != OpARM64MOVDconst {
10429 break
10430 }
10431 c := auxIntToInt64(v_1.AuxInt)
10432 mem := v_2
10433 if !(is32Bit(c)) {
10434 break
10435 }
10436 v.reset(OpARM64MOVDstorezero)
10437 v.AuxInt = int32ToAuxInt(int32(c))
10438 v.AddArg2(ptr, mem)
10439 return true
10440 }
10441
10442
10443
10444 for {
10445 if v_0.Op != OpARM64MOVDconst {
10446 break
10447 }
10448 c := auxIntToInt64(v_0.AuxInt)
10449 idx := v_1
10450 mem := v_2
10451 if !(is32Bit(c)) {
10452 break
10453 }
10454 v.reset(OpARM64MOVDstorezero)
10455 v.AuxInt = int32ToAuxInt(int32(c))
10456 v.AddArg2(idx, mem)
10457 return true
10458 }
10459
10460
10461 for {
10462 ptr := v_0
10463 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 3 {
10464 break
10465 }
10466 idx := v_1.Args[0]
10467 mem := v_2
10468 v.reset(OpARM64MOVDstorezeroidx8)
10469 v.AddArg3(ptr, idx, mem)
10470 return true
10471 }
10472
10473
10474 for {
10475 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 3 {
10476 break
10477 }
10478 idx := v_0.Args[0]
10479 ptr := v_1
10480 mem := v_2
10481 v.reset(OpARM64MOVDstorezeroidx8)
10482 v.AddArg3(ptr, idx, mem)
10483 return true
10484 }
10485 return false
10486 }
10487 func rewriteValueARM64_OpARM64MOVDstorezeroidx8(v *Value) bool {
10488 v_2 := v.Args[2]
10489 v_1 := v.Args[1]
10490 v_0 := v.Args[0]
10491
10492
10493
10494 for {
10495 ptr := v_0
10496 if v_1.Op != OpARM64MOVDconst {
10497 break
10498 }
10499 c := auxIntToInt64(v_1.AuxInt)
10500 mem := v_2
10501 if !(is32Bit(c << 3)) {
10502 break
10503 }
10504 v.reset(OpARM64MOVDstorezero)
10505 v.AuxInt = int32ToAuxInt(int32(c << 3))
10506 v.AddArg2(ptr, mem)
10507 return true
10508 }
10509 return false
10510 }
10511 func rewriteValueARM64_OpARM64MOVHUload(v *Value) bool {
10512 v_1 := v.Args[1]
10513 v_0 := v.Args[0]
10514 b := v.Block
10515 config := b.Func.Config
10516
10517
10518
10519 for {
10520 off1 := auxIntToInt32(v.AuxInt)
10521 sym := auxToSym(v.Aux)
10522 if v_0.Op != OpARM64ADDconst {
10523 break
10524 }
10525 off2 := auxIntToInt64(v_0.AuxInt)
10526 ptr := v_0.Args[0]
10527 mem := v_1
10528 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10529 break
10530 }
10531 v.reset(OpARM64MOVHUload)
10532 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
10533 v.Aux = symToAux(sym)
10534 v.AddArg2(ptr, mem)
10535 return true
10536 }
10537
10538
10539
10540 for {
10541 off := auxIntToInt32(v.AuxInt)
10542 sym := auxToSym(v.Aux)
10543 if v_0.Op != OpARM64ADD {
10544 break
10545 }
10546 idx := v_0.Args[1]
10547 ptr := v_0.Args[0]
10548 mem := v_1
10549 if !(off == 0 && sym == nil) {
10550 break
10551 }
10552 v.reset(OpARM64MOVHUloadidx)
10553 v.AddArg3(ptr, idx, mem)
10554 return true
10555 }
10556
10557
10558
10559 for {
10560 off := auxIntToInt32(v.AuxInt)
10561 sym := auxToSym(v.Aux)
10562 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
10563 break
10564 }
10565 idx := v_0.Args[1]
10566 ptr := v_0.Args[0]
10567 mem := v_1
10568 if !(off == 0 && sym == nil) {
10569 break
10570 }
10571 v.reset(OpARM64MOVHUloadidx2)
10572 v.AddArg3(ptr, idx, mem)
10573 return true
10574 }
10575
10576
10577
10578 for {
10579 off1 := auxIntToInt32(v.AuxInt)
10580 sym1 := auxToSym(v.Aux)
10581 if v_0.Op != OpARM64MOVDaddr {
10582 break
10583 }
10584 off2 := auxIntToInt32(v_0.AuxInt)
10585 sym2 := auxToSym(v_0.Aux)
10586 ptr := v_0.Args[0]
10587 mem := v_1
10588 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10589 break
10590 }
10591 v.reset(OpARM64MOVHUload)
10592 v.AuxInt = int32ToAuxInt(off1 + off2)
10593 v.Aux = symToAux(mergeSym(sym1, sym2))
10594 v.AddArg2(ptr, mem)
10595 return true
10596 }
10597
10598
10599
10600 for {
10601 off := auxIntToInt32(v.AuxInt)
10602 sym := auxToSym(v.Aux)
10603 ptr := v_0
10604 if v_1.Op != OpARM64MOVHstorezero {
10605 break
10606 }
10607 off2 := auxIntToInt32(v_1.AuxInt)
10608 sym2 := auxToSym(v_1.Aux)
10609 ptr2 := v_1.Args[0]
10610 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
10611 break
10612 }
10613 v.reset(OpARM64MOVDconst)
10614 v.AuxInt = int64ToAuxInt(0)
10615 return true
10616 }
10617
10618
10619
10620 for {
10621 off := auxIntToInt32(v.AuxInt)
10622 sym := auxToSym(v.Aux)
10623 if v_0.Op != OpSB || !(symIsRO(sym)) {
10624 break
10625 }
10626 v.reset(OpARM64MOVDconst)
10627 v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
10628 return true
10629 }
10630 return false
10631 }
10632 func rewriteValueARM64_OpARM64MOVHUloadidx(v *Value) bool {
10633 v_2 := v.Args[2]
10634 v_1 := v.Args[1]
10635 v_0 := v.Args[0]
10636
10637
10638
10639 for {
10640 ptr := v_0
10641 if v_1.Op != OpARM64MOVDconst {
10642 break
10643 }
10644 c := auxIntToInt64(v_1.AuxInt)
10645 mem := v_2
10646 if !(is32Bit(c)) {
10647 break
10648 }
10649 v.reset(OpARM64MOVHUload)
10650 v.AuxInt = int32ToAuxInt(int32(c))
10651 v.AddArg2(ptr, mem)
10652 return true
10653 }
10654
10655
10656
10657 for {
10658 if v_0.Op != OpARM64MOVDconst {
10659 break
10660 }
10661 c := auxIntToInt64(v_0.AuxInt)
10662 ptr := v_1
10663 mem := v_2
10664 if !(is32Bit(c)) {
10665 break
10666 }
10667 v.reset(OpARM64MOVHUload)
10668 v.AuxInt = int32ToAuxInt(int32(c))
10669 v.AddArg2(ptr, mem)
10670 return true
10671 }
10672
10673
10674 for {
10675 ptr := v_0
10676 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
10677 break
10678 }
10679 idx := v_1.Args[0]
10680 mem := v_2
10681 v.reset(OpARM64MOVHUloadidx2)
10682 v.AddArg3(ptr, idx, mem)
10683 return true
10684 }
10685
10686
10687 for {
10688 ptr := v_0
10689 if v_1.Op != OpARM64ADD {
10690 break
10691 }
10692 idx := v_1.Args[1]
10693 if idx != v_1.Args[0] {
10694 break
10695 }
10696 mem := v_2
10697 v.reset(OpARM64MOVHUloadidx2)
10698 v.AddArg3(ptr, idx, mem)
10699 return true
10700 }
10701
10702
10703 for {
10704 if v_0.Op != OpARM64ADD {
10705 break
10706 }
10707 idx := v_0.Args[1]
10708 if idx != v_0.Args[0] {
10709 break
10710 }
10711 ptr := v_1
10712 mem := v_2
10713 v.reset(OpARM64MOVHUloadidx2)
10714 v.AddArg3(ptr, idx, mem)
10715 return true
10716 }
10717
10718
10719
10720 for {
10721 ptr := v_0
10722 idx := v_1
10723 if v_2.Op != OpARM64MOVHstorezeroidx {
10724 break
10725 }
10726 idx2 := v_2.Args[1]
10727 ptr2 := v_2.Args[0]
10728 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
10729 break
10730 }
10731 v.reset(OpARM64MOVDconst)
10732 v.AuxInt = int64ToAuxInt(0)
10733 return true
10734 }
10735 return false
10736 }
10737 func rewriteValueARM64_OpARM64MOVHUloadidx2(v *Value) bool {
10738 v_2 := v.Args[2]
10739 v_1 := v.Args[1]
10740 v_0 := v.Args[0]
10741
10742
10743
10744 for {
10745 ptr := v_0
10746 if v_1.Op != OpARM64MOVDconst {
10747 break
10748 }
10749 c := auxIntToInt64(v_1.AuxInt)
10750 mem := v_2
10751 if !(is32Bit(c << 1)) {
10752 break
10753 }
10754 v.reset(OpARM64MOVHUload)
10755 v.AuxInt = int32ToAuxInt(int32(c) << 1)
10756 v.AddArg2(ptr, mem)
10757 return true
10758 }
10759
10760
10761
10762 for {
10763 ptr := v_0
10764 idx := v_1
10765 if v_2.Op != OpARM64MOVHstorezeroidx2 {
10766 break
10767 }
10768 idx2 := v_2.Args[1]
10769 ptr2 := v_2.Args[0]
10770 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
10771 break
10772 }
10773 v.reset(OpARM64MOVDconst)
10774 v.AuxInt = int64ToAuxInt(0)
10775 return true
10776 }
10777 return false
10778 }
10779 func rewriteValueARM64_OpARM64MOVHUreg(v *Value) bool {
10780 v_0 := v.Args[0]
10781
10782
10783 for {
10784 x := v_0
10785 if x.Op != OpARM64MOVBUload {
10786 break
10787 }
10788 v.reset(OpARM64MOVDreg)
10789 v.AddArg(x)
10790 return true
10791 }
10792
10793
10794 for {
10795 x := v_0
10796 if x.Op != OpARM64MOVHUload {
10797 break
10798 }
10799 v.reset(OpARM64MOVDreg)
10800 v.AddArg(x)
10801 return true
10802 }
10803
10804
10805 for {
10806 x := v_0
10807 if x.Op != OpARM64MOVBUloadidx {
10808 break
10809 }
10810 v.reset(OpARM64MOVDreg)
10811 v.AddArg(x)
10812 return true
10813 }
10814
10815
10816 for {
10817 x := v_0
10818 if x.Op != OpARM64MOVHUloadidx {
10819 break
10820 }
10821 v.reset(OpARM64MOVDreg)
10822 v.AddArg(x)
10823 return true
10824 }
10825
10826
10827 for {
10828 x := v_0
10829 if x.Op != OpARM64MOVHUloadidx2 {
10830 break
10831 }
10832 v.reset(OpARM64MOVDreg)
10833 v.AddArg(x)
10834 return true
10835 }
10836
10837
10838 for {
10839 x := v_0
10840 if x.Op != OpARM64MOVBUreg {
10841 break
10842 }
10843 v.reset(OpARM64MOVDreg)
10844 v.AddArg(x)
10845 return true
10846 }
10847
10848
10849 for {
10850 x := v_0
10851 if x.Op != OpARM64MOVHUreg {
10852 break
10853 }
10854 v.reset(OpARM64MOVDreg)
10855 v.AddArg(x)
10856 return true
10857 }
10858
10859
10860 for {
10861 if v_0.Op != OpARM64ANDconst {
10862 break
10863 }
10864 c := auxIntToInt64(v_0.AuxInt)
10865 x := v_0.Args[0]
10866 v.reset(OpARM64ANDconst)
10867 v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
10868 v.AddArg(x)
10869 return true
10870 }
10871
10872
10873 for {
10874 if v_0.Op != OpARM64MOVDconst {
10875 break
10876 }
10877 c := auxIntToInt64(v_0.AuxInt)
10878 v.reset(OpARM64MOVDconst)
10879 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
10880 return true
10881 }
10882
10883
10884
10885 for {
10886 if v_0.Op != OpARM64SLLconst {
10887 break
10888 }
10889 lc := auxIntToInt64(v_0.AuxInt)
10890 if !(lc >= 16) {
10891 break
10892 }
10893 v.reset(OpARM64MOVDconst)
10894 v.AuxInt = int64ToAuxInt(0)
10895 return true
10896 }
10897
10898
10899
10900 for {
10901 if v_0.Op != OpARM64SLLconst {
10902 break
10903 }
10904 lc := auxIntToInt64(v_0.AuxInt)
10905 x := v_0.Args[0]
10906 if !(lc < 16) {
10907 break
10908 }
10909 v.reset(OpARM64UBFIZ)
10910 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
10911 v.AddArg(x)
10912 return true
10913 }
10914
10915
10916
10917 for {
10918 if v_0.Op != OpARM64SRLconst {
10919 break
10920 }
10921 rc := auxIntToInt64(v_0.AuxInt)
10922 x := v_0.Args[0]
10923 if !(rc < 16) {
10924 break
10925 }
10926 v.reset(OpARM64UBFX)
10927 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16))
10928 v.AddArg(x)
10929 return true
10930 }
10931
10932
10933
10934 for {
10935 if v_0.Op != OpARM64UBFX {
10936 break
10937 }
10938 bfc := auxIntToArm64BitField(v_0.AuxInt)
10939 x := v_0.Args[0]
10940 if !(bfc.getARM64BFwidth() <= 16) {
10941 break
10942 }
10943 v.reset(OpARM64UBFX)
10944 v.AuxInt = arm64BitFieldToAuxInt(bfc)
10945 v.AddArg(x)
10946 return true
10947 }
10948 return false
10949 }
10950 func rewriteValueARM64_OpARM64MOVHload(v *Value) bool {
10951 v_1 := v.Args[1]
10952 v_0 := v.Args[0]
10953 b := v.Block
10954 config := b.Func.Config
10955
10956
10957
10958 for {
10959 off1 := auxIntToInt32(v.AuxInt)
10960 sym := auxToSym(v.Aux)
10961 if v_0.Op != OpARM64ADDconst {
10962 break
10963 }
10964 off2 := auxIntToInt64(v_0.AuxInt)
10965 ptr := v_0.Args[0]
10966 mem := v_1
10967 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
10968 break
10969 }
10970 v.reset(OpARM64MOVHload)
10971 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
10972 v.Aux = symToAux(sym)
10973 v.AddArg2(ptr, mem)
10974 return true
10975 }
10976
10977
10978
10979 for {
10980 off := auxIntToInt32(v.AuxInt)
10981 sym := auxToSym(v.Aux)
10982 if v_0.Op != OpARM64ADD {
10983 break
10984 }
10985 idx := v_0.Args[1]
10986 ptr := v_0.Args[0]
10987 mem := v_1
10988 if !(off == 0 && sym == nil) {
10989 break
10990 }
10991 v.reset(OpARM64MOVHloadidx)
10992 v.AddArg3(ptr, idx, mem)
10993 return true
10994 }
10995
10996
10997
10998 for {
10999 off := auxIntToInt32(v.AuxInt)
11000 sym := auxToSym(v.Aux)
11001 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
11002 break
11003 }
11004 idx := v_0.Args[1]
11005 ptr := v_0.Args[0]
11006 mem := v_1
11007 if !(off == 0 && sym == nil) {
11008 break
11009 }
11010 v.reset(OpARM64MOVHloadidx2)
11011 v.AddArg3(ptr, idx, mem)
11012 return true
11013 }
11014
11015
11016
11017 for {
11018 off1 := auxIntToInt32(v.AuxInt)
11019 sym1 := auxToSym(v.Aux)
11020 if v_0.Op != OpARM64MOVDaddr {
11021 break
11022 }
11023 off2 := auxIntToInt32(v_0.AuxInt)
11024 sym2 := auxToSym(v_0.Aux)
11025 ptr := v_0.Args[0]
11026 mem := v_1
11027 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
11028 break
11029 }
11030 v.reset(OpARM64MOVHload)
11031 v.AuxInt = int32ToAuxInt(off1 + off2)
11032 v.Aux = symToAux(mergeSym(sym1, sym2))
11033 v.AddArg2(ptr, mem)
11034 return true
11035 }
11036
11037
11038
11039 for {
11040 off := auxIntToInt32(v.AuxInt)
11041 sym := auxToSym(v.Aux)
11042 ptr := v_0
11043 if v_1.Op != OpARM64MOVHstorezero {
11044 break
11045 }
11046 off2 := auxIntToInt32(v_1.AuxInt)
11047 sym2 := auxToSym(v_1.Aux)
11048 ptr2 := v_1.Args[0]
11049 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
11050 break
11051 }
11052 v.reset(OpARM64MOVDconst)
11053 v.AuxInt = int64ToAuxInt(0)
11054 return true
11055 }
11056 return false
11057 }
11058 func rewriteValueARM64_OpARM64MOVHloadidx(v *Value) bool {
11059 v_2 := v.Args[2]
11060 v_1 := v.Args[1]
11061 v_0 := v.Args[0]
11062
11063
11064
11065 for {
11066 ptr := v_0
11067 if v_1.Op != OpARM64MOVDconst {
11068 break
11069 }
11070 c := auxIntToInt64(v_1.AuxInt)
11071 mem := v_2
11072 if !(is32Bit(c)) {
11073 break
11074 }
11075 v.reset(OpARM64MOVHload)
11076 v.AuxInt = int32ToAuxInt(int32(c))
11077 v.AddArg2(ptr, mem)
11078 return true
11079 }
11080
11081
11082
11083 for {
11084 if v_0.Op != OpARM64MOVDconst {
11085 break
11086 }
11087 c := auxIntToInt64(v_0.AuxInt)
11088 ptr := v_1
11089 mem := v_2
11090 if !(is32Bit(c)) {
11091 break
11092 }
11093 v.reset(OpARM64MOVHload)
11094 v.AuxInt = int32ToAuxInt(int32(c))
11095 v.AddArg2(ptr, mem)
11096 return true
11097 }
11098
11099
11100 for {
11101 ptr := v_0
11102 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
11103 break
11104 }
11105 idx := v_1.Args[0]
11106 mem := v_2
11107 v.reset(OpARM64MOVHloadidx2)
11108 v.AddArg3(ptr, idx, mem)
11109 return true
11110 }
11111
11112
11113 for {
11114 ptr := v_0
11115 if v_1.Op != OpARM64ADD {
11116 break
11117 }
11118 idx := v_1.Args[1]
11119 if idx != v_1.Args[0] {
11120 break
11121 }
11122 mem := v_2
11123 v.reset(OpARM64MOVHloadidx2)
11124 v.AddArg3(ptr, idx, mem)
11125 return true
11126 }
11127
11128
11129 for {
11130 if v_0.Op != OpARM64ADD {
11131 break
11132 }
11133 idx := v_0.Args[1]
11134 if idx != v_0.Args[0] {
11135 break
11136 }
11137 ptr := v_1
11138 mem := v_2
11139 v.reset(OpARM64MOVHloadidx2)
11140 v.AddArg3(ptr, idx, mem)
11141 return true
11142 }
11143
11144
11145
11146 for {
11147 ptr := v_0
11148 idx := v_1
11149 if v_2.Op != OpARM64MOVHstorezeroidx {
11150 break
11151 }
11152 idx2 := v_2.Args[1]
11153 ptr2 := v_2.Args[0]
11154 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
11155 break
11156 }
11157 v.reset(OpARM64MOVDconst)
11158 v.AuxInt = int64ToAuxInt(0)
11159 return true
11160 }
11161 return false
11162 }
11163 func rewriteValueARM64_OpARM64MOVHloadidx2(v *Value) bool {
11164 v_2 := v.Args[2]
11165 v_1 := v.Args[1]
11166 v_0 := v.Args[0]
11167
11168
11169
11170 for {
11171 ptr := v_0
11172 if v_1.Op != OpARM64MOVDconst {
11173 break
11174 }
11175 c := auxIntToInt64(v_1.AuxInt)
11176 mem := v_2
11177 if !(is32Bit(c << 1)) {
11178 break
11179 }
11180 v.reset(OpARM64MOVHload)
11181 v.AuxInt = int32ToAuxInt(int32(c) << 1)
11182 v.AddArg2(ptr, mem)
11183 return true
11184 }
11185
11186
11187
11188 for {
11189 ptr := v_0
11190 idx := v_1
11191 if v_2.Op != OpARM64MOVHstorezeroidx2 {
11192 break
11193 }
11194 idx2 := v_2.Args[1]
11195 ptr2 := v_2.Args[0]
11196 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
11197 break
11198 }
11199 v.reset(OpARM64MOVDconst)
11200 v.AuxInt = int64ToAuxInt(0)
11201 return true
11202 }
11203 return false
11204 }
11205 func rewriteValueARM64_OpARM64MOVHreg(v *Value) bool {
11206 v_0 := v.Args[0]
11207
11208
11209 for {
11210 x := v_0
11211 if x.Op != OpARM64MOVBload {
11212 break
11213 }
11214 v.reset(OpARM64MOVDreg)
11215 v.AddArg(x)
11216 return true
11217 }
11218
11219
11220 for {
11221 x := v_0
11222 if x.Op != OpARM64MOVBUload {
11223 break
11224 }
11225 v.reset(OpARM64MOVDreg)
11226 v.AddArg(x)
11227 return true
11228 }
11229
11230
11231 for {
11232 x := v_0
11233 if x.Op != OpARM64MOVHload {
11234 break
11235 }
11236 v.reset(OpARM64MOVDreg)
11237 v.AddArg(x)
11238 return true
11239 }
11240
11241
11242 for {
11243 x := v_0
11244 if x.Op != OpARM64MOVBloadidx {
11245 break
11246 }
11247 v.reset(OpARM64MOVDreg)
11248 v.AddArg(x)
11249 return true
11250 }
11251
11252
11253 for {
11254 x := v_0
11255 if x.Op != OpARM64MOVBUloadidx {
11256 break
11257 }
11258 v.reset(OpARM64MOVDreg)
11259 v.AddArg(x)
11260 return true
11261 }
11262
11263
11264 for {
11265 x := v_0
11266 if x.Op != OpARM64MOVHloadidx {
11267 break
11268 }
11269 v.reset(OpARM64MOVDreg)
11270 v.AddArg(x)
11271 return true
11272 }
11273
11274
11275 for {
11276 x := v_0
11277 if x.Op != OpARM64MOVHloadidx2 {
11278 break
11279 }
11280 v.reset(OpARM64MOVDreg)
11281 v.AddArg(x)
11282 return true
11283 }
11284
11285
11286 for {
11287 x := v_0
11288 if x.Op != OpARM64MOVBreg {
11289 break
11290 }
11291 v.reset(OpARM64MOVDreg)
11292 v.AddArg(x)
11293 return true
11294 }
11295
11296
11297 for {
11298 x := v_0
11299 if x.Op != OpARM64MOVBUreg {
11300 break
11301 }
11302 v.reset(OpARM64MOVDreg)
11303 v.AddArg(x)
11304 return true
11305 }
11306
11307
11308 for {
11309 x := v_0
11310 if x.Op != OpARM64MOVHreg {
11311 break
11312 }
11313 v.reset(OpARM64MOVDreg)
11314 v.AddArg(x)
11315 return true
11316 }
11317
11318
11319 for {
11320 if v_0.Op != OpARM64MOVDconst {
11321 break
11322 }
11323 c := auxIntToInt64(v_0.AuxInt)
11324 v.reset(OpARM64MOVDconst)
11325 v.AuxInt = int64ToAuxInt(int64(int16(c)))
11326 return true
11327 }
11328
11329
11330
11331 for {
11332 if v_0.Op != OpARM64SLLconst {
11333 break
11334 }
11335 lc := auxIntToInt64(v_0.AuxInt)
11336 x := v_0.Args[0]
11337 if !(lc < 16) {
11338 break
11339 }
11340 v.reset(OpARM64SBFIZ)
11341 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 16-lc))
11342 v.AddArg(x)
11343 return true
11344 }
11345
11346
11347
11348 for {
11349 if v_0.Op != OpARM64SBFX {
11350 break
11351 }
11352 bfc := auxIntToArm64BitField(v_0.AuxInt)
11353 x := v_0.Args[0]
11354 if !(bfc.getARM64BFwidth() <= 16) {
11355 break
11356 }
11357 v.reset(OpARM64SBFX)
11358 v.AuxInt = arm64BitFieldToAuxInt(bfc)
11359 v.AddArg(x)
11360 return true
11361 }
11362 return false
11363 }
11364 func rewriteValueARM64_OpARM64MOVHstore(v *Value) bool {
11365 v_2 := v.Args[2]
11366 v_1 := v.Args[1]
11367 v_0 := v.Args[0]
11368 b := v.Block
11369 config := b.Func.Config
11370
11371
11372
11373 for {
11374 off1 := auxIntToInt32(v.AuxInt)
11375 sym := auxToSym(v.Aux)
11376 if v_0.Op != OpARM64ADDconst {
11377 break
11378 }
11379 off2 := auxIntToInt64(v_0.AuxInt)
11380 ptr := v_0.Args[0]
11381 val := v_1
11382 mem := v_2
11383 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
11384 break
11385 }
11386 v.reset(OpARM64MOVHstore)
11387 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
11388 v.Aux = symToAux(sym)
11389 v.AddArg3(ptr, val, mem)
11390 return true
11391 }
11392
11393
11394
11395 for {
11396 off := auxIntToInt32(v.AuxInt)
11397 sym := auxToSym(v.Aux)
11398 if v_0.Op != OpARM64ADD {
11399 break
11400 }
11401 idx := v_0.Args[1]
11402 ptr := v_0.Args[0]
11403 val := v_1
11404 mem := v_2
11405 if !(off == 0 && sym == nil) {
11406 break
11407 }
11408 v.reset(OpARM64MOVHstoreidx)
11409 v.AddArg4(ptr, idx, val, mem)
11410 return true
11411 }
11412
11413
11414
11415 for {
11416 off := auxIntToInt32(v.AuxInt)
11417 sym := auxToSym(v.Aux)
11418 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
11419 break
11420 }
11421 idx := v_0.Args[1]
11422 ptr := v_0.Args[0]
11423 val := v_1
11424 mem := v_2
11425 if !(off == 0 && sym == nil) {
11426 break
11427 }
11428 v.reset(OpARM64MOVHstoreidx2)
11429 v.AddArg4(ptr, idx, val, mem)
11430 return true
11431 }
11432
11433
11434
11435 for {
11436 off1 := auxIntToInt32(v.AuxInt)
11437 sym1 := auxToSym(v.Aux)
11438 if v_0.Op != OpARM64MOVDaddr {
11439 break
11440 }
11441 off2 := auxIntToInt32(v_0.AuxInt)
11442 sym2 := auxToSym(v_0.Aux)
11443 ptr := v_0.Args[0]
11444 val := v_1
11445 mem := v_2
11446 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
11447 break
11448 }
11449 v.reset(OpARM64MOVHstore)
11450 v.AuxInt = int32ToAuxInt(off1 + off2)
11451 v.Aux = symToAux(mergeSym(sym1, sym2))
11452 v.AddArg3(ptr, val, mem)
11453 return true
11454 }
11455
11456
11457 for {
11458 off := auxIntToInt32(v.AuxInt)
11459 sym := auxToSym(v.Aux)
11460 ptr := v_0
11461 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
11462 break
11463 }
11464 mem := v_2
11465 v.reset(OpARM64MOVHstorezero)
11466 v.AuxInt = int32ToAuxInt(off)
11467 v.Aux = symToAux(sym)
11468 v.AddArg2(ptr, mem)
11469 return true
11470 }
11471
11472
11473 for {
11474 off := auxIntToInt32(v.AuxInt)
11475 sym := auxToSym(v.Aux)
11476 ptr := v_0
11477 if v_1.Op != OpARM64MOVHreg {
11478 break
11479 }
11480 x := v_1.Args[0]
11481 mem := v_2
11482 v.reset(OpARM64MOVHstore)
11483 v.AuxInt = int32ToAuxInt(off)
11484 v.Aux = symToAux(sym)
11485 v.AddArg3(ptr, x, mem)
11486 return true
11487 }
11488
11489
11490 for {
11491 off := auxIntToInt32(v.AuxInt)
11492 sym := auxToSym(v.Aux)
11493 ptr := v_0
11494 if v_1.Op != OpARM64MOVHUreg {
11495 break
11496 }
11497 x := v_1.Args[0]
11498 mem := v_2
11499 v.reset(OpARM64MOVHstore)
11500 v.AuxInt = int32ToAuxInt(off)
11501 v.Aux = symToAux(sym)
11502 v.AddArg3(ptr, x, mem)
11503 return true
11504 }
11505
11506
11507 for {
11508 off := auxIntToInt32(v.AuxInt)
11509 sym := auxToSym(v.Aux)
11510 ptr := v_0
11511 if v_1.Op != OpARM64MOVWreg {
11512 break
11513 }
11514 x := v_1.Args[0]
11515 mem := v_2
11516 v.reset(OpARM64MOVHstore)
11517 v.AuxInt = int32ToAuxInt(off)
11518 v.Aux = symToAux(sym)
11519 v.AddArg3(ptr, x, mem)
11520 return true
11521 }
11522
11523
11524 for {
11525 off := auxIntToInt32(v.AuxInt)
11526 sym := auxToSym(v.Aux)
11527 ptr := v_0
11528 if v_1.Op != OpARM64MOVWUreg {
11529 break
11530 }
11531 x := v_1.Args[0]
11532 mem := v_2
11533 v.reset(OpARM64MOVHstore)
11534 v.AuxInt = int32ToAuxInt(off)
11535 v.Aux = symToAux(sym)
11536 v.AddArg3(ptr, x, mem)
11537 return true
11538 }
11539
11540
11541
11542 for {
11543 i := auxIntToInt32(v.AuxInt)
11544 s := auxToSym(v.Aux)
11545 ptr0 := v_0
11546 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
11547 break
11548 }
11549 w := v_1.Args[0]
11550 x := v_2
11551 if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
11552 break
11553 }
11554 mem := x.Args[2]
11555 ptr1 := x.Args[0]
11556 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
11557 break
11558 }
11559 v.reset(OpARM64MOVWstore)
11560 v.AuxInt = int32ToAuxInt(i - 2)
11561 v.Aux = symToAux(s)
11562 v.AddArg3(ptr0, w, mem)
11563 return true
11564 }
11565
11566
11567
11568 for {
11569 if auxIntToInt32(v.AuxInt) != 2 {
11570 break
11571 }
11572 s := auxToSym(v.Aux)
11573 if v_0.Op != OpARM64ADD {
11574 break
11575 }
11576 _ = v_0.Args[1]
11577 v_0_0 := v_0.Args[0]
11578 v_0_1 := v_0.Args[1]
11579 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
11580 ptr0 := v_0_0
11581 idx0 := v_0_1
11582 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
11583 continue
11584 }
11585 w := v_1.Args[0]
11586 x := v_2
11587 if x.Op != OpARM64MOVHstoreidx {
11588 continue
11589 }
11590 mem := x.Args[3]
11591 ptr1 := x.Args[0]
11592 idx1 := x.Args[1]
11593 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11594 continue
11595 }
11596 v.reset(OpARM64MOVWstoreidx)
11597 v.AddArg4(ptr1, idx1, w, mem)
11598 return true
11599 }
11600 break
11601 }
11602
11603
11604
11605 for {
11606 if auxIntToInt32(v.AuxInt) != 2 {
11607 break
11608 }
11609 s := auxToSym(v.Aux)
11610 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
11611 break
11612 }
11613 idx0 := v_0.Args[1]
11614 ptr0 := v_0.Args[0]
11615 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
11616 break
11617 }
11618 w := v_1.Args[0]
11619 x := v_2
11620 if x.Op != OpARM64MOVHstoreidx2 {
11621 break
11622 }
11623 mem := x.Args[3]
11624 ptr1 := x.Args[0]
11625 idx1 := x.Args[1]
11626 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
11627 break
11628 }
11629 v.reset(OpARM64MOVWstoreidx)
11630 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
11631 v0.AuxInt = int64ToAuxInt(1)
11632 v0.AddArg(idx1)
11633 v.AddArg4(ptr1, v0, w, mem)
11634 return true
11635 }
11636
11637
11638
11639 for {
11640 i := auxIntToInt32(v.AuxInt)
11641 s := auxToSym(v.Aux)
11642 ptr0 := v_0
11643 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(16, 16) {
11644 break
11645 }
11646 w := v_1.Args[0]
11647 x := v_2
11648 if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
11649 break
11650 }
11651 mem := x.Args[2]
11652 ptr1 := x.Args[0]
11653 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
11654 break
11655 }
11656 v.reset(OpARM64MOVWstore)
11657 v.AuxInt = int32ToAuxInt(i - 2)
11658 v.Aux = symToAux(s)
11659 v.AddArg3(ptr0, w, mem)
11660 return true
11661 }
11662
11663
11664
11665 for {
11666 if auxIntToInt32(v.AuxInt) != 2 {
11667 break
11668 }
11669 s := auxToSym(v.Aux)
11670 if v_0.Op != OpARM64ADD {
11671 break
11672 }
11673 _ = v_0.Args[1]
11674 v_0_0 := v_0.Args[0]
11675 v_0_1 := v_0.Args[1]
11676 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
11677 ptr0 := v_0_0
11678 idx0 := v_0_1
11679 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(16, 16) {
11680 continue
11681 }
11682 w := v_1.Args[0]
11683 x := v_2
11684 if x.Op != OpARM64MOVHstoreidx {
11685 continue
11686 }
11687 mem := x.Args[3]
11688 ptr1 := x.Args[0]
11689 idx1 := x.Args[1]
11690 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11691 continue
11692 }
11693 v.reset(OpARM64MOVWstoreidx)
11694 v.AddArg4(ptr1, idx1, w, mem)
11695 return true
11696 }
11697 break
11698 }
11699
11700
11701
11702 for {
11703 if auxIntToInt32(v.AuxInt) != 2 {
11704 break
11705 }
11706 s := auxToSym(v.Aux)
11707 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
11708 break
11709 }
11710 idx0 := v_0.Args[1]
11711 ptr0 := v_0.Args[0]
11712 if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(16, 16) {
11713 break
11714 }
11715 w := v_1.Args[0]
11716 x := v_2
11717 if x.Op != OpARM64MOVHstoreidx2 {
11718 break
11719 }
11720 mem := x.Args[3]
11721 ptr1 := x.Args[0]
11722 idx1 := x.Args[1]
11723 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
11724 break
11725 }
11726 v.reset(OpARM64MOVWstoreidx)
11727 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
11728 v0.AuxInt = int64ToAuxInt(1)
11729 v0.AddArg(idx1)
11730 v.AddArg4(ptr1, v0, w, mem)
11731 return true
11732 }
11733
11734
11735
11736 for {
11737 i := auxIntToInt32(v.AuxInt)
11738 s := auxToSym(v.Aux)
11739 ptr0 := v_0
11740 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
11741 break
11742 }
11743 v_1_0 := v_1.Args[0]
11744 if v_1_0.Op != OpARM64MOVDreg {
11745 break
11746 }
11747 w := v_1_0.Args[0]
11748 x := v_2
11749 if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
11750 break
11751 }
11752 mem := x.Args[2]
11753 ptr1 := x.Args[0]
11754 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
11755 break
11756 }
11757 v.reset(OpARM64MOVWstore)
11758 v.AuxInt = int32ToAuxInt(i - 2)
11759 v.Aux = symToAux(s)
11760 v.AddArg3(ptr0, w, mem)
11761 return true
11762 }
11763
11764
11765
11766 for {
11767 if auxIntToInt32(v.AuxInt) != 2 {
11768 break
11769 }
11770 s := auxToSym(v.Aux)
11771 if v_0.Op != OpARM64ADD {
11772 break
11773 }
11774 _ = v_0.Args[1]
11775 v_0_0 := v_0.Args[0]
11776 v_0_1 := v_0.Args[1]
11777 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
11778 ptr0 := v_0_0
11779 idx0 := v_0_1
11780 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
11781 continue
11782 }
11783 v_1_0 := v_1.Args[0]
11784 if v_1_0.Op != OpARM64MOVDreg {
11785 continue
11786 }
11787 w := v_1_0.Args[0]
11788 x := v_2
11789 if x.Op != OpARM64MOVHstoreidx {
11790 continue
11791 }
11792 mem := x.Args[3]
11793 ptr1 := x.Args[0]
11794 idx1 := x.Args[1]
11795 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11796 continue
11797 }
11798 v.reset(OpARM64MOVWstoreidx)
11799 v.AddArg4(ptr1, idx1, w, mem)
11800 return true
11801 }
11802 break
11803 }
11804
11805
11806
11807 for {
11808 if auxIntToInt32(v.AuxInt) != 2 {
11809 break
11810 }
11811 s := auxToSym(v.Aux)
11812 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
11813 break
11814 }
11815 idx0 := v_0.Args[1]
11816 ptr0 := v_0.Args[0]
11817 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 16 {
11818 break
11819 }
11820 v_1_0 := v_1.Args[0]
11821 if v_1_0.Op != OpARM64MOVDreg {
11822 break
11823 }
11824 w := v_1_0.Args[0]
11825 x := v_2
11826 if x.Op != OpARM64MOVHstoreidx2 {
11827 break
11828 }
11829 mem := x.Args[3]
11830 ptr1 := x.Args[0]
11831 idx1 := x.Args[1]
11832 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
11833 break
11834 }
11835 v.reset(OpARM64MOVWstoreidx)
11836 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
11837 v0.AuxInt = int64ToAuxInt(1)
11838 v0.AddArg(idx1)
11839 v.AddArg4(ptr1, v0, w, mem)
11840 return true
11841 }
11842
11843
11844
11845 for {
11846 i := auxIntToInt32(v.AuxInt)
11847 s := auxToSym(v.Aux)
11848 ptr0 := v_0
11849 if v_1.Op != OpARM64SRLconst {
11850 break
11851 }
11852 j := auxIntToInt64(v_1.AuxInt)
11853 w := v_1.Args[0]
11854 x := v_2
11855 if x.Op != OpARM64MOVHstore || auxIntToInt32(x.AuxInt) != i-2 || auxToSym(x.Aux) != s {
11856 break
11857 }
11858 mem := x.Args[2]
11859 ptr1 := x.Args[0]
11860 w0 := x.Args[1]
11861 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-16 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
11862 break
11863 }
11864 v.reset(OpARM64MOVWstore)
11865 v.AuxInt = int32ToAuxInt(i - 2)
11866 v.Aux = symToAux(s)
11867 v.AddArg3(ptr0, w0, mem)
11868 return true
11869 }
11870
11871
11872
11873 for {
11874 if auxIntToInt32(v.AuxInt) != 2 {
11875 break
11876 }
11877 s := auxToSym(v.Aux)
11878 if v_0.Op != OpARM64ADD {
11879 break
11880 }
11881 _ = v_0.Args[1]
11882 v_0_0 := v_0.Args[0]
11883 v_0_1 := v_0.Args[1]
11884 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
11885 ptr0 := v_0_0
11886 idx0 := v_0_1
11887 if v_1.Op != OpARM64SRLconst {
11888 continue
11889 }
11890 j := auxIntToInt64(v_1.AuxInt)
11891 w := v_1.Args[0]
11892 x := v_2
11893 if x.Op != OpARM64MOVHstoreidx {
11894 continue
11895 }
11896 mem := x.Args[3]
11897 ptr1 := x.Args[0]
11898 idx1 := x.Args[1]
11899 w0 := x.Args[2]
11900 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-16 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11901 continue
11902 }
11903 v.reset(OpARM64MOVWstoreidx)
11904 v.AddArg4(ptr1, idx1, w0, mem)
11905 return true
11906 }
11907 break
11908 }
11909
11910
11911
11912 for {
11913 if auxIntToInt32(v.AuxInt) != 2 {
11914 break
11915 }
11916 s := auxToSym(v.Aux)
11917 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
11918 break
11919 }
11920 idx0 := v_0.Args[1]
11921 ptr0 := v_0.Args[0]
11922 if v_1.Op != OpARM64SRLconst {
11923 break
11924 }
11925 j := auxIntToInt64(v_1.AuxInt)
11926 w := v_1.Args[0]
11927 x := v_2
11928 if x.Op != OpARM64MOVHstoreidx2 {
11929 break
11930 }
11931 mem := x.Args[3]
11932 ptr1 := x.Args[0]
11933 idx1 := x.Args[1]
11934 w0 := x.Args[2]
11935 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-16 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
11936 break
11937 }
11938 v.reset(OpARM64MOVWstoreidx)
11939 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
11940 v0.AuxInt = int64ToAuxInt(1)
11941 v0.AddArg(idx1)
11942 v.AddArg4(ptr1, v0, w0, mem)
11943 return true
11944 }
11945 return false
11946 }
11947 func rewriteValueARM64_OpARM64MOVHstoreidx(v *Value) bool {
11948 v_3 := v.Args[3]
11949 v_2 := v.Args[2]
11950 v_1 := v.Args[1]
11951 v_0 := v.Args[0]
11952
11953
11954
11955 for {
11956 ptr := v_0
11957 if v_1.Op != OpARM64MOVDconst {
11958 break
11959 }
11960 c := auxIntToInt64(v_1.AuxInt)
11961 val := v_2
11962 mem := v_3
11963 if !(is32Bit(c)) {
11964 break
11965 }
11966 v.reset(OpARM64MOVHstore)
11967 v.AuxInt = int32ToAuxInt(int32(c))
11968 v.AddArg3(ptr, val, mem)
11969 return true
11970 }
11971
11972
11973
11974 for {
11975 if v_0.Op != OpARM64MOVDconst {
11976 break
11977 }
11978 c := auxIntToInt64(v_0.AuxInt)
11979 idx := v_1
11980 val := v_2
11981 mem := v_3
11982 if !(is32Bit(c)) {
11983 break
11984 }
11985 v.reset(OpARM64MOVHstore)
11986 v.AuxInt = int32ToAuxInt(int32(c))
11987 v.AddArg3(idx, val, mem)
11988 return true
11989 }
11990
11991
11992 for {
11993 ptr := v_0
11994 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
11995 break
11996 }
11997 idx := v_1.Args[0]
11998 val := v_2
11999 mem := v_3
12000 v.reset(OpARM64MOVHstoreidx2)
12001 v.AddArg4(ptr, idx, val, mem)
12002 return true
12003 }
12004
12005
12006 for {
12007 ptr := v_0
12008 if v_1.Op != OpARM64ADD {
12009 break
12010 }
12011 idx := v_1.Args[1]
12012 if idx != v_1.Args[0] {
12013 break
12014 }
12015 val := v_2
12016 mem := v_3
12017 v.reset(OpARM64MOVHstoreidx2)
12018 v.AddArg4(ptr, idx, val, mem)
12019 return true
12020 }
12021
12022
12023 for {
12024 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
12025 break
12026 }
12027 idx := v_0.Args[0]
12028 ptr := v_1
12029 val := v_2
12030 mem := v_3
12031 v.reset(OpARM64MOVHstoreidx2)
12032 v.AddArg4(ptr, idx, val, mem)
12033 return true
12034 }
12035
12036
12037 for {
12038 if v_0.Op != OpARM64ADD {
12039 break
12040 }
12041 idx := v_0.Args[1]
12042 if idx != v_0.Args[0] {
12043 break
12044 }
12045 ptr := v_1
12046 val := v_2
12047 mem := v_3
12048 v.reset(OpARM64MOVHstoreidx2)
12049 v.AddArg4(ptr, idx, val, mem)
12050 return true
12051 }
12052
12053
12054 for {
12055 ptr := v_0
12056 idx := v_1
12057 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
12058 break
12059 }
12060 mem := v_3
12061 v.reset(OpARM64MOVHstorezeroidx)
12062 v.AddArg3(ptr, idx, mem)
12063 return true
12064 }
12065
12066
12067 for {
12068 ptr := v_0
12069 idx := v_1
12070 if v_2.Op != OpARM64MOVHreg {
12071 break
12072 }
12073 x := v_2.Args[0]
12074 mem := v_3
12075 v.reset(OpARM64MOVHstoreidx)
12076 v.AddArg4(ptr, idx, x, mem)
12077 return true
12078 }
12079
12080
12081 for {
12082 ptr := v_0
12083 idx := v_1
12084 if v_2.Op != OpARM64MOVHUreg {
12085 break
12086 }
12087 x := v_2.Args[0]
12088 mem := v_3
12089 v.reset(OpARM64MOVHstoreidx)
12090 v.AddArg4(ptr, idx, x, mem)
12091 return true
12092 }
12093
12094
12095 for {
12096 ptr := v_0
12097 idx := v_1
12098 if v_2.Op != OpARM64MOVWreg {
12099 break
12100 }
12101 x := v_2.Args[0]
12102 mem := v_3
12103 v.reset(OpARM64MOVHstoreidx)
12104 v.AddArg4(ptr, idx, x, mem)
12105 return true
12106 }
12107
12108
12109 for {
12110 ptr := v_0
12111 idx := v_1
12112 if v_2.Op != OpARM64MOVWUreg {
12113 break
12114 }
12115 x := v_2.Args[0]
12116 mem := v_3
12117 v.reset(OpARM64MOVHstoreidx)
12118 v.AddArg4(ptr, idx, x, mem)
12119 return true
12120 }
12121
12122
12123
12124 for {
12125 ptr := v_0
12126 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 2 {
12127 break
12128 }
12129 idx := v_1.Args[0]
12130 if v_2.Op != OpARM64SRLconst || auxIntToInt64(v_2.AuxInt) != 16 {
12131 break
12132 }
12133 w := v_2.Args[0]
12134 x := v_3
12135 if x.Op != OpARM64MOVHstoreidx {
12136 break
12137 }
12138 mem := x.Args[3]
12139 if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) {
12140 break
12141 }
12142 v.reset(OpARM64MOVWstoreidx)
12143 v.AddArg4(ptr, idx, w, mem)
12144 return true
12145 }
12146 return false
12147 }
12148 func rewriteValueARM64_OpARM64MOVHstoreidx2(v *Value) bool {
12149 v_3 := v.Args[3]
12150 v_2 := v.Args[2]
12151 v_1 := v.Args[1]
12152 v_0 := v.Args[0]
12153
12154
12155
12156 for {
12157 ptr := v_0
12158 if v_1.Op != OpARM64MOVDconst {
12159 break
12160 }
12161 c := auxIntToInt64(v_1.AuxInt)
12162 val := v_2
12163 mem := v_3
12164 if !(is32Bit(c << 1)) {
12165 break
12166 }
12167 v.reset(OpARM64MOVHstore)
12168 v.AuxInt = int32ToAuxInt(int32(c) << 1)
12169 v.AddArg3(ptr, val, mem)
12170 return true
12171 }
12172
12173
12174 for {
12175 ptr := v_0
12176 idx := v_1
12177 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
12178 break
12179 }
12180 mem := v_3
12181 v.reset(OpARM64MOVHstorezeroidx2)
12182 v.AddArg3(ptr, idx, mem)
12183 return true
12184 }
12185
12186
12187 for {
12188 ptr := v_0
12189 idx := v_1
12190 if v_2.Op != OpARM64MOVHreg {
12191 break
12192 }
12193 x := v_2.Args[0]
12194 mem := v_3
12195 v.reset(OpARM64MOVHstoreidx2)
12196 v.AddArg4(ptr, idx, x, mem)
12197 return true
12198 }
12199
12200
12201 for {
12202 ptr := v_0
12203 idx := v_1
12204 if v_2.Op != OpARM64MOVHUreg {
12205 break
12206 }
12207 x := v_2.Args[0]
12208 mem := v_3
12209 v.reset(OpARM64MOVHstoreidx2)
12210 v.AddArg4(ptr, idx, x, mem)
12211 return true
12212 }
12213
12214
12215 for {
12216 ptr := v_0
12217 idx := v_1
12218 if v_2.Op != OpARM64MOVWreg {
12219 break
12220 }
12221 x := v_2.Args[0]
12222 mem := v_3
12223 v.reset(OpARM64MOVHstoreidx2)
12224 v.AddArg4(ptr, idx, x, mem)
12225 return true
12226 }
12227
12228
12229 for {
12230 ptr := v_0
12231 idx := v_1
12232 if v_2.Op != OpARM64MOVWUreg {
12233 break
12234 }
12235 x := v_2.Args[0]
12236 mem := v_3
12237 v.reset(OpARM64MOVHstoreidx2)
12238 v.AddArg4(ptr, idx, x, mem)
12239 return true
12240 }
12241 return false
12242 }
12243 func rewriteValueARM64_OpARM64MOVHstorezero(v *Value) bool {
12244 v_1 := v.Args[1]
12245 v_0 := v.Args[0]
12246 b := v.Block
12247 config := b.Func.Config
12248
12249
12250
12251 for {
12252 off1 := auxIntToInt32(v.AuxInt)
12253 sym := auxToSym(v.Aux)
12254 if v_0.Op != OpARM64ADDconst {
12255 break
12256 }
12257 off2 := auxIntToInt64(v_0.AuxInt)
12258 ptr := v_0.Args[0]
12259 mem := v_1
12260 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12261 break
12262 }
12263 v.reset(OpARM64MOVHstorezero)
12264 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
12265 v.Aux = symToAux(sym)
12266 v.AddArg2(ptr, mem)
12267 return true
12268 }
12269
12270
12271
12272 for {
12273 off1 := auxIntToInt32(v.AuxInt)
12274 sym1 := auxToSym(v.Aux)
12275 if v_0.Op != OpARM64MOVDaddr {
12276 break
12277 }
12278 off2 := auxIntToInt32(v_0.AuxInt)
12279 sym2 := auxToSym(v_0.Aux)
12280 ptr := v_0.Args[0]
12281 mem := v_1
12282 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12283 break
12284 }
12285 v.reset(OpARM64MOVHstorezero)
12286 v.AuxInt = int32ToAuxInt(off1 + off2)
12287 v.Aux = symToAux(mergeSym(sym1, sym2))
12288 v.AddArg2(ptr, mem)
12289 return true
12290 }
12291
12292
12293
12294 for {
12295 off := auxIntToInt32(v.AuxInt)
12296 sym := auxToSym(v.Aux)
12297 if v_0.Op != OpARM64ADD {
12298 break
12299 }
12300 idx := v_0.Args[1]
12301 ptr := v_0.Args[0]
12302 mem := v_1
12303 if !(off == 0 && sym == nil) {
12304 break
12305 }
12306 v.reset(OpARM64MOVHstorezeroidx)
12307 v.AddArg3(ptr, idx, mem)
12308 return true
12309 }
12310
12311
12312
12313 for {
12314 off := auxIntToInt32(v.AuxInt)
12315 sym := auxToSym(v.Aux)
12316 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
12317 break
12318 }
12319 idx := v_0.Args[1]
12320 ptr := v_0.Args[0]
12321 mem := v_1
12322 if !(off == 0 && sym == nil) {
12323 break
12324 }
12325 v.reset(OpARM64MOVHstorezeroidx2)
12326 v.AddArg3(ptr, idx, mem)
12327 return true
12328 }
12329
12330
12331
12332 for {
12333 i := auxIntToInt32(v.AuxInt)
12334 s := auxToSym(v.Aux)
12335 ptr0 := v_0
12336 x := v_1
12337 if x.Op != OpARM64MOVHstorezero {
12338 break
12339 }
12340 j := auxIntToInt32(x.AuxInt)
12341 if auxToSym(x.Aux) != s {
12342 break
12343 }
12344 mem := x.Args[1]
12345 ptr1 := x.Args[0]
12346 if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 2) && isSamePtr(ptr0, ptr1) && clobber(x)) {
12347 break
12348 }
12349 v.reset(OpARM64MOVWstorezero)
12350 v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
12351 v.Aux = symToAux(s)
12352 v.AddArg2(ptr0, mem)
12353 return true
12354 }
12355
12356
12357
12358 for {
12359 if auxIntToInt32(v.AuxInt) != 2 {
12360 break
12361 }
12362 s := auxToSym(v.Aux)
12363 if v_0.Op != OpARM64ADD {
12364 break
12365 }
12366 _ = v_0.Args[1]
12367 v_0_0 := v_0.Args[0]
12368 v_0_1 := v_0.Args[1]
12369 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
12370 ptr0 := v_0_0
12371 idx0 := v_0_1
12372 x := v_1
12373 if x.Op != OpARM64MOVHstorezeroidx {
12374 continue
12375 }
12376 mem := x.Args[2]
12377 ptr1 := x.Args[0]
12378 idx1 := x.Args[1]
12379 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
12380 continue
12381 }
12382 v.reset(OpARM64MOVWstorezeroidx)
12383 v.AddArg3(ptr1, idx1, mem)
12384 return true
12385 }
12386 break
12387 }
12388
12389
12390
12391 for {
12392 if auxIntToInt32(v.AuxInt) != 2 {
12393 break
12394 }
12395 s := auxToSym(v.Aux)
12396 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 1 {
12397 break
12398 }
12399 idx0 := v_0.Args[1]
12400 ptr0 := v_0.Args[0]
12401 x := v_1
12402 if x.Op != OpARM64MOVHstorezeroidx2 {
12403 break
12404 }
12405 mem := x.Args[2]
12406 ptr1 := x.Args[0]
12407 idx1 := x.Args[1]
12408 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
12409 break
12410 }
12411 v.reset(OpARM64MOVWstorezeroidx)
12412 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
12413 v0.AuxInt = int64ToAuxInt(1)
12414 v0.AddArg(idx1)
12415 v.AddArg3(ptr1, v0, mem)
12416 return true
12417 }
12418 return false
12419 }
12420 func rewriteValueARM64_OpARM64MOVHstorezeroidx(v *Value) bool {
12421 v_2 := v.Args[2]
12422 v_1 := v.Args[1]
12423 v_0 := v.Args[0]
12424
12425
12426
12427 for {
12428 ptr := v_0
12429 if v_1.Op != OpARM64MOVDconst {
12430 break
12431 }
12432 c := auxIntToInt64(v_1.AuxInt)
12433 mem := v_2
12434 if !(is32Bit(c)) {
12435 break
12436 }
12437 v.reset(OpARM64MOVHstorezero)
12438 v.AuxInt = int32ToAuxInt(int32(c))
12439 v.AddArg2(ptr, mem)
12440 return true
12441 }
12442
12443
12444
12445 for {
12446 if v_0.Op != OpARM64MOVDconst {
12447 break
12448 }
12449 c := auxIntToInt64(v_0.AuxInt)
12450 idx := v_1
12451 mem := v_2
12452 if !(is32Bit(c)) {
12453 break
12454 }
12455 v.reset(OpARM64MOVHstorezero)
12456 v.AuxInt = int32ToAuxInt(int32(c))
12457 v.AddArg2(idx, mem)
12458 return true
12459 }
12460
12461
12462 for {
12463 ptr := v_0
12464 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 1 {
12465 break
12466 }
12467 idx := v_1.Args[0]
12468 mem := v_2
12469 v.reset(OpARM64MOVHstorezeroidx2)
12470 v.AddArg3(ptr, idx, mem)
12471 return true
12472 }
12473
12474
12475 for {
12476 ptr := v_0
12477 if v_1.Op != OpARM64ADD {
12478 break
12479 }
12480 idx := v_1.Args[1]
12481 if idx != v_1.Args[0] {
12482 break
12483 }
12484 mem := v_2
12485 v.reset(OpARM64MOVHstorezeroidx2)
12486 v.AddArg3(ptr, idx, mem)
12487 return true
12488 }
12489
12490
12491 for {
12492 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 1 {
12493 break
12494 }
12495 idx := v_0.Args[0]
12496 ptr := v_1
12497 mem := v_2
12498 v.reset(OpARM64MOVHstorezeroidx2)
12499 v.AddArg3(ptr, idx, mem)
12500 return true
12501 }
12502
12503
12504 for {
12505 if v_0.Op != OpARM64ADD {
12506 break
12507 }
12508 idx := v_0.Args[1]
12509 if idx != v_0.Args[0] {
12510 break
12511 }
12512 ptr := v_1
12513 mem := v_2
12514 v.reset(OpARM64MOVHstorezeroidx2)
12515 v.AddArg3(ptr, idx, mem)
12516 return true
12517 }
12518
12519
12520
12521 for {
12522 ptr := v_0
12523 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 2 {
12524 break
12525 }
12526 idx := v_1.Args[0]
12527 x := v_2
12528 if x.Op != OpARM64MOVHstorezeroidx {
12529 break
12530 }
12531 mem := x.Args[2]
12532 if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
12533 break
12534 }
12535 v.reset(OpARM64MOVWstorezeroidx)
12536 v.AddArg3(ptr, idx, mem)
12537 return true
12538 }
12539 return false
12540 }
12541 func rewriteValueARM64_OpARM64MOVHstorezeroidx2(v *Value) bool {
12542 v_2 := v.Args[2]
12543 v_1 := v.Args[1]
12544 v_0 := v.Args[0]
12545
12546
12547
12548 for {
12549 ptr := v_0
12550 if v_1.Op != OpARM64MOVDconst {
12551 break
12552 }
12553 c := auxIntToInt64(v_1.AuxInt)
12554 mem := v_2
12555 if !(is32Bit(c << 1)) {
12556 break
12557 }
12558 v.reset(OpARM64MOVHstorezero)
12559 v.AuxInt = int32ToAuxInt(int32(c << 1))
12560 v.AddArg2(ptr, mem)
12561 return true
12562 }
12563 return false
12564 }
12565 func rewriteValueARM64_OpARM64MOVQstorezero(v *Value) bool {
12566 v_1 := v.Args[1]
12567 v_0 := v.Args[0]
12568 b := v.Block
12569 config := b.Func.Config
12570
12571
12572
12573 for {
12574 off1 := auxIntToInt32(v.AuxInt)
12575 sym := auxToSym(v.Aux)
12576 if v_0.Op != OpARM64ADDconst {
12577 break
12578 }
12579 off2 := auxIntToInt64(v_0.AuxInt)
12580 ptr := v_0.Args[0]
12581 mem := v_1
12582 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12583 break
12584 }
12585 v.reset(OpARM64MOVQstorezero)
12586 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
12587 v.Aux = symToAux(sym)
12588 v.AddArg2(ptr, mem)
12589 return true
12590 }
12591
12592
12593
12594 for {
12595 off1 := auxIntToInt32(v.AuxInt)
12596 sym1 := auxToSym(v.Aux)
12597 if v_0.Op != OpARM64MOVDaddr {
12598 break
12599 }
12600 off2 := auxIntToInt32(v_0.AuxInt)
12601 sym2 := auxToSym(v_0.Aux)
12602 ptr := v_0.Args[0]
12603 mem := v_1
12604 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12605 break
12606 }
12607 v.reset(OpARM64MOVQstorezero)
12608 v.AuxInt = int32ToAuxInt(off1 + off2)
12609 v.Aux = symToAux(mergeSym(sym1, sym2))
12610 v.AddArg2(ptr, mem)
12611 return true
12612 }
12613 return false
12614 }
12615 func rewriteValueARM64_OpARM64MOVWUload(v *Value) bool {
12616 v_1 := v.Args[1]
12617 v_0 := v.Args[0]
12618 b := v.Block
12619 config := b.Func.Config
12620
12621
12622 for {
12623 off := auxIntToInt32(v.AuxInt)
12624 sym := auxToSym(v.Aux)
12625 ptr := v_0
12626 if v_1.Op != OpARM64FMOVSstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
12627 break
12628 }
12629 val := v_1.Args[1]
12630 if ptr != v_1.Args[0] {
12631 break
12632 }
12633 v.reset(OpARM64FMOVSfpgp)
12634 v.AddArg(val)
12635 return true
12636 }
12637
12638
12639
12640 for {
12641 off1 := auxIntToInt32(v.AuxInt)
12642 sym := auxToSym(v.Aux)
12643 if v_0.Op != OpARM64ADDconst {
12644 break
12645 }
12646 off2 := auxIntToInt64(v_0.AuxInt)
12647 ptr := v_0.Args[0]
12648 mem := v_1
12649 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12650 break
12651 }
12652 v.reset(OpARM64MOVWUload)
12653 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
12654 v.Aux = symToAux(sym)
12655 v.AddArg2(ptr, mem)
12656 return true
12657 }
12658
12659
12660
12661 for {
12662 off := auxIntToInt32(v.AuxInt)
12663 sym := auxToSym(v.Aux)
12664 if v_0.Op != OpARM64ADD {
12665 break
12666 }
12667 idx := v_0.Args[1]
12668 ptr := v_0.Args[0]
12669 mem := v_1
12670 if !(off == 0 && sym == nil) {
12671 break
12672 }
12673 v.reset(OpARM64MOVWUloadidx)
12674 v.AddArg3(ptr, idx, mem)
12675 return true
12676 }
12677
12678
12679
12680 for {
12681 off := auxIntToInt32(v.AuxInt)
12682 sym := auxToSym(v.Aux)
12683 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
12684 break
12685 }
12686 idx := v_0.Args[1]
12687 ptr := v_0.Args[0]
12688 mem := v_1
12689 if !(off == 0 && sym == nil) {
12690 break
12691 }
12692 v.reset(OpARM64MOVWUloadidx4)
12693 v.AddArg3(ptr, idx, mem)
12694 return true
12695 }
12696
12697
12698
12699 for {
12700 off1 := auxIntToInt32(v.AuxInt)
12701 sym1 := auxToSym(v.Aux)
12702 if v_0.Op != OpARM64MOVDaddr {
12703 break
12704 }
12705 off2 := auxIntToInt32(v_0.AuxInt)
12706 sym2 := auxToSym(v_0.Aux)
12707 ptr := v_0.Args[0]
12708 mem := v_1
12709 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12710 break
12711 }
12712 v.reset(OpARM64MOVWUload)
12713 v.AuxInt = int32ToAuxInt(off1 + off2)
12714 v.Aux = symToAux(mergeSym(sym1, sym2))
12715 v.AddArg2(ptr, mem)
12716 return true
12717 }
12718
12719
12720
12721 for {
12722 off := auxIntToInt32(v.AuxInt)
12723 sym := auxToSym(v.Aux)
12724 ptr := v_0
12725 if v_1.Op != OpARM64MOVWstorezero {
12726 break
12727 }
12728 off2 := auxIntToInt32(v_1.AuxInt)
12729 sym2 := auxToSym(v_1.Aux)
12730 ptr2 := v_1.Args[0]
12731 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
12732 break
12733 }
12734 v.reset(OpARM64MOVDconst)
12735 v.AuxInt = int64ToAuxInt(0)
12736 return true
12737 }
12738
12739
12740
12741 for {
12742 off := auxIntToInt32(v.AuxInt)
12743 sym := auxToSym(v.Aux)
12744 if v_0.Op != OpSB || !(symIsRO(sym)) {
12745 break
12746 }
12747 v.reset(OpARM64MOVDconst)
12748 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
12749 return true
12750 }
12751 return false
12752 }
12753 func rewriteValueARM64_OpARM64MOVWUloadidx(v *Value) bool {
12754 v_2 := v.Args[2]
12755 v_1 := v.Args[1]
12756 v_0 := v.Args[0]
12757
12758
12759
12760 for {
12761 ptr := v_0
12762 if v_1.Op != OpARM64MOVDconst {
12763 break
12764 }
12765 c := auxIntToInt64(v_1.AuxInt)
12766 mem := v_2
12767 if !(is32Bit(c)) {
12768 break
12769 }
12770 v.reset(OpARM64MOVWUload)
12771 v.AuxInt = int32ToAuxInt(int32(c))
12772 v.AddArg2(ptr, mem)
12773 return true
12774 }
12775
12776
12777
12778 for {
12779 if v_0.Op != OpARM64MOVDconst {
12780 break
12781 }
12782 c := auxIntToInt64(v_0.AuxInt)
12783 ptr := v_1
12784 mem := v_2
12785 if !(is32Bit(c)) {
12786 break
12787 }
12788 v.reset(OpARM64MOVWUload)
12789 v.AuxInt = int32ToAuxInt(int32(c))
12790 v.AddArg2(ptr, mem)
12791 return true
12792 }
12793
12794
12795 for {
12796 ptr := v_0
12797 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
12798 break
12799 }
12800 idx := v_1.Args[0]
12801 mem := v_2
12802 v.reset(OpARM64MOVWUloadidx4)
12803 v.AddArg3(ptr, idx, mem)
12804 return true
12805 }
12806
12807
12808 for {
12809 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
12810 break
12811 }
12812 idx := v_0.Args[0]
12813 ptr := v_1
12814 mem := v_2
12815 v.reset(OpARM64MOVWUloadidx4)
12816 v.AddArg3(ptr, idx, mem)
12817 return true
12818 }
12819
12820
12821
12822 for {
12823 ptr := v_0
12824 idx := v_1
12825 if v_2.Op != OpARM64MOVWstorezeroidx {
12826 break
12827 }
12828 idx2 := v_2.Args[1]
12829 ptr2 := v_2.Args[0]
12830 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
12831 break
12832 }
12833 v.reset(OpARM64MOVDconst)
12834 v.AuxInt = int64ToAuxInt(0)
12835 return true
12836 }
12837 return false
12838 }
12839 func rewriteValueARM64_OpARM64MOVWUloadidx4(v *Value) bool {
12840 v_2 := v.Args[2]
12841 v_1 := v.Args[1]
12842 v_0 := v.Args[0]
12843
12844
12845
12846 for {
12847 ptr := v_0
12848 if v_1.Op != OpARM64MOVDconst {
12849 break
12850 }
12851 c := auxIntToInt64(v_1.AuxInt)
12852 mem := v_2
12853 if !(is32Bit(c << 2)) {
12854 break
12855 }
12856 v.reset(OpARM64MOVWUload)
12857 v.AuxInt = int32ToAuxInt(int32(c) << 2)
12858 v.AddArg2(ptr, mem)
12859 return true
12860 }
12861
12862
12863
12864 for {
12865 ptr := v_0
12866 idx := v_1
12867 if v_2.Op != OpARM64MOVWstorezeroidx4 {
12868 break
12869 }
12870 idx2 := v_2.Args[1]
12871 ptr2 := v_2.Args[0]
12872 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
12873 break
12874 }
12875 v.reset(OpARM64MOVDconst)
12876 v.AuxInt = int64ToAuxInt(0)
12877 return true
12878 }
12879 return false
12880 }
12881 func rewriteValueARM64_OpARM64MOVWUreg(v *Value) bool {
12882 v_0 := v.Args[0]
12883
12884
12885 for {
12886 x := v_0
12887 if x.Op != OpARM64MOVBUload {
12888 break
12889 }
12890 v.reset(OpARM64MOVDreg)
12891 v.AddArg(x)
12892 return true
12893 }
12894
12895
12896 for {
12897 x := v_0
12898 if x.Op != OpARM64MOVHUload {
12899 break
12900 }
12901 v.reset(OpARM64MOVDreg)
12902 v.AddArg(x)
12903 return true
12904 }
12905
12906
12907 for {
12908 x := v_0
12909 if x.Op != OpARM64MOVWUload {
12910 break
12911 }
12912 v.reset(OpARM64MOVDreg)
12913 v.AddArg(x)
12914 return true
12915 }
12916
12917
12918 for {
12919 x := v_0
12920 if x.Op != OpARM64MOVBUloadidx {
12921 break
12922 }
12923 v.reset(OpARM64MOVDreg)
12924 v.AddArg(x)
12925 return true
12926 }
12927
12928
12929 for {
12930 x := v_0
12931 if x.Op != OpARM64MOVHUloadidx {
12932 break
12933 }
12934 v.reset(OpARM64MOVDreg)
12935 v.AddArg(x)
12936 return true
12937 }
12938
12939
12940 for {
12941 x := v_0
12942 if x.Op != OpARM64MOVWUloadidx {
12943 break
12944 }
12945 v.reset(OpARM64MOVDreg)
12946 v.AddArg(x)
12947 return true
12948 }
12949
12950
12951 for {
12952 x := v_0
12953 if x.Op != OpARM64MOVHUloadidx2 {
12954 break
12955 }
12956 v.reset(OpARM64MOVDreg)
12957 v.AddArg(x)
12958 return true
12959 }
12960
12961
12962 for {
12963 x := v_0
12964 if x.Op != OpARM64MOVWUloadidx4 {
12965 break
12966 }
12967 v.reset(OpARM64MOVDreg)
12968 v.AddArg(x)
12969 return true
12970 }
12971
12972
12973 for {
12974 x := v_0
12975 if x.Op != OpARM64MOVBUreg {
12976 break
12977 }
12978 v.reset(OpARM64MOVDreg)
12979 v.AddArg(x)
12980 return true
12981 }
12982
12983
12984 for {
12985 x := v_0
12986 if x.Op != OpARM64MOVHUreg {
12987 break
12988 }
12989 v.reset(OpARM64MOVDreg)
12990 v.AddArg(x)
12991 return true
12992 }
12993
12994
12995 for {
12996 x := v_0
12997 if x.Op != OpARM64MOVWUreg {
12998 break
12999 }
13000 v.reset(OpARM64MOVDreg)
13001 v.AddArg(x)
13002 return true
13003 }
13004
13005
13006 for {
13007 if v_0.Op != OpARM64ANDconst {
13008 break
13009 }
13010 c := auxIntToInt64(v_0.AuxInt)
13011 x := v_0.Args[0]
13012 v.reset(OpARM64ANDconst)
13013 v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
13014 v.AddArg(x)
13015 return true
13016 }
13017
13018
13019 for {
13020 if v_0.Op != OpARM64MOVDconst {
13021 break
13022 }
13023 c := auxIntToInt64(v_0.AuxInt)
13024 v.reset(OpARM64MOVDconst)
13025 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
13026 return true
13027 }
13028
13029
13030
13031 for {
13032 if v_0.Op != OpARM64SLLconst {
13033 break
13034 }
13035 lc := auxIntToInt64(v_0.AuxInt)
13036 if !(lc >= 32) {
13037 break
13038 }
13039 v.reset(OpARM64MOVDconst)
13040 v.AuxInt = int64ToAuxInt(0)
13041 return true
13042 }
13043
13044
13045
13046 for {
13047 if v_0.Op != OpARM64SLLconst {
13048 break
13049 }
13050 lc := auxIntToInt64(v_0.AuxInt)
13051 x := v_0.Args[0]
13052 if !(lc < 32) {
13053 break
13054 }
13055 v.reset(OpARM64UBFIZ)
13056 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc))
13057 v.AddArg(x)
13058 return true
13059 }
13060
13061
13062
13063 for {
13064 if v_0.Op != OpARM64SRLconst {
13065 break
13066 }
13067 rc := auxIntToInt64(v_0.AuxInt)
13068 x := v_0.Args[0]
13069 if !(rc < 32) {
13070 break
13071 }
13072 v.reset(OpARM64UBFX)
13073 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32))
13074 v.AddArg(x)
13075 return true
13076 }
13077
13078
13079
13080 for {
13081 if v_0.Op != OpARM64UBFX {
13082 break
13083 }
13084 bfc := auxIntToArm64BitField(v_0.AuxInt)
13085 x := v_0.Args[0]
13086 if !(bfc.getARM64BFwidth() <= 32) {
13087 break
13088 }
13089 v.reset(OpARM64UBFX)
13090 v.AuxInt = arm64BitFieldToAuxInt(bfc)
13091 v.AddArg(x)
13092 return true
13093 }
13094 return false
13095 }
13096 func rewriteValueARM64_OpARM64MOVWload(v *Value) bool {
13097 v_1 := v.Args[1]
13098 v_0 := v.Args[0]
13099 b := v.Block
13100 config := b.Func.Config
13101
13102
13103
13104 for {
13105 off1 := auxIntToInt32(v.AuxInt)
13106 sym := auxToSym(v.Aux)
13107 if v_0.Op != OpARM64ADDconst {
13108 break
13109 }
13110 off2 := auxIntToInt64(v_0.AuxInt)
13111 ptr := v_0.Args[0]
13112 mem := v_1
13113 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13114 break
13115 }
13116 v.reset(OpARM64MOVWload)
13117 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
13118 v.Aux = symToAux(sym)
13119 v.AddArg2(ptr, mem)
13120 return true
13121 }
13122
13123
13124
13125 for {
13126 off := auxIntToInt32(v.AuxInt)
13127 sym := auxToSym(v.Aux)
13128 if v_0.Op != OpARM64ADD {
13129 break
13130 }
13131 idx := v_0.Args[1]
13132 ptr := v_0.Args[0]
13133 mem := v_1
13134 if !(off == 0 && sym == nil) {
13135 break
13136 }
13137 v.reset(OpARM64MOVWloadidx)
13138 v.AddArg3(ptr, idx, mem)
13139 return true
13140 }
13141
13142
13143
13144 for {
13145 off := auxIntToInt32(v.AuxInt)
13146 sym := auxToSym(v.Aux)
13147 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
13148 break
13149 }
13150 idx := v_0.Args[1]
13151 ptr := v_0.Args[0]
13152 mem := v_1
13153 if !(off == 0 && sym == nil) {
13154 break
13155 }
13156 v.reset(OpARM64MOVWloadidx4)
13157 v.AddArg3(ptr, idx, mem)
13158 return true
13159 }
13160
13161
13162
13163 for {
13164 off1 := auxIntToInt32(v.AuxInt)
13165 sym1 := auxToSym(v.Aux)
13166 if v_0.Op != OpARM64MOVDaddr {
13167 break
13168 }
13169 off2 := auxIntToInt32(v_0.AuxInt)
13170 sym2 := auxToSym(v_0.Aux)
13171 ptr := v_0.Args[0]
13172 mem := v_1
13173 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13174 break
13175 }
13176 v.reset(OpARM64MOVWload)
13177 v.AuxInt = int32ToAuxInt(off1 + off2)
13178 v.Aux = symToAux(mergeSym(sym1, sym2))
13179 v.AddArg2(ptr, mem)
13180 return true
13181 }
13182
13183
13184
13185 for {
13186 off := auxIntToInt32(v.AuxInt)
13187 sym := auxToSym(v.Aux)
13188 ptr := v_0
13189 if v_1.Op != OpARM64MOVWstorezero {
13190 break
13191 }
13192 off2 := auxIntToInt32(v_1.AuxInt)
13193 sym2 := auxToSym(v_1.Aux)
13194 ptr2 := v_1.Args[0]
13195 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
13196 break
13197 }
13198 v.reset(OpARM64MOVDconst)
13199 v.AuxInt = int64ToAuxInt(0)
13200 return true
13201 }
13202 return false
13203 }
13204 func rewriteValueARM64_OpARM64MOVWloadidx(v *Value) bool {
13205 v_2 := v.Args[2]
13206 v_1 := v.Args[1]
13207 v_0 := v.Args[0]
13208
13209
13210
13211 for {
13212 ptr := v_0
13213 if v_1.Op != OpARM64MOVDconst {
13214 break
13215 }
13216 c := auxIntToInt64(v_1.AuxInt)
13217 mem := v_2
13218 if !(is32Bit(c)) {
13219 break
13220 }
13221 v.reset(OpARM64MOVWload)
13222 v.AuxInt = int32ToAuxInt(int32(c))
13223 v.AddArg2(ptr, mem)
13224 return true
13225 }
13226
13227
13228
13229 for {
13230 if v_0.Op != OpARM64MOVDconst {
13231 break
13232 }
13233 c := auxIntToInt64(v_0.AuxInt)
13234 ptr := v_1
13235 mem := v_2
13236 if !(is32Bit(c)) {
13237 break
13238 }
13239 v.reset(OpARM64MOVWload)
13240 v.AuxInt = int32ToAuxInt(int32(c))
13241 v.AddArg2(ptr, mem)
13242 return true
13243 }
13244
13245
13246 for {
13247 ptr := v_0
13248 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
13249 break
13250 }
13251 idx := v_1.Args[0]
13252 mem := v_2
13253 v.reset(OpARM64MOVWloadidx4)
13254 v.AddArg3(ptr, idx, mem)
13255 return true
13256 }
13257
13258
13259 for {
13260 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
13261 break
13262 }
13263 idx := v_0.Args[0]
13264 ptr := v_1
13265 mem := v_2
13266 v.reset(OpARM64MOVWloadidx4)
13267 v.AddArg3(ptr, idx, mem)
13268 return true
13269 }
13270
13271
13272
13273 for {
13274 ptr := v_0
13275 idx := v_1
13276 if v_2.Op != OpARM64MOVWstorezeroidx {
13277 break
13278 }
13279 idx2 := v_2.Args[1]
13280 ptr2 := v_2.Args[0]
13281 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
13282 break
13283 }
13284 v.reset(OpARM64MOVDconst)
13285 v.AuxInt = int64ToAuxInt(0)
13286 return true
13287 }
13288 return false
13289 }
13290 func rewriteValueARM64_OpARM64MOVWloadidx4(v *Value) bool {
13291 v_2 := v.Args[2]
13292 v_1 := v.Args[1]
13293 v_0 := v.Args[0]
13294
13295
13296
13297 for {
13298 ptr := v_0
13299 if v_1.Op != OpARM64MOVDconst {
13300 break
13301 }
13302 c := auxIntToInt64(v_1.AuxInt)
13303 mem := v_2
13304 if !(is32Bit(c << 2)) {
13305 break
13306 }
13307 v.reset(OpARM64MOVWload)
13308 v.AuxInt = int32ToAuxInt(int32(c) << 2)
13309 v.AddArg2(ptr, mem)
13310 return true
13311 }
13312
13313
13314
13315 for {
13316 ptr := v_0
13317 idx := v_1
13318 if v_2.Op != OpARM64MOVWstorezeroidx4 {
13319 break
13320 }
13321 idx2 := v_2.Args[1]
13322 ptr2 := v_2.Args[0]
13323 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
13324 break
13325 }
13326 v.reset(OpARM64MOVDconst)
13327 v.AuxInt = int64ToAuxInt(0)
13328 return true
13329 }
13330 return false
13331 }
13332 func rewriteValueARM64_OpARM64MOVWreg(v *Value) bool {
13333 v_0 := v.Args[0]
13334
13335
13336 for {
13337 x := v_0
13338 if x.Op != OpARM64MOVBload {
13339 break
13340 }
13341 v.reset(OpARM64MOVDreg)
13342 v.AddArg(x)
13343 return true
13344 }
13345
13346
13347 for {
13348 x := v_0
13349 if x.Op != OpARM64MOVBUload {
13350 break
13351 }
13352 v.reset(OpARM64MOVDreg)
13353 v.AddArg(x)
13354 return true
13355 }
13356
13357
13358 for {
13359 x := v_0
13360 if x.Op != OpARM64MOVHload {
13361 break
13362 }
13363 v.reset(OpARM64MOVDreg)
13364 v.AddArg(x)
13365 return true
13366 }
13367
13368
13369 for {
13370 x := v_0
13371 if x.Op != OpARM64MOVHUload {
13372 break
13373 }
13374 v.reset(OpARM64MOVDreg)
13375 v.AddArg(x)
13376 return true
13377 }
13378
13379
13380 for {
13381 x := v_0
13382 if x.Op != OpARM64MOVWload {
13383 break
13384 }
13385 v.reset(OpARM64MOVDreg)
13386 v.AddArg(x)
13387 return true
13388 }
13389
13390
13391 for {
13392 x := v_0
13393 if x.Op != OpARM64MOVBloadidx {
13394 break
13395 }
13396 v.reset(OpARM64MOVDreg)
13397 v.AddArg(x)
13398 return true
13399 }
13400
13401
13402 for {
13403 x := v_0
13404 if x.Op != OpARM64MOVBUloadidx {
13405 break
13406 }
13407 v.reset(OpARM64MOVDreg)
13408 v.AddArg(x)
13409 return true
13410 }
13411
13412
13413 for {
13414 x := v_0
13415 if x.Op != OpARM64MOVHloadidx {
13416 break
13417 }
13418 v.reset(OpARM64MOVDreg)
13419 v.AddArg(x)
13420 return true
13421 }
13422
13423
13424 for {
13425 x := v_0
13426 if x.Op != OpARM64MOVHUloadidx {
13427 break
13428 }
13429 v.reset(OpARM64MOVDreg)
13430 v.AddArg(x)
13431 return true
13432 }
13433
13434
13435 for {
13436 x := v_0
13437 if x.Op != OpARM64MOVWloadidx {
13438 break
13439 }
13440 v.reset(OpARM64MOVDreg)
13441 v.AddArg(x)
13442 return true
13443 }
13444
13445
13446 for {
13447 x := v_0
13448 if x.Op != OpARM64MOVHloadidx2 {
13449 break
13450 }
13451 v.reset(OpARM64MOVDreg)
13452 v.AddArg(x)
13453 return true
13454 }
13455
13456
13457 for {
13458 x := v_0
13459 if x.Op != OpARM64MOVHUloadidx2 {
13460 break
13461 }
13462 v.reset(OpARM64MOVDreg)
13463 v.AddArg(x)
13464 return true
13465 }
13466
13467
13468 for {
13469 x := v_0
13470 if x.Op != OpARM64MOVWloadidx4 {
13471 break
13472 }
13473 v.reset(OpARM64MOVDreg)
13474 v.AddArg(x)
13475 return true
13476 }
13477
13478
13479 for {
13480 x := v_0
13481 if x.Op != OpARM64MOVBreg {
13482 break
13483 }
13484 v.reset(OpARM64MOVDreg)
13485 v.AddArg(x)
13486 return true
13487 }
13488
13489
13490 for {
13491 x := v_0
13492 if x.Op != OpARM64MOVBUreg {
13493 break
13494 }
13495 v.reset(OpARM64MOVDreg)
13496 v.AddArg(x)
13497 return true
13498 }
13499
13500
13501 for {
13502 x := v_0
13503 if x.Op != OpARM64MOVHreg {
13504 break
13505 }
13506 v.reset(OpARM64MOVDreg)
13507 v.AddArg(x)
13508 return true
13509 }
13510
13511
13512 for {
13513 x := v_0
13514 if x.Op != OpARM64MOVWreg {
13515 break
13516 }
13517 v.reset(OpARM64MOVDreg)
13518 v.AddArg(x)
13519 return true
13520 }
13521
13522
13523 for {
13524 if v_0.Op != OpARM64MOVDconst {
13525 break
13526 }
13527 c := auxIntToInt64(v_0.AuxInt)
13528 v.reset(OpARM64MOVDconst)
13529 v.AuxInt = int64ToAuxInt(int64(int32(c)))
13530 return true
13531 }
13532
13533
13534
13535 for {
13536 if v_0.Op != OpARM64SLLconst {
13537 break
13538 }
13539 lc := auxIntToInt64(v_0.AuxInt)
13540 x := v_0.Args[0]
13541 if !(lc < 32) {
13542 break
13543 }
13544 v.reset(OpARM64SBFIZ)
13545 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, 32-lc))
13546 v.AddArg(x)
13547 return true
13548 }
13549
13550
13551
13552 for {
13553 if v_0.Op != OpARM64SBFX {
13554 break
13555 }
13556 bfc := auxIntToArm64BitField(v_0.AuxInt)
13557 x := v_0.Args[0]
13558 if !(bfc.getARM64BFwidth() <= 32) {
13559 break
13560 }
13561 v.reset(OpARM64SBFX)
13562 v.AuxInt = arm64BitFieldToAuxInt(bfc)
13563 v.AddArg(x)
13564 return true
13565 }
13566 return false
13567 }
13568 func rewriteValueARM64_OpARM64MOVWstore(v *Value) bool {
13569 v_2 := v.Args[2]
13570 v_1 := v.Args[1]
13571 v_0 := v.Args[0]
13572 b := v.Block
13573 config := b.Func.Config
13574
13575
13576 for {
13577 off := auxIntToInt32(v.AuxInt)
13578 sym := auxToSym(v.Aux)
13579 ptr := v_0
13580 if v_1.Op != OpARM64FMOVSfpgp {
13581 break
13582 }
13583 val := v_1.Args[0]
13584 mem := v_2
13585 v.reset(OpARM64FMOVSstore)
13586 v.AuxInt = int32ToAuxInt(off)
13587 v.Aux = symToAux(sym)
13588 v.AddArg3(ptr, val, mem)
13589 return true
13590 }
13591
13592
13593
13594 for {
13595 off1 := auxIntToInt32(v.AuxInt)
13596 sym := auxToSym(v.Aux)
13597 if v_0.Op != OpARM64ADDconst {
13598 break
13599 }
13600 off2 := auxIntToInt64(v_0.AuxInt)
13601 ptr := v_0.Args[0]
13602 val := v_1
13603 mem := v_2
13604 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13605 break
13606 }
13607 v.reset(OpARM64MOVWstore)
13608 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
13609 v.Aux = symToAux(sym)
13610 v.AddArg3(ptr, val, mem)
13611 return true
13612 }
13613
13614
13615
13616 for {
13617 off := auxIntToInt32(v.AuxInt)
13618 sym := auxToSym(v.Aux)
13619 if v_0.Op != OpARM64ADD {
13620 break
13621 }
13622 idx := v_0.Args[1]
13623 ptr := v_0.Args[0]
13624 val := v_1
13625 mem := v_2
13626 if !(off == 0 && sym == nil) {
13627 break
13628 }
13629 v.reset(OpARM64MOVWstoreidx)
13630 v.AddArg4(ptr, idx, val, mem)
13631 return true
13632 }
13633
13634
13635
13636 for {
13637 off := auxIntToInt32(v.AuxInt)
13638 sym := auxToSym(v.Aux)
13639 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
13640 break
13641 }
13642 idx := v_0.Args[1]
13643 ptr := v_0.Args[0]
13644 val := v_1
13645 mem := v_2
13646 if !(off == 0 && sym == nil) {
13647 break
13648 }
13649 v.reset(OpARM64MOVWstoreidx4)
13650 v.AddArg4(ptr, idx, val, mem)
13651 return true
13652 }
13653
13654
13655
13656 for {
13657 off1 := auxIntToInt32(v.AuxInt)
13658 sym1 := auxToSym(v.Aux)
13659 if v_0.Op != OpARM64MOVDaddr {
13660 break
13661 }
13662 off2 := auxIntToInt32(v_0.AuxInt)
13663 sym2 := auxToSym(v_0.Aux)
13664 ptr := v_0.Args[0]
13665 val := v_1
13666 mem := v_2
13667 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13668 break
13669 }
13670 v.reset(OpARM64MOVWstore)
13671 v.AuxInt = int32ToAuxInt(off1 + off2)
13672 v.Aux = symToAux(mergeSym(sym1, sym2))
13673 v.AddArg3(ptr, val, mem)
13674 return true
13675 }
13676
13677
13678 for {
13679 off := auxIntToInt32(v.AuxInt)
13680 sym := auxToSym(v.Aux)
13681 ptr := v_0
13682 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
13683 break
13684 }
13685 mem := v_2
13686 v.reset(OpARM64MOVWstorezero)
13687 v.AuxInt = int32ToAuxInt(off)
13688 v.Aux = symToAux(sym)
13689 v.AddArg2(ptr, mem)
13690 return true
13691 }
13692
13693
13694 for {
13695 off := auxIntToInt32(v.AuxInt)
13696 sym := auxToSym(v.Aux)
13697 ptr := v_0
13698 if v_1.Op != OpARM64MOVWreg {
13699 break
13700 }
13701 x := v_1.Args[0]
13702 mem := v_2
13703 v.reset(OpARM64MOVWstore)
13704 v.AuxInt = int32ToAuxInt(off)
13705 v.Aux = symToAux(sym)
13706 v.AddArg3(ptr, x, mem)
13707 return true
13708 }
13709
13710
13711 for {
13712 off := auxIntToInt32(v.AuxInt)
13713 sym := auxToSym(v.Aux)
13714 ptr := v_0
13715 if v_1.Op != OpARM64MOVWUreg {
13716 break
13717 }
13718 x := v_1.Args[0]
13719 mem := v_2
13720 v.reset(OpARM64MOVWstore)
13721 v.AuxInt = int32ToAuxInt(off)
13722 v.Aux = symToAux(sym)
13723 v.AddArg3(ptr, x, mem)
13724 return true
13725 }
13726
13727
13728
13729 for {
13730 i := auxIntToInt32(v.AuxInt)
13731 s := auxToSym(v.Aux)
13732 ptr0 := v_0
13733 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 32 {
13734 break
13735 }
13736 w := v_1.Args[0]
13737 x := v_2
13738 if x.Op != OpARM64MOVWstore || auxIntToInt32(x.AuxInt) != i-4 || auxToSym(x.Aux) != s {
13739 break
13740 }
13741 mem := x.Args[2]
13742 ptr1 := x.Args[0]
13743 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
13744 break
13745 }
13746 v.reset(OpARM64MOVDstore)
13747 v.AuxInt = int32ToAuxInt(i - 4)
13748 v.Aux = symToAux(s)
13749 v.AddArg3(ptr0, w, mem)
13750 return true
13751 }
13752
13753
13754
13755 for {
13756 if auxIntToInt32(v.AuxInt) != 4 {
13757 break
13758 }
13759 s := auxToSym(v.Aux)
13760 if v_0.Op != OpARM64ADD {
13761 break
13762 }
13763 _ = v_0.Args[1]
13764 v_0_0 := v_0.Args[0]
13765 v_0_1 := v_0.Args[1]
13766 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
13767 ptr0 := v_0_0
13768 idx0 := v_0_1
13769 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 32 {
13770 continue
13771 }
13772 w := v_1.Args[0]
13773 x := v_2
13774 if x.Op != OpARM64MOVWstoreidx {
13775 continue
13776 }
13777 mem := x.Args[3]
13778 ptr1 := x.Args[0]
13779 idx1 := x.Args[1]
13780 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
13781 continue
13782 }
13783 v.reset(OpARM64MOVDstoreidx)
13784 v.AddArg4(ptr1, idx1, w, mem)
13785 return true
13786 }
13787 break
13788 }
13789
13790
13791
13792 for {
13793 if auxIntToInt32(v.AuxInt) != 4 {
13794 break
13795 }
13796 s := auxToSym(v.Aux)
13797 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
13798 break
13799 }
13800 idx0 := v_0.Args[1]
13801 ptr0 := v_0.Args[0]
13802 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 32 {
13803 break
13804 }
13805 w := v_1.Args[0]
13806 x := v_2
13807 if x.Op != OpARM64MOVWstoreidx4 {
13808 break
13809 }
13810 mem := x.Args[3]
13811 ptr1 := x.Args[0]
13812 idx1 := x.Args[1]
13813 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
13814 break
13815 }
13816 v.reset(OpARM64MOVDstoreidx)
13817 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
13818 v0.AuxInt = int64ToAuxInt(2)
13819 v0.AddArg(idx1)
13820 v.AddArg4(ptr1, v0, w, mem)
13821 return true
13822 }
13823
13824
13825
13826 for {
13827 i := auxIntToInt32(v.AuxInt)
13828 s := auxToSym(v.Aux)
13829 ptr0 := v_0
13830 if v_1.Op != OpARM64SRLconst {
13831 break
13832 }
13833 j := auxIntToInt64(v_1.AuxInt)
13834 w := v_1.Args[0]
13835 x := v_2
13836 if x.Op != OpARM64MOVWstore || auxIntToInt32(x.AuxInt) != i-4 || auxToSym(x.Aux) != s {
13837 break
13838 }
13839 mem := x.Args[2]
13840 ptr1 := x.Args[0]
13841 w0 := x.Args[1]
13842 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-32 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
13843 break
13844 }
13845 v.reset(OpARM64MOVDstore)
13846 v.AuxInt = int32ToAuxInt(i - 4)
13847 v.Aux = symToAux(s)
13848 v.AddArg3(ptr0, w0, mem)
13849 return true
13850 }
13851
13852
13853
13854 for {
13855 if auxIntToInt32(v.AuxInt) != 4 {
13856 break
13857 }
13858 s := auxToSym(v.Aux)
13859 if v_0.Op != OpARM64ADD {
13860 break
13861 }
13862 _ = v_0.Args[1]
13863 v_0_0 := v_0.Args[0]
13864 v_0_1 := v_0.Args[1]
13865 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
13866 ptr0 := v_0_0
13867 idx0 := v_0_1
13868 if v_1.Op != OpARM64SRLconst {
13869 continue
13870 }
13871 j := auxIntToInt64(v_1.AuxInt)
13872 w := v_1.Args[0]
13873 x := v_2
13874 if x.Op != OpARM64MOVWstoreidx {
13875 continue
13876 }
13877 mem := x.Args[3]
13878 ptr1 := x.Args[0]
13879 idx1 := x.Args[1]
13880 w0 := x.Args[2]
13881 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-32 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
13882 continue
13883 }
13884 v.reset(OpARM64MOVDstoreidx)
13885 v.AddArg4(ptr1, idx1, w0, mem)
13886 return true
13887 }
13888 break
13889 }
13890
13891
13892
13893 for {
13894 if auxIntToInt32(v.AuxInt) != 4 {
13895 break
13896 }
13897 s := auxToSym(v.Aux)
13898 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
13899 break
13900 }
13901 idx0 := v_0.Args[1]
13902 ptr0 := v_0.Args[0]
13903 if v_1.Op != OpARM64SRLconst {
13904 break
13905 }
13906 j := auxIntToInt64(v_1.AuxInt)
13907 w := v_1.Args[0]
13908 x := v_2
13909 if x.Op != OpARM64MOVWstoreidx4 {
13910 break
13911 }
13912 mem := x.Args[3]
13913 ptr1 := x.Args[0]
13914 idx1 := x.Args[1]
13915 w0 := x.Args[2]
13916 if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-32 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
13917 break
13918 }
13919 v.reset(OpARM64MOVDstoreidx)
13920 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
13921 v0.AuxInt = int64ToAuxInt(2)
13922 v0.AddArg(idx1)
13923 v.AddArg4(ptr1, v0, w0, mem)
13924 return true
13925 }
13926 return false
13927 }
13928 func rewriteValueARM64_OpARM64MOVWstoreidx(v *Value) bool {
13929 v_3 := v.Args[3]
13930 v_2 := v.Args[2]
13931 v_1 := v.Args[1]
13932 v_0 := v.Args[0]
13933
13934
13935
13936 for {
13937 ptr := v_0
13938 if v_1.Op != OpARM64MOVDconst {
13939 break
13940 }
13941 c := auxIntToInt64(v_1.AuxInt)
13942 val := v_2
13943 mem := v_3
13944 if !(is32Bit(c)) {
13945 break
13946 }
13947 v.reset(OpARM64MOVWstore)
13948 v.AuxInt = int32ToAuxInt(int32(c))
13949 v.AddArg3(ptr, val, mem)
13950 return true
13951 }
13952
13953
13954
13955 for {
13956 if v_0.Op != OpARM64MOVDconst {
13957 break
13958 }
13959 c := auxIntToInt64(v_0.AuxInt)
13960 idx := v_1
13961 val := v_2
13962 mem := v_3
13963 if !(is32Bit(c)) {
13964 break
13965 }
13966 v.reset(OpARM64MOVWstore)
13967 v.AuxInt = int32ToAuxInt(int32(c))
13968 v.AddArg3(idx, val, mem)
13969 return true
13970 }
13971
13972
13973 for {
13974 ptr := v_0
13975 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
13976 break
13977 }
13978 idx := v_1.Args[0]
13979 val := v_2
13980 mem := v_3
13981 v.reset(OpARM64MOVWstoreidx4)
13982 v.AddArg4(ptr, idx, val, mem)
13983 return true
13984 }
13985
13986
13987 for {
13988 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
13989 break
13990 }
13991 idx := v_0.Args[0]
13992 ptr := v_1
13993 val := v_2
13994 mem := v_3
13995 v.reset(OpARM64MOVWstoreidx4)
13996 v.AddArg4(ptr, idx, val, mem)
13997 return true
13998 }
13999
14000
14001 for {
14002 ptr := v_0
14003 idx := v_1
14004 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
14005 break
14006 }
14007 mem := v_3
14008 v.reset(OpARM64MOVWstorezeroidx)
14009 v.AddArg3(ptr, idx, mem)
14010 return true
14011 }
14012
14013
14014 for {
14015 ptr := v_0
14016 idx := v_1
14017 if v_2.Op != OpARM64MOVWreg {
14018 break
14019 }
14020 x := v_2.Args[0]
14021 mem := v_3
14022 v.reset(OpARM64MOVWstoreidx)
14023 v.AddArg4(ptr, idx, x, mem)
14024 return true
14025 }
14026
14027
14028 for {
14029 ptr := v_0
14030 idx := v_1
14031 if v_2.Op != OpARM64MOVWUreg {
14032 break
14033 }
14034 x := v_2.Args[0]
14035 mem := v_3
14036 v.reset(OpARM64MOVWstoreidx)
14037 v.AddArg4(ptr, idx, x, mem)
14038 return true
14039 }
14040
14041
14042
14043 for {
14044 ptr := v_0
14045 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 4 {
14046 break
14047 }
14048 idx := v_1.Args[0]
14049 if v_2.Op != OpARM64SRLconst || auxIntToInt64(v_2.AuxInt) != 32 {
14050 break
14051 }
14052 w := v_2.Args[0]
14053 x := v_3
14054 if x.Op != OpARM64MOVWstoreidx {
14055 break
14056 }
14057 mem := x.Args[3]
14058 if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) {
14059 break
14060 }
14061 v.reset(OpARM64MOVDstoreidx)
14062 v.AddArg4(ptr, idx, w, mem)
14063 return true
14064 }
14065 return false
14066 }
14067 func rewriteValueARM64_OpARM64MOVWstoreidx4(v *Value) bool {
14068 v_3 := v.Args[3]
14069 v_2 := v.Args[2]
14070 v_1 := v.Args[1]
14071 v_0 := v.Args[0]
14072
14073
14074
14075 for {
14076 ptr := v_0
14077 if v_1.Op != OpARM64MOVDconst {
14078 break
14079 }
14080 c := auxIntToInt64(v_1.AuxInt)
14081 val := v_2
14082 mem := v_3
14083 if !(is32Bit(c << 2)) {
14084 break
14085 }
14086 v.reset(OpARM64MOVWstore)
14087 v.AuxInt = int32ToAuxInt(int32(c) << 2)
14088 v.AddArg3(ptr, val, mem)
14089 return true
14090 }
14091
14092
14093 for {
14094 ptr := v_0
14095 idx := v_1
14096 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
14097 break
14098 }
14099 mem := v_3
14100 v.reset(OpARM64MOVWstorezeroidx4)
14101 v.AddArg3(ptr, idx, mem)
14102 return true
14103 }
14104
14105
14106 for {
14107 ptr := v_0
14108 idx := v_1
14109 if v_2.Op != OpARM64MOVWreg {
14110 break
14111 }
14112 x := v_2.Args[0]
14113 mem := v_3
14114 v.reset(OpARM64MOVWstoreidx4)
14115 v.AddArg4(ptr, idx, x, mem)
14116 return true
14117 }
14118
14119
14120 for {
14121 ptr := v_0
14122 idx := v_1
14123 if v_2.Op != OpARM64MOVWUreg {
14124 break
14125 }
14126 x := v_2.Args[0]
14127 mem := v_3
14128 v.reset(OpARM64MOVWstoreidx4)
14129 v.AddArg4(ptr, idx, x, mem)
14130 return true
14131 }
14132 return false
14133 }
14134 func rewriteValueARM64_OpARM64MOVWstorezero(v *Value) bool {
14135 v_1 := v.Args[1]
14136 v_0 := v.Args[0]
14137 b := v.Block
14138 config := b.Func.Config
14139
14140
14141
14142 for {
14143 off1 := auxIntToInt32(v.AuxInt)
14144 sym := auxToSym(v.Aux)
14145 if v_0.Op != OpARM64ADDconst {
14146 break
14147 }
14148 off2 := auxIntToInt64(v_0.AuxInt)
14149 ptr := v_0.Args[0]
14150 mem := v_1
14151 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
14152 break
14153 }
14154 v.reset(OpARM64MOVWstorezero)
14155 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
14156 v.Aux = symToAux(sym)
14157 v.AddArg2(ptr, mem)
14158 return true
14159 }
14160
14161
14162
14163 for {
14164 off1 := auxIntToInt32(v.AuxInt)
14165 sym1 := auxToSym(v.Aux)
14166 if v_0.Op != OpARM64MOVDaddr {
14167 break
14168 }
14169 off2 := auxIntToInt32(v_0.AuxInt)
14170 sym2 := auxToSym(v_0.Aux)
14171 ptr := v_0.Args[0]
14172 mem := v_1
14173 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
14174 break
14175 }
14176 v.reset(OpARM64MOVWstorezero)
14177 v.AuxInt = int32ToAuxInt(off1 + off2)
14178 v.Aux = symToAux(mergeSym(sym1, sym2))
14179 v.AddArg2(ptr, mem)
14180 return true
14181 }
14182
14183
14184
14185 for {
14186 off := auxIntToInt32(v.AuxInt)
14187 sym := auxToSym(v.Aux)
14188 if v_0.Op != OpARM64ADD {
14189 break
14190 }
14191 idx := v_0.Args[1]
14192 ptr := v_0.Args[0]
14193 mem := v_1
14194 if !(off == 0 && sym == nil) {
14195 break
14196 }
14197 v.reset(OpARM64MOVWstorezeroidx)
14198 v.AddArg3(ptr, idx, mem)
14199 return true
14200 }
14201
14202
14203
14204 for {
14205 off := auxIntToInt32(v.AuxInt)
14206 sym := auxToSym(v.Aux)
14207 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
14208 break
14209 }
14210 idx := v_0.Args[1]
14211 ptr := v_0.Args[0]
14212 mem := v_1
14213 if !(off == 0 && sym == nil) {
14214 break
14215 }
14216 v.reset(OpARM64MOVWstorezeroidx4)
14217 v.AddArg3(ptr, idx, mem)
14218 return true
14219 }
14220
14221
14222
14223 for {
14224 i := auxIntToInt32(v.AuxInt)
14225 s := auxToSym(v.Aux)
14226 ptr0 := v_0
14227 x := v_1
14228 if x.Op != OpARM64MOVWstorezero {
14229 break
14230 }
14231 j := auxIntToInt32(x.AuxInt)
14232 if auxToSym(x.Aux) != s {
14233 break
14234 }
14235 mem := x.Args[1]
14236 ptr1 := x.Args[0]
14237 if !(x.Uses == 1 && areAdjacentOffsets(int64(i), int64(j), 4) && isSamePtr(ptr0, ptr1) && clobber(x)) {
14238 break
14239 }
14240 v.reset(OpARM64MOVDstorezero)
14241 v.AuxInt = int32ToAuxInt(int32(min(int64(i), int64(j))))
14242 v.Aux = symToAux(s)
14243 v.AddArg2(ptr0, mem)
14244 return true
14245 }
14246
14247
14248
14249 for {
14250 if auxIntToInt32(v.AuxInt) != 4 {
14251 break
14252 }
14253 s := auxToSym(v.Aux)
14254 if v_0.Op != OpARM64ADD {
14255 break
14256 }
14257 _ = v_0.Args[1]
14258 v_0_0 := v_0.Args[0]
14259 v_0_1 := v_0.Args[1]
14260 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
14261 ptr0 := v_0_0
14262 idx0 := v_0_1
14263 x := v_1
14264 if x.Op != OpARM64MOVWstorezeroidx {
14265 continue
14266 }
14267 mem := x.Args[2]
14268 ptr1 := x.Args[0]
14269 idx1 := x.Args[1]
14270 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
14271 continue
14272 }
14273 v.reset(OpARM64MOVDstorezeroidx)
14274 v.AddArg3(ptr1, idx1, mem)
14275 return true
14276 }
14277 break
14278 }
14279
14280
14281
14282 for {
14283 if auxIntToInt32(v.AuxInt) != 4 {
14284 break
14285 }
14286 s := auxToSym(v.Aux)
14287 if v_0.Op != OpARM64ADDshiftLL || auxIntToInt64(v_0.AuxInt) != 2 {
14288 break
14289 }
14290 idx0 := v_0.Args[1]
14291 ptr0 := v_0.Args[0]
14292 x := v_1
14293 if x.Op != OpARM64MOVWstorezeroidx4 {
14294 break
14295 }
14296 mem := x.Args[2]
14297 ptr1 := x.Args[0]
14298 idx1 := x.Args[1]
14299 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
14300 break
14301 }
14302 v.reset(OpARM64MOVDstorezeroidx)
14303 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
14304 v0.AuxInt = int64ToAuxInt(2)
14305 v0.AddArg(idx1)
14306 v.AddArg3(ptr1, v0, mem)
14307 return true
14308 }
14309 return false
14310 }
14311 func rewriteValueARM64_OpARM64MOVWstorezeroidx(v *Value) bool {
14312 v_2 := v.Args[2]
14313 v_1 := v.Args[1]
14314 v_0 := v.Args[0]
14315
14316
14317
14318 for {
14319 ptr := v_0
14320 if v_1.Op != OpARM64MOVDconst {
14321 break
14322 }
14323 c := auxIntToInt64(v_1.AuxInt)
14324 mem := v_2
14325 if !(is32Bit(c)) {
14326 break
14327 }
14328 v.reset(OpARM64MOVWstorezero)
14329 v.AuxInt = int32ToAuxInt(int32(c))
14330 v.AddArg2(ptr, mem)
14331 return true
14332 }
14333
14334
14335
14336 for {
14337 if v_0.Op != OpARM64MOVDconst {
14338 break
14339 }
14340 c := auxIntToInt64(v_0.AuxInt)
14341 idx := v_1
14342 mem := v_2
14343 if !(is32Bit(c)) {
14344 break
14345 }
14346 v.reset(OpARM64MOVWstorezero)
14347 v.AuxInt = int32ToAuxInt(int32(c))
14348 v.AddArg2(idx, mem)
14349 return true
14350 }
14351
14352
14353 for {
14354 ptr := v_0
14355 if v_1.Op != OpARM64SLLconst || auxIntToInt64(v_1.AuxInt) != 2 {
14356 break
14357 }
14358 idx := v_1.Args[0]
14359 mem := v_2
14360 v.reset(OpARM64MOVWstorezeroidx4)
14361 v.AddArg3(ptr, idx, mem)
14362 return true
14363 }
14364
14365
14366 for {
14367 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 2 {
14368 break
14369 }
14370 idx := v_0.Args[0]
14371 ptr := v_1
14372 mem := v_2
14373 v.reset(OpARM64MOVWstorezeroidx4)
14374 v.AddArg3(ptr, idx, mem)
14375 return true
14376 }
14377
14378
14379
14380 for {
14381 ptr := v_0
14382 if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 4 {
14383 break
14384 }
14385 idx := v_1.Args[0]
14386 x := v_2
14387 if x.Op != OpARM64MOVWstorezeroidx {
14388 break
14389 }
14390 mem := x.Args[2]
14391 if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
14392 break
14393 }
14394 v.reset(OpARM64MOVDstorezeroidx)
14395 v.AddArg3(ptr, idx, mem)
14396 return true
14397 }
14398 return false
14399 }
14400 func rewriteValueARM64_OpARM64MOVWstorezeroidx4(v *Value) bool {
14401 v_2 := v.Args[2]
14402 v_1 := v.Args[1]
14403 v_0 := v.Args[0]
14404
14405
14406
14407 for {
14408 ptr := v_0
14409 if v_1.Op != OpARM64MOVDconst {
14410 break
14411 }
14412 c := auxIntToInt64(v_1.AuxInt)
14413 mem := v_2
14414 if !(is32Bit(c << 2)) {
14415 break
14416 }
14417 v.reset(OpARM64MOVWstorezero)
14418 v.AuxInt = int32ToAuxInt(int32(c << 2))
14419 v.AddArg2(ptr, mem)
14420 return true
14421 }
14422 return false
14423 }
14424 func rewriteValueARM64_OpARM64MSUB(v *Value) bool {
14425 v_2 := v.Args[2]
14426 v_1 := v.Args[1]
14427 v_0 := v.Args[0]
14428 b := v.Block
14429
14430
14431 for {
14432 a := v_0
14433 x := v_1
14434 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != -1 {
14435 break
14436 }
14437 v.reset(OpARM64ADD)
14438 v.AddArg2(a, x)
14439 return true
14440 }
14441
14442
14443 for {
14444 a := v_0
14445 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
14446 break
14447 }
14448 v.copyOf(a)
14449 return true
14450 }
14451
14452
14453 for {
14454 a := v_0
14455 x := v_1
14456 if v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 1 {
14457 break
14458 }
14459 v.reset(OpARM64SUB)
14460 v.AddArg2(a, x)
14461 return true
14462 }
14463
14464
14465
14466 for {
14467 a := v_0
14468 x := v_1
14469 if v_2.Op != OpARM64MOVDconst {
14470 break
14471 }
14472 c := auxIntToInt64(v_2.AuxInt)
14473 if !(isPowerOfTwo64(c)) {
14474 break
14475 }
14476 v.reset(OpARM64SUBshiftLL)
14477 v.AuxInt = int64ToAuxInt(log64(c))
14478 v.AddArg2(a, x)
14479 return true
14480 }
14481
14482
14483
14484 for {
14485 a := v_0
14486 x := v_1
14487 if v_2.Op != OpARM64MOVDconst {
14488 break
14489 }
14490 c := auxIntToInt64(v_2.AuxInt)
14491 if !(isPowerOfTwo64(c-1) && c >= 3) {
14492 break
14493 }
14494 v.reset(OpARM64SUB)
14495 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14496 v0.AuxInt = int64ToAuxInt(log64(c - 1))
14497 v0.AddArg2(x, x)
14498 v.AddArg2(a, v0)
14499 return true
14500 }
14501
14502
14503
14504 for {
14505 a := v_0
14506 x := v_1
14507 if v_2.Op != OpARM64MOVDconst {
14508 break
14509 }
14510 c := auxIntToInt64(v_2.AuxInt)
14511 if !(isPowerOfTwo64(c+1) && c >= 7) {
14512 break
14513 }
14514 v.reset(OpARM64ADD)
14515 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14516 v0.AuxInt = int64ToAuxInt(log64(c + 1))
14517 v0.AddArg2(x, x)
14518 v.AddArg2(a, v0)
14519 return true
14520 }
14521
14522
14523
14524 for {
14525 a := v_0
14526 x := v_1
14527 if v_2.Op != OpARM64MOVDconst {
14528 break
14529 }
14530 c := auxIntToInt64(v_2.AuxInt)
14531 if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
14532 break
14533 }
14534 v.reset(OpARM64ADDshiftLL)
14535 v.AuxInt = int64ToAuxInt(log64(c / 3))
14536 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14537 v0.AuxInt = int64ToAuxInt(2)
14538 v0.AddArg2(x, x)
14539 v.AddArg2(a, v0)
14540 return true
14541 }
14542
14543
14544
14545 for {
14546 a := v_0
14547 x := v_1
14548 if v_2.Op != OpARM64MOVDconst {
14549 break
14550 }
14551 c := auxIntToInt64(v_2.AuxInt)
14552 if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
14553 break
14554 }
14555 v.reset(OpARM64SUBshiftLL)
14556 v.AuxInt = int64ToAuxInt(log64(c / 5))
14557 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14558 v0.AuxInt = int64ToAuxInt(2)
14559 v0.AddArg2(x, x)
14560 v.AddArg2(a, v0)
14561 return true
14562 }
14563
14564
14565
14566 for {
14567 a := v_0
14568 x := v_1
14569 if v_2.Op != OpARM64MOVDconst {
14570 break
14571 }
14572 c := auxIntToInt64(v_2.AuxInt)
14573 if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
14574 break
14575 }
14576 v.reset(OpARM64ADDshiftLL)
14577 v.AuxInt = int64ToAuxInt(log64(c / 7))
14578 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14579 v0.AuxInt = int64ToAuxInt(3)
14580 v0.AddArg2(x, x)
14581 v.AddArg2(a, v0)
14582 return true
14583 }
14584
14585
14586
14587 for {
14588 a := v_0
14589 x := v_1
14590 if v_2.Op != OpARM64MOVDconst {
14591 break
14592 }
14593 c := auxIntToInt64(v_2.AuxInt)
14594 if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
14595 break
14596 }
14597 v.reset(OpARM64SUBshiftLL)
14598 v.AuxInt = int64ToAuxInt(log64(c / 9))
14599 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14600 v0.AuxInt = int64ToAuxInt(3)
14601 v0.AddArg2(x, x)
14602 v.AddArg2(a, v0)
14603 return true
14604 }
14605
14606
14607 for {
14608 a := v_0
14609 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
14610 break
14611 }
14612 x := v_2
14613 v.reset(OpARM64ADD)
14614 v.AddArg2(a, x)
14615 return true
14616 }
14617
14618
14619 for {
14620 a := v_0
14621 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
14622 break
14623 }
14624 v.copyOf(a)
14625 return true
14626 }
14627
14628
14629 for {
14630 a := v_0
14631 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
14632 break
14633 }
14634 x := v_2
14635 v.reset(OpARM64SUB)
14636 v.AddArg2(a, x)
14637 return true
14638 }
14639
14640
14641
14642 for {
14643 a := v_0
14644 if v_1.Op != OpARM64MOVDconst {
14645 break
14646 }
14647 c := auxIntToInt64(v_1.AuxInt)
14648 x := v_2
14649 if !(isPowerOfTwo64(c)) {
14650 break
14651 }
14652 v.reset(OpARM64SUBshiftLL)
14653 v.AuxInt = int64ToAuxInt(log64(c))
14654 v.AddArg2(a, x)
14655 return true
14656 }
14657
14658
14659
14660 for {
14661 a := v_0
14662 if v_1.Op != OpARM64MOVDconst {
14663 break
14664 }
14665 c := auxIntToInt64(v_1.AuxInt)
14666 x := v_2
14667 if !(isPowerOfTwo64(c-1) && c >= 3) {
14668 break
14669 }
14670 v.reset(OpARM64SUB)
14671 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14672 v0.AuxInt = int64ToAuxInt(log64(c - 1))
14673 v0.AddArg2(x, x)
14674 v.AddArg2(a, v0)
14675 return true
14676 }
14677
14678
14679
14680 for {
14681 a := v_0
14682 if v_1.Op != OpARM64MOVDconst {
14683 break
14684 }
14685 c := auxIntToInt64(v_1.AuxInt)
14686 x := v_2
14687 if !(isPowerOfTwo64(c+1) && c >= 7) {
14688 break
14689 }
14690 v.reset(OpARM64ADD)
14691 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14692 v0.AuxInt = int64ToAuxInt(log64(c + 1))
14693 v0.AddArg2(x, x)
14694 v.AddArg2(a, v0)
14695 return true
14696 }
14697
14698
14699
14700 for {
14701 a := v_0
14702 if v_1.Op != OpARM64MOVDconst {
14703 break
14704 }
14705 c := auxIntToInt64(v_1.AuxInt)
14706 x := v_2
14707 if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
14708 break
14709 }
14710 v.reset(OpARM64ADDshiftLL)
14711 v.AuxInt = int64ToAuxInt(log64(c / 3))
14712 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14713 v0.AuxInt = int64ToAuxInt(2)
14714 v0.AddArg2(x, x)
14715 v.AddArg2(a, v0)
14716 return true
14717 }
14718
14719
14720
14721 for {
14722 a := v_0
14723 if v_1.Op != OpARM64MOVDconst {
14724 break
14725 }
14726 c := auxIntToInt64(v_1.AuxInt)
14727 x := v_2
14728 if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
14729 break
14730 }
14731 v.reset(OpARM64SUBshiftLL)
14732 v.AuxInt = int64ToAuxInt(log64(c / 5))
14733 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14734 v0.AuxInt = int64ToAuxInt(2)
14735 v0.AddArg2(x, x)
14736 v.AddArg2(a, v0)
14737 return true
14738 }
14739
14740
14741
14742 for {
14743 a := v_0
14744 if v_1.Op != OpARM64MOVDconst {
14745 break
14746 }
14747 c := auxIntToInt64(v_1.AuxInt)
14748 x := v_2
14749 if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
14750 break
14751 }
14752 v.reset(OpARM64ADDshiftLL)
14753 v.AuxInt = int64ToAuxInt(log64(c / 7))
14754 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14755 v0.AuxInt = int64ToAuxInt(3)
14756 v0.AddArg2(x, x)
14757 v.AddArg2(a, v0)
14758 return true
14759 }
14760
14761
14762
14763 for {
14764 a := v_0
14765 if v_1.Op != OpARM64MOVDconst {
14766 break
14767 }
14768 c := auxIntToInt64(v_1.AuxInt)
14769 x := v_2
14770 if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
14771 break
14772 }
14773 v.reset(OpARM64SUBshiftLL)
14774 v.AuxInt = int64ToAuxInt(log64(c / 9))
14775 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14776 v0.AuxInt = int64ToAuxInt(3)
14777 v0.AddArg2(x, x)
14778 v.AddArg2(a, v0)
14779 return true
14780 }
14781
14782
14783 for {
14784 if v_0.Op != OpARM64MOVDconst {
14785 break
14786 }
14787 c := auxIntToInt64(v_0.AuxInt)
14788 x := v_1
14789 y := v_2
14790 v.reset(OpARM64ADDconst)
14791 v.AuxInt = int64ToAuxInt(c)
14792 v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type)
14793 v0.AddArg2(x, y)
14794 v.AddArg(v0)
14795 return true
14796 }
14797
14798
14799 for {
14800 a := v_0
14801 if v_1.Op != OpARM64MOVDconst {
14802 break
14803 }
14804 c := auxIntToInt64(v_1.AuxInt)
14805 if v_2.Op != OpARM64MOVDconst {
14806 break
14807 }
14808 d := auxIntToInt64(v_2.AuxInt)
14809 v.reset(OpARM64SUBconst)
14810 v.AuxInt = int64ToAuxInt(c * d)
14811 v.AddArg(a)
14812 return true
14813 }
14814 return false
14815 }
14816 func rewriteValueARM64_OpARM64MSUBW(v *Value) bool {
14817 v_2 := v.Args[2]
14818 v_1 := v.Args[1]
14819 v_0 := v.Args[0]
14820 b := v.Block
14821
14822
14823
14824 for {
14825 a := v_0
14826 x := v_1
14827 if v_2.Op != OpARM64MOVDconst {
14828 break
14829 }
14830 c := auxIntToInt64(v_2.AuxInt)
14831 if !(int32(c) == -1) {
14832 break
14833 }
14834 v.reset(OpARM64ADD)
14835 v.AddArg2(a, x)
14836 return true
14837 }
14838
14839
14840
14841 for {
14842 a := v_0
14843 if v_2.Op != OpARM64MOVDconst {
14844 break
14845 }
14846 c := auxIntToInt64(v_2.AuxInt)
14847 if !(int32(c) == 0) {
14848 break
14849 }
14850 v.copyOf(a)
14851 return true
14852 }
14853
14854
14855
14856 for {
14857 a := v_0
14858 x := v_1
14859 if v_2.Op != OpARM64MOVDconst {
14860 break
14861 }
14862 c := auxIntToInt64(v_2.AuxInt)
14863 if !(int32(c) == 1) {
14864 break
14865 }
14866 v.reset(OpARM64SUB)
14867 v.AddArg2(a, x)
14868 return true
14869 }
14870
14871
14872
14873 for {
14874 a := v_0
14875 x := v_1
14876 if v_2.Op != OpARM64MOVDconst {
14877 break
14878 }
14879 c := auxIntToInt64(v_2.AuxInt)
14880 if !(isPowerOfTwo64(c)) {
14881 break
14882 }
14883 v.reset(OpARM64SUBshiftLL)
14884 v.AuxInt = int64ToAuxInt(log64(c))
14885 v.AddArg2(a, x)
14886 return true
14887 }
14888
14889
14890
14891 for {
14892 a := v_0
14893 x := v_1
14894 if v_2.Op != OpARM64MOVDconst {
14895 break
14896 }
14897 c := auxIntToInt64(v_2.AuxInt)
14898 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
14899 break
14900 }
14901 v.reset(OpARM64SUB)
14902 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14903 v0.AuxInt = int64ToAuxInt(log64(c - 1))
14904 v0.AddArg2(x, x)
14905 v.AddArg2(a, v0)
14906 return true
14907 }
14908
14909
14910
14911 for {
14912 a := v_0
14913 x := v_1
14914 if v_2.Op != OpARM64MOVDconst {
14915 break
14916 }
14917 c := auxIntToInt64(v_2.AuxInt)
14918 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
14919 break
14920 }
14921 v.reset(OpARM64ADD)
14922 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14923 v0.AuxInt = int64ToAuxInt(log64(c + 1))
14924 v0.AddArg2(x, x)
14925 v.AddArg2(a, v0)
14926 return true
14927 }
14928
14929
14930
14931 for {
14932 a := v_0
14933 x := v_1
14934 if v_2.Op != OpARM64MOVDconst {
14935 break
14936 }
14937 c := auxIntToInt64(v_2.AuxInt)
14938 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
14939 break
14940 }
14941 v.reset(OpARM64ADDshiftLL)
14942 v.AuxInt = int64ToAuxInt(log64(c / 3))
14943 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14944 v0.AuxInt = int64ToAuxInt(2)
14945 v0.AddArg2(x, x)
14946 v.AddArg2(a, v0)
14947 return true
14948 }
14949
14950
14951
14952 for {
14953 a := v_0
14954 x := v_1
14955 if v_2.Op != OpARM64MOVDconst {
14956 break
14957 }
14958 c := auxIntToInt64(v_2.AuxInt)
14959 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
14960 break
14961 }
14962 v.reset(OpARM64SUBshiftLL)
14963 v.AuxInt = int64ToAuxInt(log64(c / 5))
14964 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
14965 v0.AuxInt = int64ToAuxInt(2)
14966 v0.AddArg2(x, x)
14967 v.AddArg2(a, v0)
14968 return true
14969 }
14970
14971
14972
14973 for {
14974 a := v_0
14975 x := v_1
14976 if v_2.Op != OpARM64MOVDconst {
14977 break
14978 }
14979 c := auxIntToInt64(v_2.AuxInt)
14980 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
14981 break
14982 }
14983 v.reset(OpARM64ADDshiftLL)
14984 v.AuxInt = int64ToAuxInt(log64(c / 7))
14985 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
14986 v0.AuxInt = int64ToAuxInt(3)
14987 v0.AddArg2(x, x)
14988 v.AddArg2(a, v0)
14989 return true
14990 }
14991
14992
14993
14994 for {
14995 a := v_0
14996 x := v_1
14997 if v_2.Op != OpARM64MOVDconst {
14998 break
14999 }
15000 c := auxIntToInt64(v_2.AuxInt)
15001 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
15002 break
15003 }
15004 v.reset(OpARM64SUBshiftLL)
15005 v.AuxInt = int64ToAuxInt(log64(c / 9))
15006 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15007 v0.AuxInt = int64ToAuxInt(3)
15008 v0.AddArg2(x, x)
15009 v.AddArg2(a, v0)
15010 return true
15011 }
15012
15013
15014
15015 for {
15016 a := v_0
15017 if v_1.Op != OpARM64MOVDconst {
15018 break
15019 }
15020 c := auxIntToInt64(v_1.AuxInt)
15021 x := v_2
15022 if !(int32(c) == -1) {
15023 break
15024 }
15025 v.reset(OpARM64ADD)
15026 v.AddArg2(a, x)
15027 return true
15028 }
15029
15030
15031
15032 for {
15033 a := v_0
15034 if v_1.Op != OpARM64MOVDconst {
15035 break
15036 }
15037 c := auxIntToInt64(v_1.AuxInt)
15038 if !(int32(c) == 0) {
15039 break
15040 }
15041 v.copyOf(a)
15042 return true
15043 }
15044
15045
15046
15047 for {
15048 a := v_0
15049 if v_1.Op != OpARM64MOVDconst {
15050 break
15051 }
15052 c := auxIntToInt64(v_1.AuxInt)
15053 x := v_2
15054 if !(int32(c) == 1) {
15055 break
15056 }
15057 v.reset(OpARM64SUB)
15058 v.AddArg2(a, x)
15059 return true
15060 }
15061
15062
15063
15064 for {
15065 a := v_0
15066 if v_1.Op != OpARM64MOVDconst {
15067 break
15068 }
15069 c := auxIntToInt64(v_1.AuxInt)
15070 x := v_2
15071 if !(isPowerOfTwo64(c)) {
15072 break
15073 }
15074 v.reset(OpARM64SUBshiftLL)
15075 v.AuxInt = int64ToAuxInt(log64(c))
15076 v.AddArg2(a, x)
15077 return true
15078 }
15079
15080
15081
15082 for {
15083 a := v_0
15084 if v_1.Op != OpARM64MOVDconst {
15085 break
15086 }
15087 c := auxIntToInt64(v_1.AuxInt)
15088 x := v_2
15089 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
15090 break
15091 }
15092 v.reset(OpARM64SUB)
15093 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15094 v0.AuxInt = int64ToAuxInt(log64(c - 1))
15095 v0.AddArg2(x, x)
15096 v.AddArg2(a, v0)
15097 return true
15098 }
15099
15100
15101
15102 for {
15103 a := v_0
15104 if v_1.Op != OpARM64MOVDconst {
15105 break
15106 }
15107 c := auxIntToInt64(v_1.AuxInt)
15108 x := v_2
15109 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
15110 break
15111 }
15112 v.reset(OpARM64ADD)
15113 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
15114 v0.AuxInt = int64ToAuxInt(log64(c + 1))
15115 v0.AddArg2(x, x)
15116 v.AddArg2(a, v0)
15117 return true
15118 }
15119
15120
15121
15122 for {
15123 a := v_0
15124 if v_1.Op != OpARM64MOVDconst {
15125 break
15126 }
15127 c := auxIntToInt64(v_1.AuxInt)
15128 x := v_2
15129 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
15130 break
15131 }
15132 v.reset(OpARM64ADDshiftLL)
15133 v.AuxInt = int64ToAuxInt(log64(c / 3))
15134 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
15135 v0.AuxInt = int64ToAuxInt(2)
15136 v0.AddArg2(x, x)
15137 v.AddArg2(a, v0)
15138 return true
15139 }
15140
15141
15142
15143 for {
15144 a := v_0
15145 if v_1.Op != OpARM64MOVDconst {
15146 break
15147 }
15148 c := auxIntToInt64(v_1.AuxInt)
15149 x := v_2
15150 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
15151 break
15152 }
15153 v.reset(OpARM64SUBshiftLL)
15154 v.AuxInt = int64ToAuxInt(log64(c / 5))
15155 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15156 v0.AuxInt = int64ToAuxInt(2)
15157 v0.AddArg2(x, x)
15158 v.AddArg2(a, v0)
15159 return true
15160 }
15161
15162
15163
15164 for {
15165 a := v_0
15166 if v_1.Op != OpARM64MOVDconst {
15167 break
15168 }
15169 c := auxIntToInt64(v_1.AuxInt)
15170 x := v_2
15171 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
15172 break
15173 }
15174 v.reset(OpARM64ADDshiftLL)
15175 v.AuxInt = int64ToAuxInt(log64(c / 7))
15176 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
15177 v0.AuxInt = int64ToAuxInt(3)
15178 v0.AddArg2(x, x)
15179 v.AddArg2(a, v0)
15180 return true
15181 }
15182
15183
15184
15185 for {
15186 a := v_0
15187 if v_1.Op != OpARM64MOVDconst {
15188 break
15189 }
15190 c := auxIntToInt64(v_1.AuxInt)
15191 x := v_2
15192 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
15193 break
15194 }
15195 v.reset(OpARM64SUBshiftLL)
15196 v.AuxInt = int64ToAuxInt(log64(c / 9))
15197 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15198 v0.AuxInt = int64ToAuxInt(3)
15199 v0.AddArg2(x, x)
15200 v.AddArg2(a, v0)
15201 return true
15202 }
15203
15204
15205 for {
15206 if v_0.Op != OpARM64MOVDconst {
15207 break
15208 }
15209 c := auxIntToInt64(v_0.AuxInt)
15210 x := v_1
15211 y := v_2
15212 v.reset(OpARM64ADDconst)
15213 v.AuxInt = int64ToAuxInt(c)
15214 v0 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type)
15215 v0.AddArg2(x, y)
15216 v.AddArg(v0)
15217 return true
15218 }
15219
15220
15221 for {
15222 a := v_0
15223 if v_1.Op != OpARM64MOVDconst {
15224 break
15225 }
15226 c := auxIntToInt64(v_1.AuxInt)
15227 if v_2.Op != OpARM64MOVDconst {
15228 break
15229 }
15230 d := auxIntToInt64(v_2.AuxInt)
15231 v.reset(OpARM64SUBconst)
15232 v.AuxInt = int64ToAuxInt(int64(int32(c) * int32(d)))
15233 v.AddArg(a)
15234 return true
15235 }
15236 return false
15237 }
15238 func rewriteValueARM64_OpARM64MUL(v *Value) bool {
15239 v_1 := v.Args[1]
15240 v_0 := v.Args[0]
15241 b := v.Block
15242
15243
15244 for {
15245 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15246 if v_0.Op != OpARM64NEG {
15247 continue
15248 }
15249 x := v_0.Args[0]
15250 y := v_1
15251 v.reset(OpARM64MNEG)
15252 v.AddArg2(x, y)
15253 return true
15254 }
15255 break
15256 }
15257
15258
15259 for {
15260 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15261 x := v_0
15262 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
15263 continue
15264 }
15265 v.reset(OpARM64NEG)
15266 v.AddArg(x)
15267 return true
15268 }
15269 break
15270 }
15271
15272
15273 for {
15274 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15275 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
15276 continue
15277 }
15278 v.reset(OpARM64MOVDconst)
15279 v.AuxInt = int64ToAuxInt(0)
15280 return true
15281 }
15282 break
15283 }
15284
15285
15286 for {
15287 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15288 x := v_0
15289 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
15290 continue
15291 }
15292 v.copyOf(x)
15293 return true
15294 }
15295 break
15296 }
15297
15298
15299
15300 for {
15301 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15302 x := v_0
15303 if v_1.Op != OpARM64MOVDconst {
15304 continue
15305 }
15306 c := auxIntToInt64(v_1.AuxInt)
15307 if !(isPowerOfTwo64(c)) {
15308 continue
15309 }
15310 v.reset(OpARM64SLLconst)
15311 v.AuxInt = int64ToAuxInt(log64(c))
15312 v.AddArg(x)
15313 return true
15314 }
15315 break
15316 }
15317
15318
15319
15320 for {
15321 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15322 x := v_0
15323 if v_1.Op != OpARM64MOVDconst {
15324 continue
15325 }
15326 c := auxIntToInt64(v_1.AuxInt)
15327 if !(isPowerOfTwo64(c-1) && c >= 3) {
15328 continue
15329 }
15330 v.reset(OpARM64ADDshiftLL)
15331 v.AuxInt = int64ToAuxInt(log64(c - 1))
15332 v.AddArg2(x, x)
15333 return true
15334 }
15335 break
15336 }
15337
15338
15339
15340 for {
15341 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15342 x := v_0
15343 if v_1.Op != OpARM64MOVDconst {
15344 continue
15345 }
15346 c := auxIntToInt64(v_1.AuxInt)
15347 if !(isPowerOfTwo64(c+1) && c >= 7) {
15348 continue
15349 }
15350 v.reset(OpARM64ADDshiftLL)
15351 v.AuxInt = int64ToAuxInt(log64(c + 1))
15352 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
15353 v0.AddArg(x)
15354 v.AddArg2(v0, x)
15355 return true
15356 }
15357 break
15358 }
15359
15360
15361
15362 for {
15363 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15364 x := v_0
15365 if v_1.Op != OpARM64MOVDconst {
15366 continue
15367 }
15368 c := auxIntToInt64(v_1.AuxInt)
15369 if !(c%3 == 0 && isPowerOfTwo64(c/3)) {
15370 continue
15371 }
15372 v.reset(OpARM64SLLconst)
15373 v.AuxInt = int64ToAuxInt(log64(c / 3))
15374 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15375 v0.AuxInt = int64ToAuxInt(1)
15376 v0.AddArg2(x, x)
15377 v.AddArg(v0)
15378 return true
15379 }
15380 break
15381 }
15382
15383
15384
15385 for {
15386 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15387 x := v_0
15388 if v_1.Op != OpARM64MOVDconst {
15389 continue
15390 }
15391 c := auxIntToInt64(v_1.AuxInt)
15392 if !(c%5 == 0 && isPowerOfTwo64(c/5)) {
15393 continue
15394 }
15395 v.reset(OpARM64SLLconst)
15396 v.AuxInt = int64ToAuxInt(log64(c / 5))
15397 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15398 v0.AuxInt = int64ToAuxInt(2)
15399 v0.AddArg2(x, x)
15400 v.AddArg(v0)
15401 return true
15402 }
15403 break
15404 }
15405
15406
15407
15408 for {
15409 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15410 x := v_0
15411 if v_1.Op != OpARM64MOVDconst {
15412 continue
15413 }
15414 c := auxIntToInt64(v_1.AuxInt)
15415 if !(c%7 == 0 && isPowerOfTwo64(c/7)) {
15416 continue
15417 }
15418 v.reset(OpARM64SLLconst)
15419 v.AuxInt = int64ToAuxInt(log64(c / 7))
15420 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15421 v0.AuxInt = int64ToAuxInt(3)
15422 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
15423 v1.AddArg(x)
15424 v0.AddArg2(v1, x)
15425 v.AddArg(v0)
15426 return true
15427 }
15428 break
15429 }
15430
15431
15432
15433 for {
15434 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15435 x := v_0
15436 if v_1.Op != OpARM64MOVDconst {
15437 continue
15438 }
15439 c := auxIntToInt64(v_1.AuxInt)
15440 if !(c%9 == 0 && isPowerOfTwo64(c/9)) {
15441 continue
15442 }
15443 v.reset(OpARM64SLLconst)
15444 v.AuxInt = int64ToAuxInt(log64(c / 9))
15445 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15446 v0.AuxInt = int64ToAuxInt(3)
15447 v0.AddArg2(x, x)
15448 v.AddArg(v0)
15449 return true
15450 }
15451 break
15452 }
15453
15454
15455 for {
15456 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15457 if v_0.Op != OpARM64MOVDconst {
15458 continue
15459 }
15460 c := auxIntToInt64(v_0.AuxInt)
15461 if v_1.Op != OpARM64MOVDconst {
15462 continue
15463 }
15464 d := auxIntToInt64(v_1.AuxInt)
15465 v.reset(OpARM64MOVDconst)
15466 v.AuxInt = int64ToAuxInt(c * d)
15467 return true
15468 }
15469 break
15470 }
15471 return false
15472 }
15473 func rewriteValueARM64_OpARM64MULW(v *Value) bool {
15474 v_1 := v.Args[1]
15475 v_0 := v.Args[0]
15476 b := v.Block
15477
15478
15479 for {
15480 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15481 if v_0.Op != OpARM64NEG {
15482 continue
15483 }
15484 x := v_0.Args[0]
15485 y := v_1
15486 v.reset(OpARM64MNEGW)
15487 v.AddArg2(x, y)
15488 return true
15489 }
15490 break
15491 }
15492
15493
15494
15495 for {
15496 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15497 x := v_0
15498 if v_1.Op != OpARM64MOVDconst {
15499 continue
15500 }
15501 c := auxIntToInt64(v_1.AuxInt)
15502 if !(int32(c) == -1) {
15503 continue
15504 }
15505 v.reset(OpARM64NEG)
15506 v.AddArg(x)
15507 return true
15508 }
15509 break
15510 }
15511
15512
15513
15514 for {
15515 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15516 if v_1.Op != OpARM64MOVDconst {
15517 continue
15518 }
15519 c := auxIntToInt64(v_1.AuxInt)
15520 if !(int32(c) == 0) {
15521 continue
15522 }
15523 v.reset(OpARM64MOVDconst)
15524 v.AuxInt = int64ToAuxInt(0)
15525 return true
15526 }
15527 break
15528 }
15529
15530
15531
15532 for {
15533 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15534 x := v_0
15535 if v_1.Op != OpARM64MOVDconst {
15536 continue
15537 }
15538 c := auxIntToInt64(v_1.AuxInt)
15539 if !(int32(c) == 1) {
15540 continue
15541 }
15542 v.copyOf(x)
15543 return true
15544 }
15545 break
15546 }
15547
15548
15549
15550 for {
15551 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15552 x := v_0
15553 if v_1.Op != OpARM64MOVDconst {
15554 continue
15555 }
15556 c := auxIntToInt64(v_1.AuxInt)
15557 if !(isPowerOfTwo64(c)) {
15558 continue
15559 }
15560 v.reset(OpARM64SLLconst)
15561 v.AuxInt = int64ToAuxInt(log64(c))
15562 v.AddArg(x)
15563 return true
15564 }
15565 break
15566 }
15567
15568
15569
15570 for {
15571 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15572 x := v_0
15573 if v_1.Op != OpARM64MOVDconst {
15574 continue
15575 }
15576 c := auxIntToInt64(v_1.AuxInt)
15577 if !(isPowerOfTwo64(c-1) && int32(c) >= 3) {
15578 continue
15579 }
15580 v.reset(OpARM64ADDshiftLL)
15581 v.AuxInt = int64ToAuxInt(log64(c - 1))
15582 v.AddArg2(x, x)
15583 return true
15584 }
15585 break
15586 }
15587
15588
15589
15590 for {
15591 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15592 x := v_0
15593 if v_1.Op != OpARM64MOVDconst {
15594 continue
15595 }
15596 c := auxIntToInt64(v_1.AuxInt)
15597 if !(isPowerOfTwo64(c+1) && int32(c) >= 7) {
15598 continue
15599 }
15600 v.reset(OpARM64ADDshiftLL)
15601 v.AuxInt = int64ToAuxInt(log64(c + 1))
15602 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
15603 v0.AddArg(x)
15604 v.AddArg2(v0, x)
15605 return true
15606 }
15607 break
15608 }
15609
15610
15611
15612 for {
15613 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15614 x := v_0
15615 if v_1.Op != OpARM64MOVDconst {
15616 continue
15617 }
15618 c := auxIntToInt64(v_1.AuxInt)
15619 if !(c%3 == 0 && isPowerOfTwo64(c/3) && is32Bit(c)) {
15620 continue
15621 }
15622 v.reset(OpARM64SLLconst)
15623 v.AuxInt = int64ToAuxInt(log64(c / 3))
15624 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15625 v0.AuxInt = int64ToAuxInt(1)
15626 v0.AddArg2(x, x)
15627 v.AddArg(v0)
15628 return true
15629 }
15630 break
15631 }
15632
15633
15634
15635 for {
15636 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15637 x := v_0
15638 if v_1.Op != OpARM64MOVDconst {
15639 continue
15640 }
15641 c := auxIntToInt64(v_1.AuxInt)
15642 if !(c%5 == 0 && isPowerOfTwo64(c/5) && is32Bit(c)) {
15643 continue
15644 }
15645 v.reset(OpARM64SLLconst)
15646 v.AuxInt = int64ToAuxInt(log64(c / 5))
15647 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15648 v0.AuxInt = int64ToAuxInt(2)
15649 v0.AddArg2(x, x)
15650 v.AddArg(v0)
15651 return true
15652 }
15653 break
15654 }
15655
15656
15657
15658 for {
15659 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15660 x := v_0
15661 if v_1.Op != OpARM64MOVDconst {
15662 continue
15663 }
15664 c := auxIntToInt64(v_1.AuxInt)
15665 if !(c%7 == 0 && isPowerOfTwo64(c/7) && is32Bit(c)) {
15666 continue
15667 }
15668 v.reset(OpARM64SLLconst)
15669 v.AuxInt = int64ToAuxInt(log64(c / 7))
15670 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15671 v0.AuxInt = int64ToAuxInt(3)
15672 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
15673 v1.AddArg(x)
15674 v0.AddArg2(v1, x)
15675 v.AddArg(v0)
15676 return true
15677 }
15678 break
15679 }
15680
15681
15682
15683 for {
15684 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15685 x := v_0
15686 if v_1.Op != OpARM64MOVDconst {
15687 continue
15688 }
15689 c := auxIntToInt64(v_1.AuxInt)
15690 if !(c%9 == 0 && isPowerOfTwo64(c/9) && is32Bit(c)) {
15691 continue
15692 }
15693 v.reset(OpARM64SLLconst)
15694 v.AuxInt = int64ToAuxInt(log64(c / 9))
15695 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
15696 v0.AuxInt = int64ToAuxInt(3)
15697 v0.AddArg2(x, x)
15698 v.AddArg(v0)
15699 return true
15700 }
15701 break
15702 }
15703
15704
15705 for {
15706 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
15707 if v_0.Op != OpARM64MOVDconst {
15708 continue
15709 }
15710 c := auxIntToInt64(v_0.AuxInt)
15711 if v_1.Op != OpARM64MOVDconst {
15712 continue
15713 }
15714 d := auxIntToInt64(v_1.AuxInt)
15715 v.reset(OpARM64MOVDconst)
15716 v.AuxInt = int64ToAuxInt(int64(int32(c) * int32(d)))
15717 return true
15718 }
15719 break
15720 }
15721 return false
15722 }
15723 func rewriteValueARM64_OpARM64MVN(v *Value) bool {
15724 v_0 := v.Args[0]
15725
15726
15727 for {
15728 if v_0.Op != OpARM64XOR {
15729 break
15730 }
15731 y := v_0.Args[1]
15732 x := v_0.Args[0]
15733 v.reset(OpARM64EON)
15734 v.AddArg2(x, y)
15735 return true
15736 }
15737
15738
15739 for {
15740 if v_0.Op != OpARM64MOVDconst {
15741 break
15742 }
15743 c := auxIntToInt64(v_0.AuxInt)
15744 v.reset(OpARM64MOVDconst)
15745 v.AuxInt = int64ToAuxInt(^c)
15746 return true
15747 }
15748
15749
15750
15751 for {
15752 x := v_0
15753 if x.Op != OpARM64SLLconst {
15754 break
15755 }
15756 c := auxIntToInt64(x.AuxInt)
15757 y := x.Args[0]
15758 if !(clobberIfDead(x)) {
15759 break
15760 }
15761 v.reset(OpARM64MVNshiftLL)
15762 v.AuxInt = int64ToAuxInt(c)
15763 v.AddArg(y)
15764 return true
15765 }
15766
15767
15768
15769 for {
15770 x := v_0
15771 if x.Op != OpARM64SRLconst {
15772 break
15773 }
15774 c := auxIntToInt64(x.AuxInt)
15775 y := x.Args[0]
15776 if !(clobberIfDead(x)) {
15777 break
15778 }
15779 v.reset(OpARM64MVNshiftRL)
15780 v.AuxInt = int64ToAuxInt(c)
15781 v.AddArg(y)
15782 return true
15783 }
15784
15785
15786
15787 for {
15788 x := v_0
15789 if x.Op != OpARM64SRAconst {
15790 break
15791 }
15792 c := auxIntToInt64(x.AuxInt)
15793 y := x.Args[0]
15794 if !(clobberIfDead(x)) {
15795 break
15796 }
15797 v.reset(OpARM64MVNshiftRA)
15798 v.AuxInt = int64ToAuxInt(c)
15799 v.AddArg(y)
15800 return true
15801 }
15802
15803
15804
15805 for {
15806 x := v_0
15807 if x.Op != OpARM64RORconst {
15808 break
15809 }
15810 c := auxIntToInt64(x.AuxInt)
15811 y := x.Args[0]
15812 if !(clobberIfDead(x)) {
15813 break
15814 }
15815 v.reset(OpARM64MVNshiftRO)
15816 v.AuxInt = int64ToAuxInt(c)
15817 v.AddArg(y)
15818 return true
15819 }
15820 return false
15821 }
15822 func rewriteValueARM64_OpARM64MVNshiftLL(v *Value) bool {
15823 v_0 := v.Args[0]
15824
15825
15826 for {
15827 d := auxIntToInt64(v.AuxInt)
15828 if v_0.Op != OpARM64MOVDconst {
15829 break
15830 }
15831 c := auxIntToInt64(v_0.AuxInt)
15832 v.reset(OpARM64MOVDconst)
15833 v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
15834 return true
15835 }
15836 return false
15837 }
15838 func rewriteValueARM64_OpARM64MVNshiftRA(v *Value) bool {
15839 v_0 := v.Args[0]
15840
15841
15842 for {
15843 d := auxIntToInt64(v.AuxInt)
15844 if v_0.Op != OpARM64MOVDconst {
15845 break
15846 }
15847 c := auxIntToInt64(v_0.AuxInt)
15848 v.reset(OpARM64MOVDconst)
15849 v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
15850 return true
15851 }
15852 return false
15853 }
15854 func rewriteValueARM64_OpARM64MVNshiftRL(v *Value) bool {
15855 v_0 := v.Args[0]
15856
15857
15858 for {
15859 d := auxIntToInt64(v.AuxInt)
15860 if v_0.Op != OpARM64MOVDconst {
15861 break
15862 }
15863 c := auxIntToInt64(v_0.AuxInt)
15864 v.reset(OpARM64MOVDconst)
15865 v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
15866 return true
15867 }
15868 return false
15869 }
15870 func rewriteValueARM64_OpARM64MVNshiftRO(v *Value) bool {
15871 v_0 := v.Args[0]
15872
15873
15874 for {
15875 d := auxIntToInt64(v.AuxInt)
15876 if v_0.Op != OpARM64MOVDconst {
15877 break
15878 }
15879 c := auxIntToInt64(v_0.AuxInt)
15880 v.reset(OpARM64MOVDconst)
15881 v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
15882 return true
15883 }
15884 return false
15885 }
15886 func rewriteValueARM64_OpARM64NEG(v *Value) bool {
15887 v_0 := v.Args[0]
15888
15889
15890 for {
15891 if v_0.Op != OpARM64MUL {
15892 break
15893 }
15894 y := v_0.Args[1]
15895 x := v_0.Args[0]
15896 v.reset(OpARM64MNEG)
15897 v.AddArg2(x, y)
15898 return true
15899 }
15900
15901
15902 for {
15903 if v_0.Op != OpARM64MULW {
15904 break
15905 }
15906 y := v_0.Args[1]
15907 x := v_0.Args[0]
15908 v.reset(OpARM64MNEGW)
15909 v.AddArg2(x, y)
15910 return true
15911 }
15912
15913
15914 for {
15915 if v_0.Op != OpARM64NEG {
15916 break
15917 }
15918 x := v_0.Args[0]
15919 v.copyOf(x)
15920 return true
15921 }
15922
15923
15924 for {
15925 if v_0.Op != OpARM64MOVDconst {
15926 break
15927 }
15928 c := auxIntToInt64(v_0.AuxInt)
15929 v.reset(OpARM64MOVDconst)
15930 v.AuxInt = int64ToAuxInt(-c)
15931 return true
15932 }
15933
15934
15935
15936 for {
15937 x := v_0
15938 if x.Op != OpARM64SLLconst {
15939 break
15940 }
15941 c := auxIntToInt64(x.AuxInt)
15942 y := x.Args[0]
15943 if !(clobberIfDead(x)) {
15944 break
15945 }
15946 v.reset(OpARM64NEGshiftLL)
15947 v.AuxInt = int64ToAuxInt(c)
15948 v.AddArg(y)
15949 return true
15950 }
15951
15952
15953
15954 for {
15955 x := v_0
15956 if x.Op != OpARM64SRLconst {
15957 break
15958 }
15959 c := auxIntToInt64(x.AuxInt)
15960 y := x.Args[0]
15961 if !(clobberIfDead(x)) {
15962 break
15963 }
15964 v.reset(OpARM64NEGshiftRL)
15965 v.AuxInt = int64ToAuxInt(c)
15966 v.AddArg(y)
15967 return true
15968 }
15969
15970
15971
15972 for {
15973 x := v_0
15974 if x.Op != OpARM64SRAconst {
15975 break
15976 }
15977 c := auxIntToInt64(x.AuxInt)
15978 y := x.Args[0]
15979 if !(clobberIfDead(x)) {
15980 break
15981 }
15982 v.reset(OpARM64NEGshiftRA)
15983 v.AuxInt = int64ToAuxInt(c)
15984 v.AddArg(y)
15985 return true
15986 }
15987 return false
15988 }
15989 func rewriteValueARM64_OpARM64NEGshiftLL(v *Value) bool {
15990 v_0 := v.Args[0]
15991
15992
15993 for {
15994 d := auxIntToInt64(v.AuxInt)
15995 if v_0.Op != OpARM64MOVDconst {
15996 break
15997 }
15998 c := auxIntToInt64(v_0.AuxInt)
15999 v.reset(OpARM64MOVDconst)
16000 v.AuxInt = int64ToAuxInt(-int64(uint64(c) << uint64(d)))
16001 return true
16002 }
16003 return false
16004 }
16005 func rewriteValueARM64_OpARM64NEGshiftRA(v *Value) bool {
16006 v_0 := v.Args[0]
16007
16008
16009 for {
16010 d := auxIntToInt64(v.AuxInt)
16011 if v_0.Op != OpARM64MOVDconst {
16012 break
16013 }
16014 c := auxIntToInt64(v_0.AuxInt)
16015 v.reset(OpARM64MOVDconst)
16016 v.AuxInt = int64ToAuxInt(-(c >> uint64(d)))
16017 return true
16018 }
16019 return false
16020 }
16021 func rewriteValueARM64_OpARM64NEGshiftRL(v *Value) bool {
16022 v_0 := v.Args[0]
16023
16024
16025 for {
16026 d := auxIntToInt64(v.AuxInt)
16027 if v_0.Op != OpARM64MOVDconst {
16028 break
16029 }
16030 c := auxIntToInt64(v_0.AuxInt)
16031 v.reset(OpARM64MOVDconst)
16032 v.AuxInt = int64ToAuxInt(-int64(uint64(c) >> uint64(d)))
16033 return true
16034 }
16035 return false
16036 }
16037 func rewriteValueARM64_OpARM64NotEqual(v *Value) bool {
16038 v_0 := v.Args[0]
16039
16040
16041 for {
16042 if v_0.Op != OpARM64FlagConstant {
16043 break
16044 }
16045 fc := auxIntToFlagConstant(v_0.AuxInt)
16046 v.reset(OpARM64MOVDconst)
16047 v.AuxInt = int64ToAuxInt(b2i(fc.ne()))
16048 return true
16049 }
16050
16051
16052 for {
16053 if v_0.Op != OpARM64InvertFlags {
16054 break
16055 }
16056 x := v_0.Args[0]
16057 v.reset(OpARM64NotEqual)
16058 v.AddArg(x)
16059 return true
16060 }
16061 return false
16062 }
16063 func rewriteValueARM64_OpARM64OR(v *Value) bool {
16064 v_1 := v.Args[1]
16065 v_0 := v.Args[0]
16066 b := v.Block
16067 typ := &b.Func.Config.Types
16068
16069
16070 for {
16071 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16072 x := v_0
16073 if v_1.Op != OpARM64MOVDconst {
16074 continue
16075 }
16076 c := auxIntToInt64(v_1.AuxInt)
16077 v.reset(OpARM64ORconst)
16078 v.AuxInt = int64ToAuxInt(c)
16079 v.AddArg(x)
16080 return true
16081 }
16082 break
16083 }
16084
16085
16086 for {
16087 x := v_0
16088 if x != v_1 {
16089 break
16090 }
16091 v.copyOf(x)
16092 return true
16093 }
16094
16095
16096 for {
16097 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16098 x := v_0
16099 if v_1.Op != OpARM64MVN {
16100 continue
16101 }
16102 y := v_1.Args[0]
16103 v.reset(OpARM64ORN)
16104 v.AddArg2(x, y)
16105 return true
16106 }
16107 break
16108 }
16109
16110
16111
16112 for {
16113 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16114 x0 := v_0
16115 x1 := v_1
16116 if x1.Op != OpARM64SLLconst {
16117 continue
16118 }
16119 c := auxIntToInt64(x1.AuxInt)
16120 y := x1.Args[0]
16121 if !(clobberIfDead(x1)) {
16122 continue
16123 }
16124 v.reset(OpARM64ORshiftLL)
16125 v.AuxInt = int64ToAuxInt(c)
16126 v.AddArg2(x0, y)
16127 return true
16128 }
16129 break
16130 }
16131
16132
16133
16134 for {
16135 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16136 x0 := v_0
16137 x1 := v_1
16138 if x1.Op != OpARM64SRLconst {
16139 continue
16140 }
16141 c := auxIntToInt64(x1.AuxInt)
16142 y := x1.Args[0]
16143 if !(clobberIfDead(x1)) {
16144 continue
16145 }
16146 v.reset(OpARM64ORshiftRL)
16147 v.AuxInt = int64ToAuxInt(c)
16148 v.AddArg2(x0, y)
16149 return true
16150 }
16151 break
16152 }
16153
16154
16155
16156 for {
16157 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16158 x0 := v_0
16159 x1 := v_1
16160 if x1.Op != OpARM64SRAconst {
16161 continue
16162 }
16163 c := auxIntToInt64(x1.AuxInt)
16164 y := x1.Args[0]
16165 if !(clobberIfDead(x1)) {
16166 continue
16167 }
16168 v.reset(OpARM64ORshiftRA)
16169 v.AuxInt = int64ToAuxInt(c)
16170 v.AddArg2(x0, y)
16171 return true
16172 }
16173 break
16174 }
16175
16176
16177
16178 for {
16179 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16180 x0 := v_0
16181 x1 := v_1
16182 if x1.Op != OpARM64RORconst {
16183 continue
16184 }
16185 c := auxIntToInt64(x1.AuxInt)
16186 y := x1.Args[0]
16187 if !(clobberIfDead(x1)) {
16188 continue
16189 }
16190 v.reset(OpARM64ORshiftRO)
16191 v.AuxInt = int64ToAuxInt(c)
16192 v.AddArg2(x0, y)
16193 return true
16194 }
16195 break
16196 }
16197
16198
16199
16200 for {
16201 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16202 if v_0.Op != OpARM64SLL {
16203 continue
16204 }
16205 _ = v_0.Args[1]
16206 x := v_0.Args[0]
16207 v_0_1 := v_0.Args[1]
16208 if v_0_1.Op != OpARM64ANDconst {
16209 continue
16210 }
16211 t := v_0_1.Type
16212 if auxIntToInt64(v_0_1.AuxInt) != 63 {
16213 continue
16214 }
16215 y := v_0_1.Args[0]
16216 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
16217 continue
16218 }
16219 cc := auxIntToOp(v_1.AuxInt)
16220 _ = v_1.Args[1]
16221 v_1_0 := v_1.Args[0]
16222 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 {
16223 continue
16224 }
16225 _ = v_1_0.Args[1]
16226 if x != v_1_0.Args[0] {
16227 continue
16228 }
16229 v_1_0_1 := v_1_0.Args[1]
16230 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
16231 continue
16232 }
16233 _ = v_1_0_1.Args[1]
16234 v_1_0_1_0 := v_1_0_1.Args[0]
16235 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 64 {
16236 continue
16237 }
16238 v_1_0_1_1 := v_1_0_1.Args[1]
16239 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 63 || y != v_1_0_1_1.Args[0] {
16240 continue
16241 }
16242 v_1_1 := v_1.Args[1]
16243 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
16244 continue
16245 }
16246 v_1_1_0 := v_1_1.Args[0]
16247 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
16248 continue
16249 }
16250 _ = v_1_1_0.Args[1]
16251 v_1_1_0_0 := v_1_1_0.Args[0]
16252 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 64 {
16253 continue
16254 }
16255 v_1_1_0_1 := v_1_1_0.Args[1]
16256 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
16257 continue
16258 }
16259 v.reset(OpARM64ROR)
16260 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
16261 v0.AddArg(y)
16262 v.AddArg2(x, v0)
16263 return true
16264 }
16265 break
16266 }
16267
16268
16269
16270 for {
16271 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16272 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 {
16273 continue
16274 }
16275 _ = v_0.Args[1]
16276 x := v_0.Args[0]
16277 v_0_1 := v_0.Args[1]
16278 if v_0_1.Op != OpARM64ANDconst {
16279 continue
16280 }
16281 t := v_0_1.Type
16282 if auxIntToInt64(v_0_1.AuxInt) != 63 {
16283 continue
16284 }
16285 y := v_0_1.Args[0]
16286 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
16287 continue
16288 }
16289 cc := auxIntToOp(v_1.AuxInt)
16290 _ = v_1.Args[1]
16291 v_1_0 := v_1.Args[0]
16292 if v_1_0.Op != OpARM64SLL {
16293 continue
16294 }
16295 _ = v_1_0.Args[1]
16296 if x != v_1_0.Args[0] {
16297 continue
16298 }
16299 v_1_0_1 := v_1_0.Args[1]
16300 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
16301 continue
16302 }
16303 _ = v_1_0_1.Args[1]
16304 v_1_0_1_0 := v_1_0_1.Args[0]
16305 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 64 {
16306 continue
16307 }
16308 v_1_0_1_1 := v_1_0_1.Args[1]
16309 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 63 || y != v_1_0_1_1.Args[0] {
16310 continue
16311 }
16312 v_1_1 := v_1.Args[1]
16313 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
16314 continue
16315 }
16316 v_1_1_0 := v_1_1.Args[0]
16317 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
16318 continue
16319 }
16320 _ = v_1_1_0.Args[1]
16321 v_1_1_0_0 := v_1_1_0.Args[0]
16322 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 64 {
16323 continue
16324 }
16325 v_1_1_0_1 := v_1_1_0.Args[1]
16326 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
16327 continue
16328 }
16329 v.reset(OpARM64ROR)
16330 v.AddArg2(x, y)
16331 return true
16332 }
16333 break
16334 }
16335
16336
16337
16338 for {
16339 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16340 if v_0.Op != OpARM64SLL {
16341 continue
16342 }
16343 _ = v_0.Args[1]
16344 x := v_0.Args[0]
16345 v_0_1 := v_0.Args[1]
16346 if v_0_1.Op != OpARM64ANDconst {
16347 continue
16348 }
16349 t := v_0_1.Type
16350 if auxIntToInt64(v_0_1.AuxInt) != 31 {
16351 continue
16352 }
16353 y := v_0_1.Args[0]
16354 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
16355 continue
16356 }
16357 cc := auxIntToOp(v_1.AuxInt)
16358 _ = v_1.Args[1]
16359 v_1_0 := v_1.Args[0]
16360 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 {
16361 continue
16362 }
16363 _ = v_1_0.Args[1]
16364 v_1_0_0 := v_1_0.Args[0]
16365 if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] {
16366 continue
16367 }
16368 v_1_0_1 := v_1_0.Args[1]
16369 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
16370 continue
16371 }
16372 _ = v_1_0_1.Args[1]
16373 v_1_0_1_0 := v_1_0_1.Args[0]
16374 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 32 {
16375 continue
16376 }
16377 v_1_0_1_1 := v_1_0_1.Args[1]
16378 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 31 || y != v_1_0_1_1.Args[0] {
16379 continue
16380 }
16381 v_1_1 := v_1.Args[1]
16382 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
16383 continue
16384 }
16385 v_1_1_0 := v_1_1.Args[0]
16386 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
16387 continue
16388 }
16389 _ = v_1_1_0.Args[1]
16390 v_1_1_0_0 := v_1_1_0.Args[0]
16391 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 32 {
16392 continue
16393 }
16394 v_1_1_0_1 := v_1_1_0.Args[1]
16395 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
16396 continue
16397 }
16398 v.reset(OpARM64RORW)
16399 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
16400 v0.AddArg(y)
16401 v.AddArg2(x, v0)
16402 return true
16403 }
16404 break
16405 }
16406
16407
16408
16409 for {
16410 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16411 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 {
16412 continue
16413 }
16414 _ = v_0.Args[1]
16415 v_0_0 := v_0.Args[0]
16416 if v_0_0.Op != OpARM64MOVWUreg {
16417 continue
16418 }
16419 x := v_0_0.Args[0]
16420 v_0_1 := v_0.Args[1]
16421 if v_0_1.Op != OpARM64ANDconst {
16422 continue
16423 }
16424 t := v_0_1.Type
16425 if auxIntToInt64(v_0_1.AuxInt) != 31 {
16426 continue
16427 }
16428 y := v_0_1.Args[0]
16429 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
16430 continue
16431 }
16432 cc := auxIntToOp(v_1.AuxInt)
16433 _ = v_1.Args[1]
16434 v_1_0 := v_1.Args[0]
16435 if v_1_0.Op != OpARM64SLL {
16436 continue
16437 }
16438 _ = v_1_0.Args[1]
16439 if x != v_1_0.Args[0] {
16440 continue
16441 }
16442 v_1_0_1 := v_1_0.Args[1]
16443 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
16444 continue
16445 }
16446 _ = v_1_0_1.Args[1]
16447 v_1_0_1_0 := v_1_0_1.Args[0]
16448 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 32 {
16449 continue
16450 }
16451 v_1_0_1_1 := v_1_0_1.Args[1]
16452 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 31 || y != v_1_0_1_1.Args[0] {
16453 continue
16454 }
16455 v_1_1 := v_1.Args[1]
16456 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
16457 continue
16458 }
16459 v_1_1_0 := v_1_1.Args[0]
16460 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
16461 continue
16462 }
16463 _ = v_1_1_0.Args[1]
16464 v_1_1_0_0 := v_1_1_0.Args[0]
16465 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 32 {
16466 continue
16467 }
16468 v_1_1_0_1 := v_1_1_0.Args[1]
16469 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
16470 continue
16471 }
16472 v.reset(OpARM64RORW)
16473 v.AddArg2(x, y)
16474 return true
16475 }
16476 break
16477 }
16478
16479
16480
16481 for {
16482 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16483 if v_0.Op != OpARM64UBFIZ {
16484 continue
16485 }
16486 bfc := auxIntToArm64BitField(v_0.AuxInt)
16487 x := v_0.Args[0]
16488 if v_1.Op != OpARM64ANDconst {
16489 continue
16490 }
16491 ac := auxIntToInt64(v_1.AuxInt)
16492 y := v_1.Args[0]
16493 if !(ac == ^((1<<uint(bfc.getARM64BFwidth()) - 1) << uint(bfc.getARM64BFlsb()))) {
16494 continue
16495 }
16496 v.reset(OpARM64BFI)
16497 v.AuxInt = arm64BitFieldToAuxInt(bfc)
16498 v.AddArg2(y, x)
16499 return true
16500 }
16501 break
16502 }
16503
16504
16505
16506 for {
16507 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16508 if v_0.Op != OpARM64UBFX {
16509 continue
16510 }
16511 bfc := auxIntToArm64BitField(v_0.AuxInt)
16512 x := v_0.Args[0]
16513 if v_1.Op != OpARM64ANDconst {
16514 continue
16515 }
16516 ac := auxIntToInt64(v_1.AuxInt)
16517 y := v_1.Args[0]
16518 if !(ac == ^(1<<uint(bfc.getARM64BFwidth()) - 1)) {
16519 continue
16520 }
16521 v.reset(OpARM64BFXIL)
16522 v.AuxInt = arm64BitFieldToAuxInt(bfc)
16523 v.AddArg2(y, x)
16524 return true
16525 }
16526 break
16527 }
16528
16529
16530
16531 for {
16532 t := v.Type
16533 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16534 o0 := v_0
16535 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
16536 continue
16537 }
16538 _ = o0.Args[1]
16539 o1 := o0.Args[0]
16540 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
16541 continue
16542 }
16543 _ = o1.Args[1]
16544 s0 := o1.Args[0]
16545 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
16546 continue
16547 }
16548 y0 := s0.Args[0]
16549 if y0.Op != OpARM64MOVDnop {
16550 continue
16551 }
16552 x0 := y0.Args[0]
16553 if x0.Op != OpARM64MOVBUload {
16554 continue
16555 }
16556 i3 := auxIntToInt32(x0.AuxInt)
16557 s := auxToSym(x0.Aux)
16558 mem := x0.Args[1]
16559 p := x0.Args[0]
16560 y1 := o1.Args[1]
16561 if y1.Op != OpARM64MOVDnop {
16562 continue
16563 }
16564 x1 := y1.Args[0]
16565 if x1.Op != OpARM64MOVBUload {
16566 continue
16567 }
16568 i2 := auxIntToInt32(x1.AuxInt)
16569 if auxToSym(x1.Aux) != s {
16570 continue
16571 }
16572 _ = x1.Args[1]
16573 if p != x1.Args[0] || mem != x1.Args[1] {
16574 continue
16575 }
16576 y2 := o0.Args[1]
16577 if y2.Op != OpARM64MOVDnop {
16578 continue
16579 }
16580 x2 := y2.Args[0]
16581 if x2.Op != OpARM64MOVBUload {
16582 continue
16583 }
16584 i1 := auxIntToInt32(x2.AuxInt)
16585 if auxToSym(x2.Aux) != s {
16586 continue
16587 }
16588 _ = x2.Args[1]
16589 if p != x2.Args[0] || mem != x2.Args[1] {
16590 continue
16591 }
16592 y3 := v_1
16593 if y3.Op != OpARM64MOVDnop {
16594 continue
16595 }
16596 x3 := y3.Args[0]
16597 if x3.Op != OpARM64MOVBUload {
16598 continue
16599 }
16600 i0 := auxIntToInt32(x3.AuxInt)
16601 if auxToSym(x3.Aux) != s {
16602 continue
16603 }
16604 _ = x3.Args[1]
16605 if p != x3.Args[0] || mem != x3.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
16606 continue
16607 }
16608 b = mergePoint(b, x0, x1, x2, x3)
16609 v0 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t)
16610 v.copyOf(v0)
16611 v0.Aux = symToAux(s)
16612 v1 := b.NewValue0(x3.Pos, OpOffPtr, p.Type)
16613 v1.AuxInt = int64ToAuxInt(int64(i0))
16614 v1.AddArg(p)
16615 v0.AddArg2(v1, mem)
16616 return true
16617 }
16618 break
16619 }
16620
16621
16622
16623 for {
16624 t := v.Type
16625 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16626 o0 := v_0
16627 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
16628 continue
16629 }
16630 _ = o0.Args[1]
16631 o1 := o0.Args[0]
16632 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
16633 continue
16634 }
16635 _ = o1.Args[1]
16636 s0 := o1.Args[0]
16637 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
16638 continue
16639 }
16640 y0 := s0.Args[0]
16641 if y0.Op != OpARM64MOVDnop {
16642 continue
16643 }
16644 x0 := y0.Args[0]
16645 if x0.Op != OpARM64MOVBUload || auxIntToInt32(x0.AuxInt) != 3 {
16646 continue
16647 }
16648 s := auxToSym(x0.Aux)
16649 mem := x0.Args[1]
16650 p := x0.Args[0]
16651 y1 := o1.Args[1]
16652 if y1.Op != OpARM64MOVDnop {
16653 continue
16654 }
16655 x1 := y1.Args[0]
16656 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 2 || auxToSym(x1.Aux) != s {
16657 continue
16658 }
16659 _ = x1.Args[1]
16660 if p != x1.Args[0] || mem != x1.Args[1] {
16661 continue
16662 }
16663 y2 := o0.Args[1]
16664 if y2.Op != OpARM64MOVDnop {
16665 continue
16666 }
16667 x2 := y2.Args[0]
16668 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 1 || auxToSym(x2.Aux) != s {
16669 continue
16670 }
16671 _ = x2.Args[1]
16672 p1 := x2.Args[0]
16673 if p1.Op != OpARM64ADD {
16674 continue
16675 }
16676 _ = p1.Args[1]
16677 p1_0 := p1.Args[0]
16678 p1_1 := p1.Args[1]
16679 for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
16680 ptr1 := p1_0
16681 idx1 := p1_1
16682 if mem != x2.Args[1] {
16683 continue
16684 }
16685 y3 := v_1
16686 if y3.Op != OpARM64MOVDnop {
16687 continue
16688 }
16689 x3 := y3.Args[0]
16690 if x3.Op != OpARM64MOVBUloadidx {
16691 continue
16692 }
16693 _ = x3.Args[2]
16694 ptr0 := x3.Args[0]
16695 idx0 := x3.Args[1]
16696 if mem != x3.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
16697 continue
16698 }
16699 b = mergePoint(b, x0, x1, x2, x3)
16700 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
16701 v.copyOf(v0)
16702 v0.AddArg3(ptr0, idx0, mem)
16703 return true
16704 }
16705 }
16706 break
16707 }
16708
16709
16710
16711 for {
16712 t := v.Type
16713 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16714 o0 := v_0
16715 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
16716 continue
16717 }
16718 _ = o0.Args[1]
16719 o1 := o0.Args[0]
16720 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
16721 continue
16722 }
16723 _ = o1.Args[1]
16724 s0 := o1.Args[0]
16725 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
16726 continue
16727 }
16728 y0 := s0.Args[0]
16729 if y0.Op != OpARM64MOVDnop {
16730 continue
16731 }
16732 x0 := y0.Args[0]
16733 if x0.Op != OpARM64MOVBUloadidx {
16734 continue
16735 }
16736 mem := x0.Args[2]
16737 ptr := x0.Args[0]
16738 x0_1 := x0.Args[1]
16739 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 3 {
16740 continue
16741 }
16742 idx := x0_1.Args[0]
16743 y1 := o1.Args[1]
16744 if y1.Op != OpARM64MOVDnop {
16745 continue
16746 }
16747 x1 := y1.Args[0]
16748 if x1.Op != OpARM64MOVBUloadidx {
16749 continue
16750 }
16751 _ = x1.Args[2]
16752 if ptr != x1.Args[0] {
16753 continue
16754 }
16755 x1_1 := x1.Args[1]
16756 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] {
16757 continue
16758 }
16759 y2 := o0.Args[1]
16760 if y2.Op != OpARM64MOVDnop {
16761 continue
16762 }
16763 x2 := y2.Args[0]
16764 if x2.Op != OpARM64MOVBUloadidx {
16765 continue
16766 }
16767 _ = x2.Args[2]
16768 if ptr != x2.Args[0] {
16769 continue
16770 }
16771 x2_1 := x2.Args[1]
16772 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 1 || idx != x2_1.Args[0] || mem != x2.Args[2] {
16773 continue
16774 }
16775 y3 := v_1
16776 if y3.Op != OpARM64MOVDnop {
16777 continue
16778 }
16779 x3 := y3.Args[0]
16780 if x3.Op != OpARM64MOVBUloadidx {
16781 continue
16782 }
16783 _ = x3.Args[2]
16784 if ptr != x3.Args[0] || idx != x3.Args[1] || mem != x3.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
16785 continue
16786 }
16787 b = mergePoint(b, x0, x1, x2, x3)
16788 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
16789 v.copyOf(v0)
16790 v0.AddArg3(ptr, idx, mem)
16791 return true
16792 }
16793 break
16794 }
16795
16796
16797
16798 for {
16799 t := v.Type
16800 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16801 o0 := v_0
16802 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
16803 continue
16804 }
16805 _ = o0.Args[1]
16806 o1 := o0.Args[0]
16807 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
16808 continue
16809 }
16810 _ = o1.Args[1]
16811 o2 := o1.Args[0]
16812 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
16813 continue
16814 }
16815 _ = o2.Args[1]
16816 o3 := o2.Args[0]
16817 if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
16818 continue
16819 }
16820 _ = o3.Args[1]
16821 o4 := o3.Args[0]
16822 if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
16823 continue
16824 }
16825 _ = o4.Args[1]
16826 o5 := o4.Args[0]
16827 if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
16828 continue
16829 }
16830 _ = o5.Args[1]
16831 s0 := o5.Args[0]
16832 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
16833 continue
16834 }
16835 y0 := s0.Args[0]
16836 if y0.Op != OpARM64MOVDnop {
16837 continue
16838 }
16839 x0 := y0.Args[0]
16840 if x0.Op != OpARM64MOVBUload {
16841 continue
16842 }
16843 i7 := auxIntToInt32(x0.AuxInt)
16844 s := auxToSym(x0.Aux)
16845 mem := x0.Args[1]
16846 p := x0.Args[0]
16847 y1 := o5.Args[1]
16848 if y1.Op != OpARM64MOVDnop {
16849 continue
16850 }
16851 x1 := y1.Args[0]
16852 if x1.Op != OpARM64MOVBUload {
16853 continue
16854 }
16855 i6 := auxIntToInt32(x1.AuxInt)
16856 if auxToSym(x1.Aux) != s {
16857 continue
16858 }
16859 _ = x1.Args[1]
16860 if p != x1.Args[0] || mem != x1.Args[1] {
16861 continue
16862 }
16863 y2 := o4.Args[1]
16864 if y2.Op != OpARM64MOVDnop {
16865 continue
16866 }
16867 x2 := y2.Args[0]
16868 if x2.Op != OpARM64MOVBUload {
16869 continue
16870 }
16871 i5 := auxIntToInt32(x2.AuxInt)
16872 if auxToSym(x2.Aux) != s {
16873 continue
16874 }
16875 _ = x2.Args[1]
16876 if p != x2.Args[0] || mem != x2.Args[1] {
16877 continue
16878 }
16879 y3 := o3.Args[1]
16880 if y3.Op != OpARM64MOVDnop {
16881 continue
16882 }
16883 x3 := y3.Args[0]
16884 if x3.Op != OpARM64MOVBUload {
16885 continue
16886 }
16887 i4 := auxIntToInt32(x3.AuxInt)
16888 if auxToSym(x3.Aux) != s {
16889 continue
16890 }
16891 _ = x3.Args[1]
16892 if p != x3.Args[0] || mem != x3.Args[1] {
16893 continue
16894 }
16895 y4 := o2.Args[1]
16896 if y4.Op != OpARM64MOVDnop {
16897 continue
16898 }
16899 x4 := y4.Args[0]
16900 if x4.Op != OpARM64MOVBUload {
16901 continue
16902 }
16903 i3 := auxIntToInt32(x4.AuxInt)
16904 if auxToSym(x4.Aux) != s {
16905 continue
16906 }
16907 _ = x4.Args[1]
16908 if p != x4.Args[0] || mem != x4.Args[1] {
16909 continue
16910 }
16911 y5 := o1.Args[1]
16912 if y5.Op != OpARM64MOVDnop {
16913 continue
16914 }
16915 x5 := y5.Args[0]
16916 if x5.Op != OpARM64MOVBUload {
16917 continue
16918 }
16919 i2 := auxIntToInt32(x5.AuxInt)
16920 if auxToSym(x5.Aux) != s {
16921 continue
16922 }
16923 _ = x5.Args[1]
16924 if p != x5.Args[0] || mem != x5.Args[1] {
16925 continue
16926 }
16927 y6 := o0.Args[1]
16928 if y6.Op != OpARM64MOVDnop {
16929 continue
16930 }
16931 x6 := y6.Args[0]
16932 if x6.Op != OpARM64MOVBUload {
16933 continue
16934 }
16935 i1 := auxIntToInt32(x6.AuxInt)
16936 if auxToSym(x6.Aux) != s {
16937 continue
16938 }
16939 _ = x6.Args[1]
16940 if p != x6.Args[0] || mem != x6.Args[1] {
16941 continue
16942 }
16943 y7 := v_1
16944 if y7.Op != OpARM64MOVDnop {
16945 continue
16946 }
16947 x7 := y7.Args[0]
16948 if x7.Op != OpARM64MOVBUload {
16949 continue
16950 }
16951 i0 := auxIntToInt32(x7.AuxInt)
16952 if auxToSym(x7.Aux) != s {
16953 continue
16954 }
16955 _ = x7.Args[1]
16956 if p != x7.Args[0] || mem != x7.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
16957 continue
16958 }
16959 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
16960 v0 := b.NewValue0(x7.Pos, OpARM64MOVDload, t)
16961 v.copyOf(v0)
16962 v0.Aux = symToAux(s)
16963 v1 := b.NewValue0(x7.Pos, OpOffPtr, p.Type)
16964 v1.AuxInt = int64ToAuxInt(int64(i0))
16965 v1.AddArg(p)
16966 v0.AddArg2(v1, mem)
16967 return true
16968 }
16969 break
16970 }
16971
16972
16973
16974 for {
16975 t := v.Type
16976 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16977 o0 := v_0
16978 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
16979 continue
16980 }
16981 _ = o0.Args[1]
16982 o1 := o0.Args[0]
16983 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
16984 continue
16985 }
16986 _ = o1.Args[1]
16987 o2 := o1.Args[0]
16988 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
16989 continue
16990 }
16991 _ = o2.Args[1]
16992 o3 := o2.Args[0]
16993 if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
16994 continue
16995 }
16996 _ = o3.Args[1]
16997 o4 := o3.Args[0]
16998 if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
16999 continue
17000 }
17001 _ = o4.Args[1]
17002 o5 := o4.Args[0]
17003 if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
17004 continue
17005 }
17006 _ = o5.Args[1]
17007 s0 := o5.Args[0]
17008 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
17009 continue
17010 }
17011 y0 := s0.Args[0]
17012 if y0.Op != OpARM64MOVDnop {
17013 continue
17014 }
17015 x0 := y0.Args[0]
17016 if x0.Op != OpARM64MOVBUload || auxIntToInt32(x0.AuxInt) != 7 {
17017 continue
17018 }
17019 s := auxToSym(x0.Aux)
17020 mem := x0.Args[1]
17021 p := x0.Args[0]
17022 y1 := o5.Args[1]
17023 if y1.Op != OpARM64MOVDnop {
17024 continue
17025 }
17026 x1 := y1.Args[0]
17027 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 6 || auxToSym(x1.Aux) != s {
17028 continue
17029 }
17030 _ = x1.Args[1]
17031 if p != x1.Args[0] || mem != x1.Args[1] {
17032 continue
17033 }
17034 y2 := o4.Args[1]
17035 if y2.Op != OpARM64MOVDnop {
17036 continue
17037 }
17038 x2 := y2.Args[0]
17039 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 5 || auxToSym(x2.Aux) != s {
17040 continue
17041 }
17042 _ = x2.Args[1]
17043 if p != x2.Args[0] || mem != x2.Args[1] {
17044 continue
17045 }
17046 y3 := o3.Args[1]
17047 if y3.Op != OpARM64MOVDnop {
17048 continue
17049 }
17050 x3 := y3.Args[0]
17051 if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 4 || auxToSym(x3.Aux) != s {
17052 continue
17053 }
17054 _ = x3.Args[1]
17055 if p != x3.Args[0] || mem != x3.Args[1] {
17056 continue
17057 }
17058 y4 := o2.Args[1]
17059 if y4.Op != OpARM64MOVDnop {
17060 continue
17061 }
17062 x4 := y4.Args[0]
17063 if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 3 || auxToSym(x4.Aux) != s {
17064 continue
17065 }
17066 _ = x4.Args[1]
17067 if p != x4.Args[0] || mem != x4.Args[1] {
17068 continue
17069 }
17070 y5 := o1.Args[1]
17071 if y5.Op != OpARM64MOVDnop {
17072 continue
17073 }
17074 x5 := y5.Args[0]
17075 if x5.Op != OpARM64MOVBUload || auxIntToInt32(x5.AuxInt) != 2 || auxToSym(x5.Aux) != s {
17076 continue
17077 }
17078 _ = x5.Args[1]
17079 if p != x5.Args[0] || mem != x5.Args[1] {
17080 continue
17081 }
17082 y6 := o0.Args[1]
17083 if y6.Op != OpARM64MOVDnop {
17084 continue
17085 }
17086 x6 := y6.Args[0]
17087 if x6.Op != OpARM64MOVBUload || auxIntToInt32(x6.AuxInt) != 1 || auxToSym(x6.Aux) != s {
17088 continue
17089 }
17090 _ = x6.Args[1]
17091 p1 := x6.Args[0]
17092 if p1.Op != OpARM64ADD {
17093 continue
17094 }
17095 _ = p1.Args[1]
17096 p1_0 := p1.Args[0]
17097 p1_1 := p1.Args[1]
17098 for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
17099 ptr1 := p1_0
17100 idx1 := p1_1
17101 if mem != x6.Args[1] {
17102 continue
17103 }
17104 y7 := v_1
17105 if y7.Op != OpARM64MOVDnop {
17106 continue
17107 }
17108 x7 := y7.Args[0]
17109 if x7.Op != OpARM64MOVBUloadidx {
17110 continue
17111 }
17112 _ = x7.Args[2]
17113 ptr0 := x7.Args[0]
17114 idx0 := x7.Args[1]
17115 if mem != x7.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
17116 continue
17117 }
17118 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
17119 v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t)
17120 v.copyOf(v0)
17121 v0.AddArg3(ptr0, idx0, mem)
17122 return true
17123 }
17124 }
17125 break
17126 }
17127
17128
17129
17130 for {
17131 t := v.Type
17132 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17133 o0 := v_0
17134 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17135 continue
17136 }
17137 _ = o0.Args[1]
17138 o1 := o0.Args[0]
17139 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17140 continue
17141 }
17142 _ = o1.Args[1]
17143 o2 := o1.Args[0]
17144 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
17145 continue
17146 }
17147 _ = o2.Args[1]
17148 o3 := o2.Args[0]
17149 if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
17150 continue
17151 }
17152 _ = o3.Args[1]
17153 o4 := o3.Args[0]
17154 if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
17155 continue
17156 }
17157 _ = o4.Args[1]
17158 o5 := o4.Args[0]
17159 if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
17160 continue
17161 }
17162 _ = o5.Args[1]
17163 s0 := o5.Args[0]
17164 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
17165 continue
17166 }
17167 y0 := s0.Args[0]
17168 if y0.Op != OpARM64MOVDnop {
17169 continue
17170 }
17171 x0 := y0.Args[0]
17172 if x0.Op != OpARM64MOVBUloadidx {
17173 continue
17174 }
17175 mem := x0.Args[2]
17176 ptr := x0.Args[0]
17177 x0_1 := x0.Args[1]
17178 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 7 {
17179 continue
17180 }
17181 idx := x0_1.Args[0]
17182 y1 := o5.Args[1]
17183 if y1.Op != OpARM64MOVDnop {
17184 continue
17185 }
17186 x1 := y1.Args[0]
17187 if x1.Op != OpARM64MOVBUloadidx {
17188 continue
17189 }
17190 _ = x1.Args[2]
17191 if ptr != x1.Args[0] {
17192 continue
17193 }
17194 x1_1 := x1.Args[1]
17195 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 6 || idx != x1_1.Args[0] || mem != x1.Args[2] {
17196 continue
17197 }
17198 y2 := o4.Args[1]
17199 if y2.Op != OpARM64MOVDnop {
17200 continue
17201 }
17202 x2 := y2.Args[0]
17203 if x2.Op != OpARM64MOVBUloadidx {
17204 continue
17205 }
17206 _ = x2.Args[2]
17207 if ptr != x2.Args[0] {
17208 continue
17209 }
17210 x2_1 := x2.Args[1]
17211 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] {
17212 continue
17213 }
17214 y3 := o3.Args[1]
17215 if y3.Op != OpARM64MOVDnop {
17216 continue
17217 }
17218 x3 := y3.Args[0]
17219 if x3.Op != OpARM64MOVBUloadidx {
17220 continue
17221 }
17222 _ = x3.Args[2]
17223 if ptr != x3.Args[0] {
17224 continue
17225 }
17226 x3_1 := x3.Args[1]
17227 if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 4 || idx != x3_1.Args[0] || mem != x3.Args[2] {
17228 continue
17229 }
17230 y4 := o2.Args[1]
17231 if y4.Op != OpARM64MOVDnop {
17232 continue
17233 }
17234 x4 := y4.Args[0]
17235 if x4.Op != OpARM64MOVBUloadidx {
17236 continue
17237 }
17238 _ = x4.Args[2]
17239 if ptr != x4.Args[0] {
17240 continue
17241 }
17242 x4_1 := x4.Args[1]
17243 if x4_1.Op != OpARM64ADDconst || auxIntToInt64(x4_1.AuxInt) != 3 || idx != x4_1.Args[0] || mem != x4.Args[2] {
17244 continue
17245 }
17246 y5 := o1.Args[1]
17247 if y5.Op != OpARM64MOVDnop {
17248 continue
17249 }
17250 x5 := y5.Args[0]
17251 if x5.Op != OpARM64MOVBUloadidx {
17252 continue
17253 }
17254 _ = x5.Args[2]
17255 if ptr != x5.Args[0] {
17256 continue
17257 }
17258 x5_1 := x5.Args[1]
17259 if x5_1.Op != OpARM64ADDconst || auxIntToInt64(x5_1.AuxInt) != 2 || idx != x5_1.Args[0] || mem != x5.Args[2] {
17260 continue
17261 }
17262 y6 := o0.Args[1]
17263 if y6.Op != OpARM64MOVDnop {
17264 continue
17265 }
17266 x6 := y6.Args[0]
17267 if x6.Op != OpARM64MOVBUloadidx {
17268 continue
17269 }
17270 _ = x6.Args[2]
17271 if ptr != x6.Args[0] {
17272 continue
17273 }
17274 x6_1 := x6.Args[1]
17275 if x6_1.Op != OpARM64ADDconst || auxIntToInt64(x6_1.AuxInt) != 1 || idx != x6_1.Args[0] || mem != x6.Args[2] {
17276 continue
17277 }
17278 y7 := v_1
17279 if y7.Op != OpARM64MOVDnop {
17280 continue
17281 }
17282 x7 := y7.Args[0]
17283 if x7.Op != OpARM64MOVBUloadidx {
17284 continue
17285 }
17286 _ = x7.Args[2]
17287 if ptr != x7.Args[0] || idx != x7.Args[1] || mem != x7.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
17288 continue
17289 }
17290 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
17291 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
17292 v.copyOf(v0)
17293 v0.AddArg3(ptr, idx, mem)
17294 return true
17295 }
17296 break
17297 }
17298
17299
17300
17301 for {
17302 t := v.Type
17303 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17304 o0 := v_0
17305 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17306 continue
17307 }
17308 _ = o0.Args[1]
17309 o1 := o0.Args[0]
17310 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17311 continue
17312 }
17313 _ = o1.Args[1]
17314 s0 := o1.Args[0]
17315 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
17316 continue
17317 }
17318 y0 := s0.Args[0]
17319 if y0.Op != OpARM64MOVDnop {
17320 continue
17321 }
17322 x0 := y0.Args[0]
17323 if x0.Op != OpARM64MOVBUload {
17324 continue
17325 }
17326 i0 := auxIntToInt32(x0.AuxInt)
17327 s := auxToSym(x0.Aux)
17328 mem := x0.Args[1]
17329 p := x0.Args[0]
17330 y1 := o1.Args[1]
17331 if y1.Op != OpARM64MOVDnop {
17332 continue
17333 }
17334 x1 := y1.Args[0]
17335 if x1.Op != OpARM64MOVBUload {
17336 continue
17337 }
17338 i1 := auxIntToInt32(x1.AuxInt)
17339 if auxToSym(x1.Aux) != s {
17340 continue
17341 }
17342 _ = x1.Args[1]
17343 if p != x1.Args[0] || mem != x1.Args[1] {
17344 continue
17345 }
17346 y2 := o0.Args[1]
17347 if y2.Op != OpARM64MOVDnop {
17348 continue
17349 }
17350 x2 := y2.Args[0]
17351 if x2.Op != OpARM64MOVBUload {
17352 continue
17353 }
17354 i2 := auxIntToInt32(x2.AuxInt)
17355 if auxToSym(x2.Aux) != s {
17356 continue
17357 }
17358 _ = x2.Args[1]
17359 if p != x2.Args[0] || mem != x2.Args[1] {
17360 continue
17361 }
17362 y3 := v_1
17363 if y3.Op != OpARM64MOVDnop {
17364 continue
17365 }
17366 x3 := y3.Args[0]
17367 if x3.Op != OpARM64MOVBUload {
17368 continue
17369 }
17370 i3 := auxIntToInt32(x3.AuxInt)
17371 if auxToSym(x3.Aux) != s {
17372 continue
17373 }
17374 _ = x3.Args[1]
17375 if p != x3.Args[0] || mem != x3.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
17376 continue
17377 }
17378 b = mergePoint(b, x0, x1, x2, x3)
17379 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t)
17380 v.copyOf(v0)
17381 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t)
17382 v1.Aux = symToAux(s)
17383 v2 := b.NewValue0(x3.Pos, OpOffPtr, p.Type)
17384 v2.AuxInt = int64ToAuxInt(int64(i0))
17385 v2.AddArg(p)
17386 v1.AddArg2(v2, mem)
17387 v0.AddArg(v1)
17388 return true
17389 }
17390 break
17391 }
17392
17393
17394
17395 for {
17396 t := v.Type
17397 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17398 o0 := v_0
17399 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17400 continue
17401 }
17402 _ = o0.Args[1]
17403 o1 := o0.Args[0]
17404 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17405 continue
17406 }
17407 _ = o1.Args[1]
17408 s0 := o1.Args[0]
17409 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
17410 continue
17411 }
17412 y0 := s0.Args[0]
17413 if y0.Op != OpARM64MOVDnop {
17414 continue
17415 }
17416 x0 := y0.Args[0]
17417 if x0.Op != OpARM64MOVBUloadidx {
17418 continue
17419 }
17420 mem := x0.Args[2]
17421 ptr0 := x0.Args[0]
17422 idx0 := x0.Args[1]
17423 y1 := o1.Args[1]
17424 if y1.Op != OpARM64MOVDnop {
17425 continue
17426 }
17427 x1 := y1.Args[0]
17428 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 {
17429 continue
17430 }
17431 s := auxToSym(x1.Aux)
17432 _ = x1.Args[1]
17433 p1 := x1.Args[0]
17434 if p1.Op != OpARM64ADD {
17435 continue
17436 }
17437 _ = p1.Args[1]
17438 p1_0 := p1.Args[0]
17439 p1_1 := p1.Args[1]
17440 for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
17441 ptr1 := p1_0
17442 idx1 := p1_1
17443 if mem != x1.Args[1] {
17444 continue
17445 }
17446 y2 := o0.Args[1]
17447 if y2.Op != OpARM64MOVDnop {
17448 continue
17449 }
17450 x2 := y2.Args[0]
17451 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 2 || auxToSym(x2.Aux) != s {
17452 continue
17453 }
17454 _ = x2.Args[1]
17455 p := x2.Args[0]
17456 if mem != x2.Args[1] {
17457 continue
17458 }
17459 y3 := v_1
17460 if y3.Op != OpARM64MOVDnop {
17461 continue
17462 }
17463 x3 := y3.Args[0]
17464 if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
17465 continue
17466 }
17467 _ = x3.Args[1]
17468 if p != x3.Args[0] || mem != x3.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
17469 continue
17470 }
17471 b = mergePoint(b, x0, x1, x2, x3)
17472 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t)
17473 v.copyOf(v0)
17474 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUloadidx, t)
17475 v1.AddArg3(ptr0, idx0, mem)
17476 v0.AddArg(v1)
17477 return true
17478 }
17479 }
17480 break
17481 }
17482
17483
17484
17485 for {
17486 t := v.Type
17487 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17488 o0 := v_0
17489 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17490 continue
17491 }
17492 _ = o0.Args[1]
17493 o1 := o0.Args[0]
17494 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17495 continue
17496 }
17497 _ = o1.Args[1]
17498 s0 := o1.Args[0]
17499 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 24 {
17500 continue
17501 }
17502 y0 := s0.Args[0]
17503 if y0.Op != OpARM64MOVDnop {
17504 continue
17505 }
17506 x0 := y0.Args[0]
17507 if x0.Op != OpARM64MOVBUloadidx {
17508 continue
17509 }
17510 mem := x0.Args[2]
17511 ptr := x0.Args[0]
17512 idx := x0.Args[1]
17513 y1 := o1.Args[1]
17514 if y1.Op != OpARM64MOVDnop {
17515 continue
17516 }
17517 x1 := y1.Args[0]
17518 if x1.Op != OpARM64MOVBUloadidx {
17519 continue
17520 }
17521 _ = x1.Args[2]
17522 if ptr != x1.Args[0] {
17523 continue
17524 }
17525 x1_1 := x1.Args[1]
17526 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] {
17527 continue
17528 }
17529 y2 := o0.Args[1]
17530 if y2.Op != OpARM64MOVDnop {
17531 continue
17532 }
17533 x2 := y2.Args[0]
17534 if x2.Op != OpARM64MOVBUloadidx {
17535 continue
17536 }
17537 _ = x2.Args[2]
17538 if ptr != x2.Args[0] {
17539 continue
17540 }
17541 x2_1 := x2.Args[1]
17542 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] {
17543 continue
17544 }
17545 y3 := v_1
17546 if y3.Op != OpARM64MOVDnop {
17547 continue
17548 }
17549 x3 := y3.Args[0]
17550 if x3.Op != OpARM64MOVBUloadidx {
17551 continue
17552 }
17553 _ = x3.Args[2]
17554 if ptr != x3.Args[0] {
17555 continue
17556 }
17557 x3_1 := x3.Args[1]
17558 if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0, x1, x2, x3, y0, y1, y2, y3, o0, o1, s0)) {
17559 continue
17560 }
17561 b = mergePoint(b, x0, x1, x2, x3)
17562 v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
17563 v.copyOf(v0)
17564 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
17565 v1.AddArg3(ptr, idx, mem)
17566 v0.AddArg(v1)
17567 return true
17568 }
17569 break
17570 }
17571
17572
17573
17574 for {
17575 t := v.Type
17576 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17577 o0 := v_0
17578 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17579 continue
17580 }
17581 _ = o0.Args[1]
17582 o1 := o0.Args[0]
17583 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17584 continue
17585 }
17586 _ = o1.Args[1]
17587 o2 := o1.Args[0]
17588 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
17589 continue
17590 }
17591 _ = o2.Args[1]
17592 o3 := o2.Args[0]
17593 if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
17594 continue
17595 }
17596 _ = o3.Args[1]
17597 o4 := o3.Args[0]
17598 if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
17599 continue
17600 }
17601 _ = o4.Args[1]
17602 o5 := o4.Args[0]
17603 if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
17604 continue
17605 }
17606 _ = o5.Args[1]
17607 s0 := o5.Args[0]
17608 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
17609 continue
17610 }
17611 y0 := s0.Args[0]
17612 if y0.Op != OpARM64MOVDnop {
17613 continue
17614 }
17615 x0 := y0.Args[0]
17616 if x0.Op != OpARM64MOVBUload {
17617 continue
17618 }
17619 i0 := auxIntToInt32(x0.AuxInt)
17620 s := auxToSym(x0.Aux)
17621 mem := x0.Args[1]
17622 p := x0.Args[0]
17623 y1 := o5.Args[1]
17624 if y1.Op != OpARM64MOVDnop {
17625 continue
17626 }
17627 x1 := y1.Args[0]
17628 if x1.Op != OpARM64MOVBUload {
17629 continue
17630 }
17631 i1 := auxIntToInt32(x1.AuxInt)
17632 if auxToSym(x1.Aux) != s {
17633 continue
17634 }
17635 _ = x1.Args[1]
17636 if p != x1.Args[0] || mem != x1.Args[1] {
17637 continue
17638 }
17639 y2 := o4.Args[1]
17640 if y2.Op != OpARM64MOVDnop {
17641 continue
17642 }
17643 x2 := y2.Args[0]
17644 if x2.Op != OpARM64MOVBUload {
17645 continue
17646 }
17647 i2 := auxIntToInt32(x2.AuxInt)
17648 if auxToSym(x2.Aux) != s {
17649 continue
17650 }
17651 _ = x2.Args[1]
17652 if p != x2.Args[0] || mem != x2.Args[1] {
17653 continue
17654 }
17655 y3 := o3.Args[1]
17656 if y3.Op != OpARM64MOVDnop {
17657 continue
17658 }
17659 x3 := y3.Args[0]
17660 if x3.Op != OpARM64MOVBUload {
17661 continue
17662 }
17663 i3 := auxIntToInt32(x3.AuxInt)
17664 if auxToSym(x3.Aux) != s {
17665 continue
17666 }
17667 _ = x3.Args[1]
17668 if p != x3.Args[0] || mem != x3.Args[1] {
17669 continue
17670 }
17671 y4 := o2.Args[1]
17672 if y4.Op != OpARM64MOVDnop {
17673 continue
17674 }
17675 x4 := y4.Args[0]
17676 if x4.Op != OpARM64MOVBUload {
17677 continue
17678 }
17679 i4 := auxIntToInt32(x4.AuxInt)
17680 if auxToSym(x4.Aux) != s {
17681 continue
17682 }
17683 _ = x4.Args[1]
17684 if p != x4.Args[0] || mem != x4.Args[1] {
17685 continue
17686 }
17687 y5 := o1.Args[1]
17688 if y5.Op != OpARM64MOVDnop {
17689 continue
17690 }
17691 x5 := y5.Args[0]
17692 if x5.Op != OpARM64MOVBUload {
17693 continue
17694 }
17695 i5 := auxIntToInt32(x5.AuxInt)
17696 if auxToSym(x5.Aux) != s {
17697 continue
17698 }
17699 _ = x5.Args[1]
17700 if p != x5.Args[0] || mem != x5.Args[1] {
17701 continue
17702 }
17703 y6 := o0.Args[1]
17704 if y6.Op != OpARM64MOVDnop {
17705 continue
17706 }
17707 x6 := y6.Args[0]
17708 if x6.Op != OpARM64MOVBUload {
17709 continue
17710 }
17711 i6 := auxIntToInt32(x6.AuxInt)
17712 if auxToSym(x6.Aux) != s {
17713 continue
17714 }
17715 _ = x6.Args[1]
17716 if p != x6.Args[0] || mem != x6.Args[1] {
17717 continue
17718 }
17719 y7 := v_1
17720 if y7.Op != OpARM64MOVDnop {
17721 continue
17722 }
17723 x7 := y7.Args[0]
17724 if x7.Op != OpARM64MOVBUload {
17725 continue
17726 }
17727 i7 := auxIntToInt32(x7.AuxInt)
17728 if auxToSym(x7.Aux) != s {
17729 continue
17730 }
17731 _ = x7.Args[1]
17732 if p != x7.Args[0] || mem != x7.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
17733 continue
17734 }
17735 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
17736 v0 := b.NewValue0(x7.Pos, OpARM64REV, t)
17737 v.copyOf(v0)
17738 v1 := b.NewValue0(x7.Pos, OpARM64MOVDload, t)
17739 v1.Aux = symToAux(s)
17740 v2 := b.NewValue0(x7.Pos, OpOffPtr, p.Type)
17741 v2.AuxInt = int64ToAuxInt(int64(i0))
17742 v2.AddArg(p)
17743 v1.AddArg2(v2, mem)
17744 v0.AddArg(v1)
17745 return true
17746 }
17747 break
17748 }
17749
17750
17751
17752 for {
17753 t := v.Type
17754 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17755 o0 := v_0
17756 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17757 continue
17758 }
17759 _ = o0.Args[1]
17760 o1 := o0.Args[0]
17761 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17762 continue
17763 }
17764 _ = o1.Args[1]
17765 o2 := o1.Args[0]
17766 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
17767 continue
17768 }
17769 _ = o2.Args[1]
17770 o3 := o2.Args[0]
17771 if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
17772 continue
17773 }
17774 _ = o3.Args[1]
17775 o4 := o3.Args[0]
17776 if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
17777 continue
17778 }
17779 _ = o4.Args[1]
17780 o5 := o4.Args[0]
17781 if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
17782 continue
17783 }
17784 _ = o5.Args[1]
17785 s0 := o5.Args[0]
17786 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
17787 continue
17788 }
17789 y0 := s0.Args[0]
17790 if y0.Op != OpARM64MOVDnop {
17791 continue
17792 }
17793 x0 := y0.Args[0]
17794 if x0.Op != OpARM64MOVBUloadidx {
17795 continue
17796 }
17797 mem := x0.Args[2]
17798 ptr0 := x0.Args[0]
17799 idx0 := x0.Args[1]
17800 y1 := o5.Args[1]
17801 if y1.Op != OpARM64MOVDnop {
17802 continue
17803 }
17804 x1 := y1.Args[0]
17805 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 {
17806 continue
17807 }
17808 s := auxToSym(x1.Aux)
17809 _ = x1.Args[1]
17810 p1 := x1.Args[0]
17811 if p1.Op != OpARM64ADD {
17812 continue
17813 }
17814 _ = p1.Args[1]
17815 p1_0 := p1.Args[0]
17816 p1_1 := p1.Args[1]
17817 for _i1 := 0; _i1 <= 1; _i1, p1_0, p1_1 = _i1+1, p1_1, p1_0 {
17818 ptr1 := p1_0
17819 idx1 := p1_1
17820 if mem != x1.Args[1] {
17821 continue
17822 }
17823 y2 := o4.Args[1]
17824 if y2.Op != OpARM64MOVDnop {
17825 continue
17826 }
17827 x2 := y2.Args[0]
17828 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 2 || auxToSym(x2.Aux) != s {
17829 continue
17830 }
17831 _ = x2.Args[1]
17832 p := x2.Args[0]
17833 if mem != x2.Args[1] {
17834 continue
17835 }
17836 y3 := o3.Args[1]
17837 if y3.Op != OpARM64MOVDnop {
17838 continue
17839 }
17840 x3 := y3.Args[0]
17841 if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
17842 continue
17843 }
17844 _ = x3.Args[1]
17845 if p != x3.Args[0] || mem != x3.Args[1] {
17846 continue
17847 }
17848 y4 := o2.Args[1]
17849 if y4.Op != OpARM64MOVDnop {
17850 continue
17851 }
17852 x4 := y4.Args[0]
17853 if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 4 || auxToSym(x4.Aux) != s {
17854 continue
17855 }
17856 _ = x4.Args[1]
17857 if p != x4.Args[0] || mem != x4.Args[1] {
17858 continue
17859 }
17860 y5 := o1.Args[1]
17861 if y5.Op != OpARM64MOVDnop {
17862 continue
17863 }
17864 x5 := y5.Args[0]
17865 if x5.Op != OpARM64MOVBUload || auxIntToInt32(x5.AuxInt) != 5 || auxToSym(x5.Aux) != s {
17866 continue
17867 }
17868 _ = x5.Args[1]
17869 if p != x5.Args[0] || mem != x5.Args[1] {
17870 continue
17871 }
17872 y6 := o0.Args[1]
17873 if y6.Op != OpARM64MOVDnop {
17874 continue
17875 }
17876 x6 := y6.Args[0]
17877 if x6.Op != OpARM64MOVBUload || auxIntToInt32(x6.AuxInt) != 6 || auxToSym(x6.Aux) != s {
17878 continue
17879 }
17880 _ = x6.Args[1]
17881 if p != x6.Args[0] || mem != x6.Args[1] {
17882 continue
17883 }
17884 y7 := v_1
17885 if y7.Op != OpARM64MOVDnop {
17886 continue
17887 }
17888 x7 := y7.Args[0]
17889 if x7.Op != OpARM64MOVBUload || auxIntToInt32(x7.AuxInt) != 7 || auxToSym(x7.Aux) != s {
17890 continue
17891 }
17892 _ = x7.Args[1]
17893 if p != x7.Args[0] || mem != x7.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
17894 continue
17895 }
17896 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
17897 v0 := b.NewValue0(x7.Pos, OpARM64REV, t)
17898 v.copyOf(v0)
17899 v1 := b.NewValue0(x7.Pos, OpARM64MOVDloadidx, t)
17900 v1.AddArg3(ptr0, idx0, mem)
17901 v0.AddArg(v1)
17902 return true
17903 }
17904 }
17905 break
17906 }
17907
17908
17909
17910 for {
17911 t := v.Type
17912 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17913 o0 := v_0
17914 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 8 {
17915 continue
17916 }
17917 _ = o0.Args[1]
17918 o1 := o0.Args[0]
17919 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 16 {
17920 continue
17921 }
17922 _ = o1.Args[1]
17923 o2 := o1.Args[0]
17924 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 24 {
17925 continue
17926 }
17927 _ = o2.Args[1]
17928 o3 := o2.Args[0]
17929 if o3.Op != OpARM64ORshiftLL || auxIntToInt64(o3.AuxInt) != 32 {
17930 continue
17931 }
17932 _ = o3.Args[1]
17933 o4 := o3.Args[0]
17934 if o4.Op != OpARM64ORshiftLL || auxIntToInt64(o4.AuxInt) != 40 {
17935 continue
17936 }
17937 _ = o4.Args[1]
17938 o5 := o4.Args[0]
17939 if o5.Op != OpARM64ORshiftLL || auxIntToInt64(o5.AuxInt) != 48 {
17940 continue
17941 }
17942 _ = o5.Args[1]
17943 s0 := o5.Args[0]
17944 if s0.Op != OpARM64SLLconst || auxIntToInt64(s0.AuxInt) != 56 {
17945 continue
17946 }
17947 y0 := s0.Args[0]
17948 if y0.Op != OpARM64MOVDnop {
17949 continue
17950 }
17951 x0 := y0.Args[0]
17952 if x0.Op != OpARM64MOVBUloadidx {
17953 continue
17954 }
17955 mem := x0.Args[2]
17956 ptr := x0.Args[0]
17957 idx := x0.Args[1]
17958 y1 := o5.Args[1]
17959 if y1.Op != OpARM64MOVDnop {
17960 continue
17961 }
17962 x1 := y1.Args[0]
17963 if x1.Op != OpARM64MOVBUloadidx {
17964 continue
17965 }
17966 _ = x1.Args[2]
17967 if ptr != x1.Args[0] {
17968 continue
17969 }
17970 x1_1 := x1.Args[1]
17971 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] {
17972 continue
17973 }
17974 y2 := o4.Args[1]
17975 if y2.Op != OpARM64MOVDnop {
17976 continue
17977 }
17978 x2 := y2.Args[0]
17979 if x2.Op != OpARM64MOVBUloadidx {
17980 continue
17981 }
17982 _ = x2.Args[2]
17983 if ptr != x2.Args[0] {
17984 continue
17985 }
17986 x2_1 := x2.Args[1]
17987 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] {
17988 continue
17989 }
17990 y3 := o3.Args[1]
17991 if y3.Op != OpARM64MOVDnop {
17992 continue
17993 }
17994 x3 := y3.Args[0]
17995 if x3.Op != OpARM64MOVBUloadidx {
17996 continue
17997 }
17998 _ = x3.Args[2]
17999 if ptr != x3.Args[0] {
18000 continue
18001 }
18002 x3_1 := x3.Args[1]
18003 if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] {
18004 continue
18005 }
18006 y4 := o2.Args[1]
18007 if y4.Op != OpARM64MOVDnop {
18008 continue
18009 }
18010 x4 := y4.Args[0]
18011 if x4.Op != OpARM64MOVBUloadidx {
18012 continue
18013 }
18014 _ = x4.Args[2]
18015 if ptr != x4.Args[0] {
18016 continue
18017 }
18018 x4_1 := x4.Args[1]
18019 if x4_1.Op != OpARM64ADDconst || auxIntToInt64(x4_1.AuxInt) != 4 || idx != x4_1.Args[0] || mem != x4.Args[2] {
18020 continue
18021 }
18022 y5 := o1.Args[1]
18023 if y5.Op != OpARM64MOVDnop {
18024 continue
18025 }
18026 x5 := y5.Args[0]
18027 if x5.Op != OpARM64MOVBUloadidx {
18028 continue
18029 }
18030 _ = x5.Args[2]
18031 if ptr != x5.Args[0] {
18032 continue
18033 }
18034 x5_1 := x5.Args[1]
18035 if x5_1.Op != OpARM64ADDconst || auxIntToInt64(x5_1.AuxInt) != 5 || idx != x5_1.Args[0] || mem != x5.Args[2] {
18036 continue
18037 }
18038 y6 := o0.Args[1]
18039 if y6.Op != OpARM64MOVDnop {
18040 continue
18041 }
18042 x6 := y6.Args[0]
18043 if x6.Op != OpARM64MOVBUloadidx {
18044 continue
18045 }
18046 _ = x6.Args[2]
18047 if ptr != x6.Args[0] {
18048 continue
18049 }
18050 x6_1 := x6.Args[1]
18051 if x6_1.Op != OpARM64ADDconst || auxIntToInt64(x6_1.AuxInt) != 6 || idx != x6_1.Args[0] || mem != x6.Args[2] {
18052 continue
18053 }
18054 y7 := v_1
18055 if y7.Op != OpARM64MOVDnop {
18056 continue
18057 }
18058 x7 := y7.Args[0]
18059 if x7.Op != OpARM64MOVBUloadidx {
18060 continue
18061 }
18062 _ = x7.Args[2]
18063 if ptr != x7.Args[0] {
18064 continue
18065 }
18066 x7_1 := x7.Args[1]
18067 if x7_1.Op != OpARM64ADDconst || auxIntToInt64(x7_1.AuxInt) != 7 || idx != x7_1.Args[0] || mem != x7.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0, x1, x2, x3, x4, x5, x6, x7, y0, y1, y2, y3, y4, y5, y6, y7, o0, o1, o2, o3, o4, o5, s0)) {
18068 continue
18069 }
18070 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
18071 v0 := b.NewValue0(v.Pos, OpARM64REV, t)
18072 v.copyOf(v0)
18073 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
18074 v1.AddArg3(ptr, idx, mem)
18075 v0.AddArg(v1)
18076 return true
18077 }
18078 break
18079 }
18080 return false
18081 }
18082 func rewriteValueARM64_OpARM64ORN(v *Value) bool {
18083 v_1 := v.Args[1]
18084 v_0 := v.Args[0]
18085
18086
18087 for {
18088 x := v_0
18089 if v_1.Op != OpARM64MOVDconst {
18090 break
18091 }
18092 c := auxIntToInt64(v_1.AuxInt)
18093 v.reset(OpARM64ORconst)
18094 v.AuxInt = int64ToAuxInt(^c)
18095 v.AddArg(x)
18096 return true
18097 }
18098
18099
18100 for {
18101 x := v_0
18102 if x != v_1 {
18103 break
18104 }
18105 v.reset(OpARM64MOVDconst)
18106 v.AuxInt = int64ToAuxInt(-1)
18107 return true
18108 }
18109
18110
18111
18112 for {
18113 x0 := v_0
18114 x1 := v_1
18115 if x1.Op != OpARM64SLLconst {
18116 break
18117 }
18118 c := auxIntToInt64(x1.AuxInt)
18119 y := x1.Args[0]
18120 if !(clobberIfDead(x1)) {
18121 break
18122 }
18123 v.reset(OpARM64ORNshiftLL)
18124 v.AuxInt = int64ToAuxInt(c)
18125 v.AddArg2(x0, y)
18126 return true
18127 }
18128
18129
18130
18131 for {
18132 x0 := v_0
18133 x1 := v_1
18134 if x1.Op != OpARM64SRLconst {
18135 break
18136 }
18137 c := auxIntToInt64(x1.AuxInt)
18138 y := x1.Args[0]
18139 if !(clobberIfDead(x1)) {
18140 break
18141 }
18142 v.reset(OpARM64ORNshiftRL)
18143 v.AuxInt = int64ToAuxInt(c)
18144 v.AddArg2(x0, y)
18145 return true
18146 }
18147
18148
18149
18150 for {
18151 x0 := v_0
18152 x1 := v_1
18153 if x1.Op != OpARM64SRAconst {
18154 break
18155 }
18156 c := auxIntToInt64(x1.AuxInt)
18157 y := x1.Args[0]
18158 if !(clobberIfDead(x1)) {
18159 break
18160 }
18161 v.reset(OpARM64ORNshiftRA)
18162 v.AuxInt = int64ToAuxInt(c)
18163 v.AddArg2(x0, y)
18164 return true
18165 }
18166
18167
18168
18169 for {
18170 x0 := v_0
18171 x1 := v_1
18172 if x1.Op != OpARM64RORconst {
18173 break
18174 }
18175 c := auxIntToInt64(x1.AuxInt)
18176 y := x1.Args[0]
18177 if !(clobberIfDead(x1)) {
18178 break
18179 }
18180 v.reset(OpARM64ORNshiftRO)
18181 v.AuxInt = int64ToAuxInt(c)
18182 v.AddArg2(x0, y)
18183 return true
18184 }
18185 return false
18186 }
18187 func rewriteValueARM64_OpARM64ORNshiftLL(v *Value) bool {
18188 v_1 := v.Args[1]
18189 v_0 := v.Args[0]
18190
18191
18192 for {
18193 d := auxIntToInt64(v.AuxInt)
18194 x := v_0
18195 if v_1.Op != OpARM64MOVDconst {
18196 break
18197 }
18198 c := auxIntToInt64(v_1.AuxInt)
18199 v.reset(OpARM64ORconst)
18200 v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
18201 v.AddArg(x)
18202 return true
18203 }
18204
18205
18206 for {
18207 c := auxIntToInt64(v.AuxInt)
18208 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
18209 break
18210 }
18211 x := v_0.Args[0]
18212 if x != v_1 {
18213 break
18214 }
18215 v.reset(OpARM64MOVDconst)
18216 v.AuxInt = int64ToAuxInt(-1)
18217 return true
18218 }
18219 return false
18220 }
18221 func rewriteValueARM64_OpARM64ORNshiftRA(v *Value) bool {
18222 v_1 := v.Args[1]
18223 v_0 := v.Args[0]
18224
18225
18226 for {
18227 d := auxIntToInt64(v.AuxInt)
18228 x := v_0
18229 if v_1.Op != OpARM64MOVDconst {
18230 break
18231 }
18232 c := auxIntToInt64(v_1.AuxInt)
18233 v.reset(OpARM64ORconst)
18234 v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
18235 v.AddArg(x)
18236 return true
18237 }
18238
18239
18240 for {
18241 c := auxIntToInt64(v.AuxInt)
18242 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
18243 break
18244 }
18245 x := v_0.Args[0]
18246 if x != v_1 {
18247 break
18248 }
18249 v.reset(OpARM64MOVDconst)
18250 v.AuxInt = int64ToAuxInt(-1)
18251 return true
18252 }
18253 return false
18254 }
18255 func rewriteValueARM64_OpARM64ORNshiftRL(v *Value) bool {
18256 v_1 := v.Args[1]
18257 v_0 := v.Args[0]
18258
18259
18260 for {
18261 d := auxIntToInt64(v.AuxInt)
18262 x := v_0
18263 if v_1.Op != OpARM64MOVDconst {
18264 break
18265 }
18266 c := auxIntToInt64(v_1.AuxInt)
18267 v.reset(OpARM64ORconst)
18268 v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
18269 v.AddArg(x)
18270 return true
18271 }
18272
18273
18274 for {
18275 c := auxIntToInt64(v.AuxInt)
18276 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
18277 break
18278 }
18279 x := v_0.Args[0]
18280 if x != v_1 {
18281 break
18282 }
18283 v.reset(OpARM64MOVDconst)
18284 v.AuxInt = int64ToAuxInt(-1)
18285 return true
18286 }
18287 return false
18288 }
18289 func rewriteValueARM64_OpARM64ORNshiftRO(v *Value) bool {
18290 v_1 := v.Args[1]
18291 v_0 := v.Args[0]
18292
18293
18294 for {
18295 d := auxIntToInt64(v.AuxInt)
18296 x := v_0
18297 if v_1.Op != OpARM64MOVDconst {
18298 break
18299 }
18300 c := auxIntToInt64(v_1.AuxInt)
18301 v.reset(OpARM64ORconst)
18302 v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
18303 v.AddArg(x)
18304 return true
18305 }
18306
18307
18308 for {
18309 c := auxIntToInt64(v.AuxInt)
18310 if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
18311 break
18312 }
18313 x := v_0.Args[0]
18314 if x != v_1 {
18315 break
18316 }
18317 v.reset(OpARM64MOVDconst)
18318 v.AuxInt = int64ToAuxInt(-1)
18319 return true
18320 }
18321 return false
18322 }
18323 func rewriteValueARM64_OpARM64ORconst(v *Value) bool {
18324 v_0 := v.Args[0]
18325
18326
18327 for {
18328 if auxIntToInt64(v.AuxInt) != 0 {
18329 break
18330 }
18331 x := v_0
18332 v.copyOf(x)
18333 return true
18334 }
18335
18336
18337 for {
18338 if auxIntToInt64(v.AuxInt) != -1 {
18339 break
18340 }
18341 v.reset(OpARM64MOVDconst)
18342 v.AuxInt = int64ToAuxInt(-1)
18343 return true
18344 }
18345
18346
18347 for {
18348 c := auxIntToInt64(v.AuxInt)
18349 if v_0.Op != OpARM64MOVDconst {
18350 break
18351 }
18352 d := auxIntToInt64(v_0.AuxInt)
18353 v.reset(OpARM64MOVDconst)
18354 v.AuxInt = int64ToAuxInt(c | d)
18355 return true
18356 }
18357
18358
18359 for {
18360 c := auxIntToInt64(v.AuxInt)
18361 if v_0.Op != OpARM64ORconst {
18362 break
18363 }
18364 d := auxIntToInt64(v_0.AuxInt)
18365 x := v_0.Args[0]
18366 v.reset(OpARM64ORconst)
18367 v.AuxInt = int64ToAuxInt(c | d)
18368 v.AddArg(x)
18369 return true
18370 }
18371
18372
18373
18374 for {
18375 c1 := auxIntToInt64(v.AuxInt)
18376 if v_0.Op != OpARM64ANDconst {
18377 break
18378 }
18379 c2 := auxIntToInt64(v_0.AuxInt)
18380 x := v_0.Args[0]
18381 if !(c2|c1 == ^0) {
18382 break
18383 }
18384 v.reset(OpARM64ORconst)
18385 v.AuxInt = int64ToAuxInt(c1)
18386 v.AddArg(x)
18387 return true
18388 }
18389 return false
18390 }
18391 func rewriteValueARM64_OpARM64ORshiftLL(v *Value) bool {
18392 v_1 := v.Args[1]
18393 v_0 := v.Args[0]
18394 b := v.Block
18395 typ := &b.Func.Config.Types
18396
18397
18398 for {
18399 d := auxIntToInt64(v.AuxInt)
18400 if v_0.Op != OpARM64MOVDconst {
18401 break
18402 }
18403 c := auxIntToInt64(v_0.AuxInt)
18404 x := v_1
18405 v.reset(OpARM64ORconst)
18406 v.AuxInt = int64ToAuxInt(c)
18407 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
18408 v0.AuxInt = int64ToAuxInt(d)
18409 v0.AddArg(x)
18410 v.AddArg(v0)
18411 return true
18412 }
18413
18414
18415 for {
18416 d := auxIntToInt64(v.AuxInt)
18417 x := v_0
18418 if v_1.Op != OpARM64MOVDconst {
18419 break
18420 }
18421 c := auxIntToInt64(v_1.AuxInt)
18422 v.reset(OpARM64ORconst)
18423 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
18424 v.AddArg(x)
18425 return true
18426 }
18427
18428
18429 for {
18430 c := auxIntToInt64(v.AuxInt)
18431 y := v_0
18432 if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
18433 break
18434 }
18435 x := y.Args[0]
18436 if x != v_1 {
18437 break
18438 }
18439 v.copyOf(y)
18440 return true
18441 }
18442
18443
18444 for {
18445 c := auxIntToInt64(v.AuxInt)
18446 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
18447 break
18448 }
18449 x := v_0.Args[0]
18450 if x != v_1 {
18451 break
18452 }
18453 v.reset(OpARM64RORconst)
18454 v.AuxInt = int64ToAuxInt(64 - c)
18455 v.AddArg(x)
18456 return true
18457 }
18458
18459
18460
18461 for {
18462 t := v.Type
18463 c := auxIntToInt64(v.AuxInt)
18464 if v_0.Op != OpARM64UBFX {
18465 break
18466 }
18467 bfc := auxIntToArm64BitField(v_0.AuxInt)
18468 x := v_0.Args[0]
18469 if x != v_1 || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
18470 break
18471 }
18472 v.reset(OpARM64RORWconst)
18473 v.AuxInt = int64ToAuxInt(32 - c)
18474 v.AddArg(x)
18475 return true
18476 }
18477
18478
18479 for {
18480 if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
18481 break
18482 }
18483 x := v_0.Args[0]
18484 if x != v_1 {
18485 break
18486 }
18487 v.reset(OpARM64REV16W)
18488 v.AddArg(x)
18489 return true
18490 }
18491
18492
18493
18494 for {
18495 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
18496 break
18497 }
18498 v_0_0 := v_0.Args[0]
18499 if v_0_0.Op != OpARM64ANDconst {
18500 break
18501 }
18502 c1 := auxIntToInt64(v_0_0.AuxInt)
18503 x := v_0_0.Args[0]
18504 if v_1.Op != OpARM64ANDconst {
18505 break
18506 }
18507 c2 := auxIntToInt64(v_1.AuxInt)
18508 if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
18509 break
18510 }
18511 v.reset(OpARM64REV16W)
18512 v.AddArg(x)
18513 return true
18514 }
18515
18516
18517
18518 for {
18519 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
18520 break
18521 }
18522 v_0_0 := v_0.Args[0]
18523 if v_0_0.Op != OpARM64ANDconst {
18524 break
18525 }
18526 c1 := auxIntToInt64(v_0_0.AuxInt)
18527 x := v_0_0.Args[0]
18528 if v_1.Op != OpARM64ANDconst {
18529 break
18530 }
18531 c2 := auxIntToInt64(v_1.AuxInt)
18532 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
18533 break
18534 }
18535 v.reset(OpARM64REV16)
18536 v.AddArg(x)
18537 return true
18538 }
18539
18540
18541
18542 for {
18543 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
18544 break
18545 }
18546 v_0_0 := v_0.Args[0]
18547 if v_0_0.Op != OpARM64ANDconst {
18548 break
18549 }
18550 c1 := auxIntToInt64(v_0_0.AuxInt)
18551 x := v_0_0.Args[0]
18552 if v_1.Op != OpARM64ANDconst {
18553 break
18554 }
18555 c2 := auxIntToInt64(v_1.AuxInt)
18556 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
18557 break
18558 }
18559 v.reset(OpARM64REV16)
18560 v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
18561 v0.AuxInt = int64ToAuxInt(0xffffffff)
18562 v0.AddArg(x)
18563 v.AddArg(v0)
18564 return true
18565 }
18566
18567
18568 for {
18569 c := auxIntToInt64(v.AuxInt)
18570 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
18571 break
18572 }
18573 x := v_0.Args[0]
18574 x2 := v_1
18575 v.reset(OpARM64EXTRconst)
18576 v.AuxInt = int64ToAuxInt(64 - c)
18577 v.AddArg2(x2, x)
18578 return true
18579 }
18580
18581
18582
18583 for {
18584 t := v.Type
18585 c := auxIntToInt64(v.AuxInt)
18586 if v_0.Op != OpARM64UBFX {
18587 break
18588 }
18589 bfc := auxIntToArm64BitField(v_0.AuxInt)
18590 x := v_0.Args[0]
18591 x2 := v_1
18592 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
18593 break
18594 }
18595 v.reset(OpARM64EXTRWconst)
18596 v.AuxInt = int64ToAuxInt(32 - c)
18597 v.AddArg2(x2, x)
18598 return true
18599 }
18600
18601
18602
18603 for {
18604 sc := auxIntToInt64(v.AuxInt)
18605 if v_0.Op != OpARM64UBFX {
18606 break
18607 }
18608 bfc := auxIntToArm64BitField(v_0.AuxInt)
18609 x := v_0.Args[0]
18610 if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != sc {
18611 break
18612 }
18613 y := v_1.Args[0]
18614 if !(sc == bfc.getARM64BFwidth()) {
18615 break
18616 }
18617 v.reset(OpARM64BFXIL)
18618 v.AuxInt = arm64BitFieldToAuxInt(bfc)
18619 v.AddArg2(y, x)
18620 return true
18621 }
18622
18623
18624
18625 for {
18626 t := v.Type
18627 if auxIntToInt64(v.AuxInt) != 8 {
18628 break
18629 }
18630 y0 := v_0
18631 if y0.Op != OpARM64MOVDnop {
18632 break
18633 }
18634 x0 := y0.Args[0]
18635 if x0.Op != OpARM64MOVBUload {
18636 break
18637 }
18638 i0 := auxIntToInt32(x0.AuxInt)
18639 s := auxToSym(x0.Aux)
18640 mem := x0.Args[1]
18641 p := x0.Args[0]
18642 y1 := v_1
18643 if y1.Op != OpARM64MOVDnop {
18644 break
18645 }
18646 x1 := y1.Args[0]
18647 if x1.Op != OpARM64MOVBUload {
18648 break
18649 }
18650 i1 := auxIntToInt32(x1.AuxInt)
18651 if auxToSym(x1.Aux) != s {
18652 break
18653 }
18654 _ = x1.Args[1]
18655 if p != x1.Args[0] || mem != x1.Args[1] || !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
18656 break
18657 }
18658 b = mergePoint(b, x0, x1)
18659 v0 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
18660 v.copyOf(v0)
18661 v0.Aux = symToAux(s)
18662 v1 := b.NewValue0(x1.Pos, OpOffPtr, p.Type)
18663 v1.AuxInt = int64ToAuxInt(int64(i0))
18664 v1.AddArg(p)
18665 v0.AddArg2(v1, mem)
18666 return true
18667 }
18668
18669
18670
18671 for {
18672 t := v.Type
18673 if auxIntToInt64(v.AuxInt) != 8 {
18674 break
18675 }
18676 y0 := v_0
18677 if y0.Op != OpARM64MOVDnop {
18678 break
18679 }
18680 x0 := y0.Args[0]
18681 if x0.Op != OpARM64MOVBUloadidx {
18682 break
18683 }
18684 mem := x0.Args[2]
18685 ptr0 := x0.Args[0]
18686 idx0 := x0.Args[1]
18687 y1 := v_1
18688 if y1.Op != OpARM64MOVDnop {
18689 break
18690 }
18691 x1 := y1.Args[0]
18692 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 {
18693 break
18694 }
18695 s := auxToSym(x1.Aux)
18696 _ = x1.Args[1]
18697 p1 := x1.Args[0]
18698 if p1.Op != OpARM64ADD {
18699 break
18700 }
18701 _ = p1.Args[1]
18702 p1_0 := p1.Args[0]
18703 p1_1 := p1.Args[1]
18704 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
18705 ptr1 := p1_0
18706 idx1 := p1_1
18707 if mem != x1.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0, x1, y0, y1)) {
18708 continue
18709 }
18710 b = mergePoint(b, x0, x1)
18711 v0 := b.NewValue0(x1.Pos, OpARM64MOVHUloadidx, t)
18712 v.copyOf(v0)
18713 v0.AddArg3(ptr0, idx0, mem)
18714 return true
18715 }
18716 break
18717 }
18718
18719
18720
18721 for {
18722 t := v.Type
18723 if auxIntToInt64(v.AuxInt) != 8 {
18724 break
18725 }
18726 y0 := v_0
18727 if y0.Op != OpARM64MOVDnop {
18728 break
18729 }
18730 x0 := y0.Args[0]
18731 if x0.Op != OpARM64MOVBUloadidx {
18732 break
18733 }
18734 mem := x0.Args[2]
18735 ptr := x0.Args[0]
18736 idx := x0.Args[1]
18737 y1 := v_1
18738 if y1.Op != OpARM64MOVDnop {
18739 break
18740 }
18741 x1 := y1.Args[0]
18742 if x1.Op != OpARM64MOVBUloadidx {
18743 break
18744 }
18745 _ = x1.Args[2]
18746 if ptr != x1.Args[0] {
18747 break
18748 }
18749 x1_1 := x1.Args[1]
18750 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
18751 break
18752 }
18753 b = mergePoint(b, x0, x1)
18754 v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
18755 v.copyOf(v0)
18756 v0.AddArg3(ptr, idx, mem)
18757 return true
18758 }
18759
18760
18761
18762 for {
18763 t := v.Type
18764 if auxIntToInt64(v.AuxInt) != 24 {
18765 break
18766 }
18767 o0 := v_0
18768 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
18769 break
18770 }
18771 _ = o0.Args[1]
18772 x0 := o0.Args[0]
18773 if x0.Op != OpARM64MOVHUload {
18774 break
18775 }
18776 i0 := auxIntToInt32(x0.AuxInt)
18777 s := auxToSym(x0.Aux)
18778 mem := x0.Args[1]
18779 p := x0.Args[0]
18780 y1 := o0.Args[1]
18781 if y1.Op != OpARM64MOVDnop {
18782 break
18783 }
18784 x1 := y1.Args[0]
18785 if x1.Op != OpARM64MOVBUload {
18786 break
18787 }
18788 i2 := auxIntToInt32(x1.AuxInt)
18789 if auxToSym(x1.Aux) != s {
18790 break
18791 }
18792 _ = x1.Args[1]
18793 if p != x1.Args[0] || mem != x1.Args[1] {
18794 break
18795 }
18796 y2 := v_1
18797 if y2.Op != OpARM64MOVDnop {
18798 break
18799 }
18800 x2 := y2.Args[0]
18801 if x2.Op != OpARM64MOVBUload {
18802 break
18803 }
18804 i3 := auxIntToInt32(x2.AuxInt)
18805 if auxToSym(x2.Aux) != s {
18806 break
18807 }
18808 _ = x2.Args[1]
18809 if p != x2.Args[0] || mem != x2.Args[1] || !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y1, y2, o0)) {
18810 break
18811 }
18812 b = mergePoint(b, x0, x1, x2)
18813 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
18814 v.copyOf(v0)
18815 v0.Aux = symToAux(s)
18816 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
18817 v1.AuxInt = int64ToAuxInt(int64(i0))
18818 v1.AddArg(p)
18819 v0.AddArg2(v1, mem)
18820 return true
18821 }
18822
18823
18824
18825 for {
18826 t := v.Type
18827 if auxIntToInt64(v.AuxInt) != 24 {
18828 break
18829 }
18830 o0 := v_0
18831 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
18832 break
18833 }
18834 _ = o0.Args[1]
18835 x0 := o0.Args[0]
18836 if x0.Op != OpARM64MOVHUloadidx {
18837 break
18838 }
18839 mem := x0.Args[2]
18840 ptr0 := x0.Args[0]
18841 idx0 := x0.Args[1]
18842 y1 := o0.Args[1]
18843 if y1.Op != OpARM64MOVDnop {
18844 break
18845 }
18846 x1 := y1.Args[0]
18847 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 2 {
18848 break
18849 }
18850 s := auxToSym(x1.Aux)
18851 _ = x1.Args[1]
18852 p1 := x1.Args[0]
18853 if p1.Op != OpARM64ADD {
18854 break
18855 }
18856 _ = p1.Args[1]
18857 p1_0 := p1.Args[0]
18858 p1_1 := p1.Args[1]
18859 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
18860 ptr1 := p1_0
18861 idx1 := p1_1
18862 if mem != x1.Args[1] {
18863 continue
18864 }
18865 y2 := v_1
18866 if y2.Op != OpARM64MOVDnop {
18867 continue
18868 }
18869 x2 := y2.Args[0]
18870 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 3 || auxToSym(x2.Aux) != s {
18871 continue
18872 }
18873 _ = x2.Args[1]
18874 p := x2.Args[0]
18875 if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, y1, y2, o0)) {
18876 continue
18877 }
18878 b = mergePoint(b, x0, x1, x2)
18879 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
18880 v.copyOf(v0)
18881 v0.AddArg3(ptr0, idx0, mem)
18882 return true
18883 }
18884 break
18885 }
18886
18887
18888
18889 for {
18890 t := v.Type
18891 if auxIntToInt64(v.AuxInt) != 24 {
18892 break
18893 }
18894 o0 := v_0
18895 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
18896 break
18897 }
18898 _ = o0.Args[1]
18899 x0 := o0.Args[0]
18900 if x0.Op != OpARM64MOVHUloadidx {
18901 break
18902 }
18903 mem := x0.Args[2]
18904 ptr := x0.Args[0]
18905 idx := x0.Args[1]
18906 y1 := o0.Args[1]
18907 if y1.Op != OpARM64MOVDnop {
18908 break
18909 }
18910 x1 := y1.Args[0]
18911 if x1.Op != OpARM64MOVBUloadidx {
18912 break
18913 }
18914 _ = x1.Args[2]
18915 if ptr != x1.Args[0] {
18916 break
18917 }
18918 x1_1 := x1.Args[1]
18919 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] {
18920 break
18921 }
18922 y2 := v_1
18923 if y2.Op != OpARM64MOVDnop {
18924 break
18925 }
18926 x2 := y2.Args[0]
18927 if x2.Op != OpARM64MOVBUloadidx {
18928 break
18929 }
18930 _ = x2.Args[2]
18931 if ptr != x2.Args[0] {
18932 break
18933 }
18934 x2_1 := x2.Args[1]
18935 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 3 || idx != x2_1.Args[0] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y1, y2, o0)) {
18936 break
18937 }
18938 b = mergePoint(b, x0, x1, x2)
18939 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
18940 v.copyOf(v0)
18941 v0.AddArg3(ptr, idx, mem)
18942 return true
18943 }
18944
18945
18946
18947 for {
18948 t := v.Type
18949 if auxIntToInt64(v.AuxInt) != 24 {
18950 break
18951 }
18952 o0 := v_0
18953 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
18954 break
18955 }
18956 _ = o0.Args[1]
18957 x0 := o0.Args[0]
18958 if x0.Op != OpARM64MOVHUloadidx2 {
18959 break
18960 }
18961 mem := x0.Args[2]
18962 ptr0 := x0.Args[0]
18963 idx0 := x0.Args[1]
18964 y1 := o0.Args[1]
18965 if y1.Op != OpARM64MOVDnop {
18966 break
18967 }
18968 x1 := y1.Args[0]
18969 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 2 {
18970 break
18971 }
18972 s := auxToSym(x1.Aux)
18973 _ = x1.Args[1]
18974 p1 := x1.Args[0]
18975 if p1.Op != OpARM64ADDshiftLL || auxIntToInt64(p1.AuxInt) != 1 {
18976 break
18977 }
18978 idx1 := p1.Args[1]
18979 ptr1 := p1.Args[0]
18980 if mem != x1.Args[1] {
18981 break
18982 }
18983 y2 := v_1
18984 if y2.Op != OpARM64MOVDnop {
18985 break
18986 }
18987 x2 := y2.Args[0]
18988 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 3 || auxToSym(x2.Aux) != s {
18989 break
18990 }
18991 _ = x2.Args[1]
18992 p := x2.Args[0]
18993 if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0, x1, x2, y1, y2, o0)) {
18994 break
18995 }
18996 b = mergePoint(b, x0, x1, x2)
18997 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
18998 v.copyOf(v0)
18999 v1 := b.NewValue0(x2.Pos, OpARM64SLLconst, idx0.Type)
19000 v1.AuxInt = int64ToAuxInt(1)
19001 v1.AddArg(idx0)
19002 v0.AddArg3(ptr0, v1, mem)
19003 return true
19004 }
19005
19006
19007
19008 for {
19009 t := v.Type
19010 if auxIntToInt64(v.AuxInt) != 56 {
19011 break
19012 }
19013 o0 := v_0
19014 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19015 break
19016 }
19017 _ = o0.Args[1]
19018 o1 := o0.Args[0]
19019 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19020 break
19021 }
19022 _ = o1.Args[1]
19023 o2 := o1.Args[0]
19024 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19025 break
19026 }
19027 _ = o2.Args[1]
19028 x0 := o2.Args[0]
19029 if x0.Op != OpARM64MOVWUload {
19030 break
19031 }
19032 i0 := auxIntToInt32(x0.AuxInt)
19033 s := auxToSym(x0.Aux)
19034 mem := x0.Args[1]
19035 p := x0.Args[0]
19036 y1 := o2.Args[1]
19037 if y1.Op != OpARM64MOVDnop {
19038 break
19039 }
19040 x1 := y1.Args[0]
19041 if x1.Op != OpARM64MOVBUload {
19042 break
19043 }
19044 i4 := auxIntToInt32(x1.AuxInt)
19045 if auxToSym(x1.Aux) != s {
19046 break
19047 }
19048 _ = x1.Args[1]
19049 if p != x1.Args[0] || mem != x1.Args[1] {
19050 break
19051 }
19052 y2 := o1.Args[1]
19053 if y2.Op != OpARM64MOVDnop {
19054 break
19055 }
19056 x2 := y2.Args[0]
19057 if x2.Op != OpARM64MOVBUload {
19058 break
19059 }
19060 i5 := auxIntToInt32(x2.AuxInt)
19061 if auxToSym(x2.Aux) != s {
19062 break
19063 }
19064 _ = x2.Args[1]
19065 if p != x2.Args[0] || mem != x2.Args[1] {
19066 break
19067 }
19068 y3 := o0.Args[1]
19069 if y3.Op != OpARM64MOVDnop {
19070 break
19071 }
19072 x3 := y3.Args[0]
19073 if x3.Op != OpARM64MOVBUload {
19074 break
19075 }
19076 i6 := auxIntToInt32(x3.AuxInt)
19077 if auxToSym(x3.Aux) != s {
19078 break
19079 }
19080 _ = x3.Args[1]
19081 if p != x3.Args[0] || mem != x3.Args[1] {
19082 break
19083 }
19084 y4 := v_1
19085 if y4.Op != OpARM64MOVDnop {
19086 break
19087 }
19088 x4 := y4.Args[0]
19089 if x4.Op != OpARM64MOVBUload {
19090 break
19091 }
19092 i7 := auxIntToInt32(x4.AuxInt)
19093 if auxToSym(x4.Aux) != s {
19094 break
19095 }
19096 _ = x4.Args[1]
19097 if p != x4.Args[0] || mem != x4.Args[1] || !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
19098 break
19099 }
19100 b = mergePoint(b, x0, x1, x2, x3, x4)
19101 v0 := b.NewValue0(x4.Pos, OpARM64MOVDload, t)
19102 v.copyOf(v0)
19103 v0.Aux = symToAux(s)
19104 v1 := b.NewValue0(x4.Pos, OpOffPtr, p.Type)
19105 v1.AuxInt = int64ToAuxInt(int64(i0))
19106 v1.AddArg(p)
19107 v0.AddArg2(v1, mem)
19108 return true
19109 }
19110
19111
19112
19113 for {
19114 t := v.Type
19115 if auxIntToInt64(v.AuxInt) != 56 {
19116 break
19117 }
19118 o0 := v_0
19119 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19120 break
19121 }
19122 _ = o0.Args[1]
19123 o1 := o0.Args[0]
19124 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19125 break
19126 }
19127 _ = o1.Args[1]
19128 o2 := o1.Args[0]
19129 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19130 break
19131 }
19132 _ = o2.Args[1]
19133 x0 := o2.Args[0]
19134 if x0.Op != OpARM64MOVWUloadidx {
19135 break
19136 }
19137 mem := x0.Args[2]
19138 ptr0 := x0.Args[0]
19139 idx0 := x0.Args[1]
19140 y1 := o2.Args[1]
19141 if y1.Op != OpARM64MOVDnop {
19142 break
19143 }
19144 x1 := y1.Args[0]
19145 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 4 {
19146 break
19147 }
19148 s := auxToSym(x1.Aux)
19149 _ = x1.Args[1]
19150 p1 := x1.Args[0]
19151 if p1.Op != OpARM64ADD {
19152 break
19153 }
19154 _ = p1.Args[1]
19155 p1_0 := p1.Args[0]
19156 p1_1 := p1.Args[1]
19157 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
19158 ptr1 := p1_0
19159 idx1 := p1_1
19160 if mem != x1.Args[1] {
19161 continue
19162 }
19163 y2 := o1.Args[1]
19164 if y2.Op != OpARM64MOVDnop {
19165 continue
19166 }
19167 x2 := y2.Args[0]
19168 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 5 || auxToSym(x2.Aux) != s {
19169 continue
19170 }
19171 _ = x2.Args[1]
19172 p := x2.Args[0]
19173 if mem != x2.Args[1] {
19174 continue
19175 }
19176 y3 := o0.Args[1]
19177 if y3.Op != OpARM64MOVDnop {
19178 continue
19179 }
19180 x3 := y3.Args[0]
19181 if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 6 || auxToSym(x3.Aux) != s {
19182 continue
19183 }
19184 _ = x3.Args[1]
19185 if p != x3.Args[0] || mem != x3.Args[1] {
19186 continue
19187 }
19188 y4 := v_1
19189 if y4.Op != OpARM64MOVDnop {
19190 continue
19191 }
19192 x4 := y4.Args[0]
19193 if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 7 || auxToSym(x4.Aux) != s {
19194 continue
19195 }
19196 _ = x4.Args[1]
19197 if p != x4.Args[0] || mem != x4.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
19198 continue
19199 }
19200 b = mergePoint(b, x0, x1, x2, x3, x4)
19201 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t)
19202 v.copyOf(v0)
19203 v0.AddArg3(ptr0, idx0, mem)
19204 return true
19205 }
19206 break
19207 }
19208
19209
19210
19211 for {
19212 t := v.Type
19213 if auxIntToInt64(v.AuxInt) != 56 {
19214 break
19215 }
19216 o0 := v_0
19217 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19218 break
19219 }
19220 _ = o0.Args[1]
19221 o1 := o0.Args[0]
19222 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19223 break
19224 }
19225 _ = o1.Args[1]
19226 o2 := o1.Args[0]
19227 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19228 break
19229 }
19230 _ = o2.Args[1]
19231 x0 := o2.Args[0]
19232 if x0.Op != OpARM64MOVWUloadidx4 {
19233 break
19234 }
19235 mem := x0.Args[2]
19236 ptr0 := x0.Args[0]
19237 idx0 := x0.Args[1]
19238 y1 := o2.Args[1]
19239 if y1.Op != OpARM64MOVDnop {
19240 break
19241 }
19242 x1 := y1.Args[0]
19243 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 4 {
19244 break
19245 }
19246 s := auxToSym(x1.Aux)
19247 _ = x1.Args[1]
19248 p1 := x1.Args[0]
19249 if p1.Op != OpARM64ADDshiftLL || auxIntToInt64(p1.AuxInt) != 2 {
19250 break
19251 }
19252 idx1 := p1.Args[1]
19253 ptr1 := p1.Args[0]
19254 if mem != x1.Args[1] {
19255 break
19256 }
19257 y2 := o1.Args[1]
19258 if y2.Op != OpARM64MOVDnop {
19259 break
19260 }
19261 x2 := y2.Args[0]
19262 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 5 || auxToSym(x2.Aux) != s {
19263 break
19264 }
19265 _ = x2.Args[1]
19266 p := x2.Args[0]
19267 if mem != x2.Args[1] {
19268 break
19269 }
19270 y3 := o0.Args[1]
19271 if y3.Op != OpARM64MOVDnop {
19272 break
19273 }
19274 x3 := y3.Args[0]
19275 if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 6 || auxToSym(x3.Aux) != s {
19276 break
19277 }
19278 _ = x3.Args[1]
19279 if p != x3.Args[0] || mem != x3.Args[1] {
19280 break
19281 }
19282 y4 := v_1
19283 if y4.Op != OpARM64MOVDnop {
19284 break
19285 }
19286 x4 := y4.Args[0]
19287 if x4.Op != OpARM64MOVBUload || auxIntToInt32(x4.AuxInt) != 7 || auxToSym(x4.Aux) != s {
19288 break
19289 }
19290 _ = x4.Args[1]
19291 if p != x4.Args[0] || mem != x4.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
19292 break
19293 }
19294 b = mergePoint(b, x0, x1, x2, x3, x4)
19295 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t)
19296 v.copyOf(v0)
19297 v1 := b.NewValue0(x4.Pos, OpARM64SLLconst, idx0.Type)
19298 v1.AuxInt = int64ToAuxInt(2)
19299 v1.AddArg(idx0)
19300 v0.AddArg3(ptr0, v1, mem)
19301 return true
19302 }
19303
19304
19305
19306 for {
19307 t := v.Type
19308 if auxIntToInt64(v.AuxInt) != 56 {
19309 break
19310 }
19311 o0 := v_0
19312 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19313 break
19314 }
19315 _ = o0.Args[1]
19316 o1 := o0.Args[0]
19317 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19318 break
19319 }
19320 _ = o1.Args[1]
19321 o2 := o1.Args[0]
19322 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19323 break
19324 }
19325 _ = o2.Args[1]
19326 x0 := o2.Args[0]
19327 if x0.Op != OpARM64MOVWUloadidx {
19328 break
19329 }
19330 mem := x0.Args[2]
19331 ptr := x0.Args[0]
19332 idx := x0.Args[1]
19333 y1 := o2.Args[1]
19334 if y1.Op != OpARM64MOVDnop {
19335 break
19336 }
19337 x1 := y1.Args[0]
19338 if x1.Op != OpARM64MOVBUloadidx {
19339 break
19340 }
19341 _ = x1.Args[2]
19342 if ptr != x1.Args[0] {
19343 break
19344 }
19345 x1_1 := x1.Args[1]
19346 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 4 || idx != x1_1.Args[0] || mem != x1.Args[2] {
19347 break
19348 }
19349 y2 := o1.Args[1]
19350 if y2.Op != OpARM64MOVDnop {
19351 break
19352 }
19353 x2 := y2.Args[0]
19354 if x2.Op != OpARM64MOVBUloadidx {
19355 break
19356 }
19357 _ = x2.Args[2]
19358 if ptr != x2.Args[0] {
19359 break
19360 }
19361 x2_1 := x2.Args[1]
19362 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] {
19363 break
19364 }
19365 y3 := o0.Args[1]
19366 if y3.Op != OpARM64MOVDnop {
19367 break
19368 }
19369 x3 := y3.Args[0]
19370 if x3.Op != OpARM64MOVBUloadidx {
19371 break
19372 }
19373 _ = x3.Args[2]
19374 if ptr != x3.Args[0] {
19375 break
19376 }
19377 x3_1 := x3.Args[1]
19378 if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 6 || idx != x3_1.Args[0] || mem != x3.Args[2] {
19379 break
19380 }
19381 y4 := v_1
19382 if y4.Op != OpARM64MOVDnop {
19383 break
19384 }
19385 x4 := y4.Args[0]
19386 if x4.Op != OpARM64MOVBUloadidx {
19387 break
19388 }
19389 _ = x4.Args[2]
19390 if ptr != x4.Args[0] {
19391 break
19392 }
19393 x4_1 := x4.Args[1]
19394 if x4_1.Op != OpARM64ADDconst || auxIntToInt64(x4_1.AuxInt) != 7 || idx != x4_1.Args[0] || mem != x4.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y1, y2, y3, y4, o0, o1, o2)) {
19395 break
19396 }
19397 b = mergePoint(b, x0, x1, x2, x3, x4)
19398 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
19399 v.copyOf(v0)
19400 v0.AddArg3(ptr, idx, mem)
19401 return true
19402 }
19403
19404
19405
19406 for {
19407 t := v.Type
19408 if auxIntToInt64(v.AuxInt) != 8 {
19409 break
19410 }
19411 y0 := v_0
19412 if y0.Op != OpARM64MOVDnop {
19413 break
19414 }
19415 x0 := y0.Args[0]
19416 if x0.Op != OpARM64MOVBUload {
19417 break
19418 }
19419 i1 := auxIntToInt32(x0.AuxInt)
19420 s := auxToSym(x0.Aux)
19421 mem := x0.Args[1]
19422 p := x0.Args[0]
19423 y1 := v_1
19424 if y1.Op != OpARM64MOVDnop {
19425 break
19426 }
19427 x1 := y1.Args[0]
19428 if x1.Op != OpARM64MOVBUload {
19429 break
19430 }
19431 i0 := auxIntToInt32(x1.AuxInt)
19432 if auxToSym(x1.Aux) != s {
19433 break
19434 }
19435 _ = x1.Args[1]
19436 if p != x1.Args[0] || mem != x1.Args[1] || !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
19437 break
19438 }
19439 b = mergePoint(b, x0, x1)
19440 v0 := b.NewValue0(x1.Pos, OpARM64REV16W, t)
19441 v.copyOf(v0)
19442 v1 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
19443 v1.AuxInt = int32ToAuxInt(i0)
19444 v1.Aux = symToAux(s)
19445 v1.AddArg2(p, mem)
19446 v0.AddArg(v1)
19447 return true
19448 }
19449
19450
19451
19452 for {
19453 t := v.Type
19454 if auxIntToInt64(v.AuxInt) != 8 {
19455 break
19456 }
19457 y0 := v_0
19458 if y0.Op != OpARM64MOVDnop {
19459 break
19460 }
19461 x0 := y0.Args[0]
19462 if x0.Op != OpARM64MOVBUload || auxIntToInt32(x0.AuxInt) != 1 {
19463 break
19464 }
19465 s := auxToSym(x0.Aux)
19466 mem := x0.Args[1]
19467 p1 := x0.Args[0]
19468 if p1.Op != OpARM64ADD {
19469 break
19470 }
19471 _ = p1.Args[1]
19472 p1_0 := p1.Args[0]
19473 p1_1 := p1.Args[1]
19474 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
19475 ptr1 := p1_0
19476 idx1 := p1_1
19477 y1 := v_1
19478 if y1.Op != OpARM64MOVDnop {
19479 continue
19480 }
19481 x1 := y1.Args[0]
19482 if x1.Op != OpARM64MOVBUloadidx {
19483 continue
19484 }
19485 _ = x1.Args[2]
19486 ptr0 := x1.Args[0]
19487 idx0 := x1.Args[1]
19488 if mem != x1.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0, x1, y0, y1)) {
19489 continue
19490 }
19491 b = mergePoint(b, x0, x1)
19492 v0 := b.NewValue0(x0.Pos, OpARM64REV16W, t)
19493 v.copyOf(v0)
19494 v1 := b.NewValue0(x0.Pos, OpARM64MOVHUloadidx, t)
19495 v1.AddArg3(ptr0, idx0, mem)
19496 v0.AddArg(v1)
19497 return true
19498 }
19499 break
19500 }
19501
19502
19503
19504 for {
19505 t := v.Type
19506 if auxIntToInt64(v.AuxInt) != 8 {
19507 break
19508 }
19509 y0 := v_0
19510 if y0.Op != OpARM64MOVDnop {
19511 break
19512 }
19513 x0 := y0.Args[0]
19514 if x0.Op != OpARM64MOVBUloadidx {
19515 break
19516 }
19517 mem := x0.Args[2]
19518 ptr := x0.Args[0]
19519 x0_1 := x0.Args[1]
19520 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 1 {
19521 break
19522 }
19523 idx := x0_1.Args[0]
19524 y1 := v_1
19525 if y1.Op != OpARM64MOVDnop {
19526 break
19527 }
19528 x1 := y1.Args[0]
19529 if x1.Op != OpARM64MOVBUloadidx {
19530 break
19531 }
19532 _ = x1.Args[2]
19533 if ptr != x1.Args[0] || idx != x1.Args[1] || mem != x1.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0, x1, y0, y1)) {
19534 break
19535 }
19536 b = mergePoint(b, x0, x1)
19537 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t)
19538 v.copyOf(v0)
19539 v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
19540 v1.AddArg3(ptr, idx, mem)
19541 v0.AddArg(v1)
19542 return true
19543 }
19544
19545
19546
19547 for {
19548 t := v.Type
19549 if auxIntToInt64(v.AuxInt) != 24 {
19550 break
19551 }
19552 o0 := v_0
19553 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
19554 break
19555 }
19556 _ = o0.Args[1]
19557 y0 := o0.Args[0]
19558 if y0.Op != OpARM64REV16W {
19559 break
19560 }
19561 x0 := y0.Args[0]
19562 if x0.Op != OpARM64MOVHUload {
19563 break
19564 }
19565 i2 := auxIntToInt32(x0.AuxInt)
19566 s := auxToSym(x0.Aux)
19567 mem := x0.Args[1]
19568 p := x0.Args[0]
19569 y1 := o0.Args[1]
19570 if y1.Op != OpARM64MOVDnop {
19571 break
19572 }
19573 x1 := y1.Args[0]
19574 if x1.Op != OpARM64MOVBUload {
19575 break
19576 }
19577 i1 := auxIntToInt32(x1.AuxInt)
19578 if auxToSym(x1.Aux) != s {
19579 break
19580 }
19581 _ = x1.Args[1]
19582 if p != x1.Args[0] || mem != x1.Args[1] {
19583 break
19584 }
19585 y2 := v_1
19586 if y2.Op != OpARM64MOVDnop {
19587 break
19588 }
19589 x2 := y2.Args[0]
19590 if x2.Op != OpARM64MOVBUload {
19591 break
19592 }
19593 i0 := auxIntToInt32(x2.AuxInt)
19594 if auxToSym(x2.Aux) != s {
19595 break
19596 }
19597 _ = x2.Args[1]
19598 if p != x2.Args[0] || mem != x2.Args[1] || !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y0, y1, y2, o0)) {
19599 break
19600 }
19601 b = mergePoint(b, x0, x1, x2)
19602 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t)
19603 v.copyOf(v0)
19604 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
19605 v1.Aux = symToAux(s)
19606 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
19607 v2.AuxInt = int64ToAuxInt(int64(i0))
19608 v2.AddArg(p)
19609 v1.AddArg2(v2, mem)
19610 v0.AddArg(v1)
19611 return true
19612 }
19613
19614
19615
19616 for {
19617 t := v.Type
19618 if auxIntToInt64(v.AuxInt) != 24 {
19619 break
19620 }
19621 o0 := v_0
19622 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
19623 break
19624 }
19625 _ = o0.Args[1]
19626 y0 := o0.Args[0]
19627 if y0.Op != OpARM64REV16W {
19628 break
19629 }
19630 x0 := y0.Args[0]
19631 if x0.Op != OpARM64MOVHUload || auxIntToInt32(x0.AuxInt) != 2 {
19632 break
19633 }
19634 s := auxToSym(x0.Aux)
19635 mem := x0.Args[1]
19636 p := x0.Args[0]
19637 y1 := o0.Args[1]
19638 if y1.Op != OpARM64MOVDnop {
19639 break
19640 }
19641 x1 := y1.Args[0]
19642 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
19643 break
19644 }
19645 _ = x1.Args[1]
19646 p1 := x1.Args[0]
19647 if p1.Op != OpARM64ADD {
19648 break
19649 }
19650 _ = p1.Args[1]
19651 p1_0 := p1.Args[0]
19652 p1_1 := p1.Args[1]
19653 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
19654 ptr1 := p1_0
19655 idx1 := p1_1
19656 if mem != x1.Args[1] {
19657 continue
19658 }
19659 y2 := v_1
19660 if y2.Op != OpARM64MOVDnop {
19661 continue
19662 }
19663 x2 := y2.Args[0]
19664 if x2.Op != OpARM64MOVBUloadidx {
19665 continue
19666 }
19667 _ = x2.Args[2]
19668 ptr0 := x2.Args[0]
19669 idx0 := x2.Args[1]
19670 if mem != x2.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, y0, y1, y2, o0)) {
19671 continue
19672 }
19673 b = mergePoint(b, x0, x1, x2)
19674 v0 := b.NewValue0(x1.Pos, OpARM64REVW, t)
19675 v.copyOf(v0)
19676 v1 := b.NewValue0(x1.Pos, OpARM64MOVWUloadidx, t)
19677 v1.AddArg3(ptr0, idx0, mem)
19678 v0.AddArg(v1)
19679 return true
19680 }
19681 break
19682 }
19683
19684
19685
19686 for {
19687 t := v.Type
19688 if auxIntToInt64(v.AuxInt) != 24 {
19689 break
19690 }
19691 o0 := v_0
19692 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 16 {
19693 break
19694 }
19695 _ = o0.Args[1]
19696 y0 := o0.Args[0]
19697 if y0.Op != OpARM64REV16W {
19698 break
19699 }
19700 x0 := y0.Args[0]
19701 if x0.Op != OpARM64MOVHUloadidx {
19702 break
19703 }
19704 mem := x0.Args[2]
19705 ptr := x0.Args[0]
19706 x0_1 := x0.Args[1]
19707 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 2 {
19708 break
19709 }
19710 idx := x0_1.Args[0]
19711 y1 := o0.Args[1]
19712 if y1.Op != OpARM64MOVDnop {
19713 break
19714 }
19715 x1 := y1.Args[0]
19716 if x1.Op != OpARM64MOVBUloadidx {
19717 break
19718 }
19719 _ = x1.Args[2]
19720 if ptr != x1.Args[0] {
19721 break
19722 }
19723 x1_1 := x1.Args[1]
19724 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] {
19725 break
19726 }
19727 y2 := v_1
19728 if y2.Op != OpARM64MOVDnop {
19729 break
19730 }
19731 x2 := y2.Args[0]
19732 if x2.Op != OpARM64MOVBUloadidx {
19733 break
19734 }
19735 _ = x2.Args[2]
19736 if ptr != x2.Args[0] || idx != x2.Args[1] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0, x1, x2, y0, y1, y2, o0)) {
19737 break
19738 }
19739 b = mergePoint(b, x0, x1, x2)
19740 v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
19741 v.copyOf(v0)
19742 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
19743 v1.AddArg3(ptr, idx, mem)
19744 v0.AddArg(v1)
19745 return true
19746 }
19747
19748
19749
19750 for {
19751 t := v.Type
19752 if auxIntToInt64(v.AuxInt) != 56 {
19753 break
19754 }
19755 o0 := v_0
19756 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19757 break
19758 }
19759 _ = o0.Args[1]
19760 o1 := o0.Args[0]
19761 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19762 break
19763 }
19764 _ = o1.Args[1]
19765 o2 := o1.Args[0]
19766 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19767 break
19768 }
19769 _ = o2.Args[1]
19770 y0 := o2.Args[0]
19771 if y0.Op != OpARM64REVW {
19772 break
19773 }
19774 x0 := y0.Args[0]
19775 if x0.Op != OpARM64MOVWUload {
19776 break
19777 }
19778 i4 := auxIntToInt32(x0.AuxInt)
19779 s := auxToSym(x0.Aux)
19780 mem := x0.Args[1]
19781 p := x0.Args[0]
19782 y1 := o2.Args[1]
19783 if y1.Op != OpARM64MOVDnop {
19784 break
19785 }
19786 x1 := y1.Args[0]
19787 if x1.Op != OpARM64MOVBUload {
19788 break
19789 }
19790 i3 := auxIntToInt32(x1.AuxInt)
19791 if auxToSym(x1.Aux) != s {
19792 break
19793 }
19794 _ = x1.Args[1]
19795 if p != x1.Args[0] || mem != x1.Args[1] {
19796 break
19797 }
19798 y2 := o1.Args[1]
19799 if y2.Op != OpARM64MOVDnop {
19800 break
19801 }
19802 x2 := y2.Args[0]
19803 if x2.Op != OpARM64MOVBUload {
19804 break
19805 }
19806 i2 := auxIntToInt32(x2.AuxInt)
19807 if auxToSym(x2.Aux) != s {
19808 break
19809 }
19810 _ = x2.Args[1]
19811 if p != x2.Args[0] || mem != x2.Args[1] {
19812 break
19813 }
19814 y3 := o0.Args[1]
19815 if y3.Op != OpARM64MOVDnop {
19816 break
19817 }
19818 x3 := y3.Args[0]
19819 if x3.Op != OpARM64MOVBUload {
19820 break
19821 }
19822 i1 := auxIntToInt32(x3.AuxInt)
19823 if auxToSym(x3.Aux) != s {
19824 break
19825 }
19826 _ = x3.Args[1]
19827 if p != x3.Args[0] || mem != x3.Args[1] {
19828 break
19829 }
19830 y4 := v_1
19831 if y4.Op != OpARM64MOVDnop {
19832 break
19833 }
19834 x4 := y4.Args[0]
19835 if x4.Op != OpARM64MOVBUload {
19836 break
19837 }
19838 i0 := auxIntToInt32(x4.AuxInt)
19839 if auxToSym(x4.Aux) != s {
19840 break
19841 }
19842 _ = x4.Args[1]
19843 if p != x4.Args[0] || mem != x4.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)) {
19844 break
19845 }
19846 b = mergePoint(b, x0, x1, x2, x3, x4)
19847 v0 := b.NewValue0(x4.Pos, OpARM64REV, t)
19848 v.copyOf(v0)
19849 v1 := b.NewValue0(x4.Pos, OpARM64MOVDload, t)
19850 v1.Aux = symToAux(s)
19851 v2 := b.NewValue0(x4.Pos, OpOffPtr, p.Type)
19852 v2.AuxInt = int64ToAuxInt(int64(i0))
19853 v2.AddArg(p)
19854 v1.AddArg2(v2, mem)
19855 v0.AddArg(v1)
19856 return true
19857 }
19858
19859
19860
19861 for {
19862 t := v.Type
19863 if auxIntToInt64(v.AuxInt) != 56 {
19864 break
19865 }
19866 o0 := v_0
19867 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19868 break
19869 }
19870 _ = o0.Args[1]
19871 o1 := o0.Args[0]
19872 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19873 break
19874 }
19875 _ = o1.Args[1]
19876 o2 := o1.Args[0]
19877 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19878 break
19879 }
19880 _ = o2.Args[1]
19881 y0 := o2.Args[0]
19882 if y0.Op != OpARM64REVW {
19883 break
19884 }
19885 x0 := y0.Args[0]
19886 if x0.Op != OpARM64MOVWUload || auxIntToInt32(x0.AuxInt) != 4 {
19887 break
19888 }
19889 s := auxToSym(x0.Aux)
19890 mem := x0.Args[1]
19891 p := x0.Args[0]
19892 y1 := o2.Args[1]
19893 if y1.Op != OpARM64MOVDnop {
19894 break
19895 }
19896 x1 := y1.Args[0]
19897 if x1.Op != OpARM64MOVBUload || auxIntToInt32(x1.AuxInt) != 3 || auxToSym(x1.Aux) != s {
19898 break
19899 }
19900 _ = x1.Args[1]
19901 if p != x1.Args[0] || mem != x1.Args[1] {
19902 break
19903 }
19904 y2 := o1.Args[1]
19905 if y2.Op != OpARM64MOVDnop {
19906 break
19907 }
19908 x2 := y2.Args[0]
19909 if x2.Op != OpARM64MOVBUload || auxIntToInt32(x2.AuxInt) != 2 || auxToSym(x2.Aux) != s {
19910 break
19911 }
19912 _ = x2.Args[1]
19913 if p != x2.Args[0] || mem != x2.Args[1] {
19914 break
19915 }
19916 y3 := o0.Args[1]
19917 if y3.Op != OpARM64MOVDnop {
19918 break
19919 }
19920 x3 := y3.Args[0]
19921 if x3.Op != OpARM64MOVBUload || auxIntToInt32(x3.AuxInt) != 1 || auxToSym(x3.Aux) != s {
19922 break
19923 }
19924 _ = x3.Args[1]
19925 p1 := x3.Args[0]
19926 if p1.Op != OpARM64ADD {
19927 break
19928 }
19929 _ = p1.Args[1]
19930 p1_0 := p1.Args[0]
19931 p1_1 := p1.Args[1]
19932 for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
19933 ptr1 := p1_0
19934 idx1 := p1_1
19935 if mem != x3.Args[1] {
19936 continue
19937 }
19938 y4 := v_1
19939 if y4.Op != OpARM64MOVDnop {
19940 continue
19941 }
19942 x4 := y4.Args[0]
19943 if x4.Op != OpARM64MOVBUloadidx {
19944 continue
19945 }
19946 _ = x4.Args[2]
19947 ptr0 := x4.Args[0]
19948 idx0 := x4.Args[1]
19949 if mem != x4.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)) {
19950 continue
19951 }
19952 b = mergePoint(b, x0, x1, x2, x3, x4)
19953 v0 := b.NewValue0(x3.Pos, OpARM64REV, t)
19954 v.copyOf(v0)
19955 v1 := b.NewValue0(x3.Pos, OpARM64MOVDloadidx, t)
19956 v1.AddArg3(ptr0, idx0, mem)
19957 v0.AddArg(v1)
19958 return true
19959 }
19960 break
19961 }
19962
19963
19964
19965 for {
19966 t := v.Type
19967 if auxIntToInt64(v.AuxInt) != 56 {
19968 break
19969 }
19970 o0 := v_0
19971 if o0.Op != OpARM64ORshiftLL || auxIntToInt64(o0.AuxInt) != 48 {
19972 break
19973 }
19974 _ = o0.Args[1]
19975 o1 := o0.Args[0]
19976 if o1.Op != OpARM64ORshiftLL || auxIntToInt64(o1.AuxInt) != 40 {
19977 break
19978 }
19979 _ = o1.Args[1]
19980 o2 := o1.Args[0]
19981 if o2.Op != OpARM64ORshiftLL || auxIntToInt64(o2.AuxInt) != 32 {
19982 break
19983 }
19984 _ = o2.Args[1]
19985 y0 := o2.Args[0]
19986 if y0.Op != OpARM64REVW {
19987 break
19988 }
19989 x0 := y0.Args[0]
19990 if x0.Op != OpARM64MOVWUloadidx {
19991 break
19992 }
19993 mem := x0.Args[2]
19994 ptr := x0.Args[0]
19995 x0_1 := x0.Args[1]
19996 if x0_1.Op != OpARM64ADDconst || auxIntToInt64(x0_1.AuxInt) != 4 {
19997 break
19998 }
19999 idx := x0_1.Args[0]
20000 y1 := o2.Args[1]
20001 if y1.Op != OpARM64MOVDnop {
20002 break
20003 }
20004 x1 := y1.Args[0]
20005 if x1.Op != OpARM64MOVBUloadidx {
20006 break
20007 }
20008 _ = x1.Args[2]
20009 if ptr != x1.Args[0] {
20010 break
20011 }
20012 x1_1 := x1.Args[1]
20013 if x1_1.Op != OpARM64ADDconst || auxIntToInt64(x1_1.AuxInt) != 3 || idx != x1_1.Args[0] || mem != x1.Args[2] {
20014 break
20015 }
20016 y2 := o1.Args[1]
20017 if y2.Op != OpARM64MOVDnop {
20018 break
20019 }
20020 x2 := y2.Args[0]
20021 if x2.Op != OpARM64MOVBUloadidx {
20022 break
20023 }
20024 _ = x2.Args[2]
20025 if ptr != x2.Args[0] {
20026 break
20027 }
20028 x2_1 := x2.Args[1]
20029 if x2_1.Op != OpARM64ADDconst || auxIntToInt64(x2_1.AuxInt) != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] {
20030 break
20031 }
20032 y3 := o0.Args[1]
20033 if y3.Op != OpARM64MOVDnop {
20034 break
20035 }
20036 x3 := y3.Args[0]
20037 if x3.Op != OpARM64MOVBUloadidx {
20038 break
20039 }
20040 _ = x3.Args[2]
20041 if ptr != x3.Args[0] {
20042 break
20043 }
20044 x3_1 := x3.Args[1]
20045 if x3_1.Op != OpARM64ADDconst || auxIntToInt64(x3_1.AuxInt) != 1 || idx != x3_1.Args[0] || mem != x3.Args[2] {
20046 break
20047 }
20048 y4 := v_1
20049 if y4.Op != OpARM64MOVDnop {
20050 break
20051 }
20052 x4 := y4.Args[0]
20053 if x4.Op != OpARM64MOVBUloadidx {
20054 break
20055 }
20056 _ = x4.Args[2]
20057 if ptr != x4.Args[0] || idx != x4.Args[1] || mem != x4.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0, x1, x2, x3, x4, y0, y1, y2, y3, y4, o0, o1, o2)) {
20058 break
20059 }
20060 b = mergePoint(b, x0, x1, x2, x3, x4)
20061 v0 := b.NewValue0(v.Pos, OpARM64REV, t)
20062 v.copyOf(v0)
20063 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
20064 v1.AddArg3(ptr, idx, mem)
20065 v0.AddArg(v1)
20066 return true
20067 }
20068 return false
20069 }
20070 func rewriteValueARM64_OpARM64ORshiftRA(v *Value) bool {
20071 v_1 := v.Args[1]
20072 v_0 := v.Args[0]
20073 b := v.Block
20074
20075
20076 for {
20077 d := auxIntToInt64(v.AuxInt)
20078 if v_0.Op != OpARM64MOVDconst {
20079 break
20080 }
20081 c := auxIntToInt64(v_0.AuxInt)
20082 x := v_1
20083 v.reset(OpARM64ORconst)
20084 v.AuxInt = int64ToAuxInt(c)
20085 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
20086 v0.AuxInt = int64ToAuxInt(d)
20087 v0.AddArg(x)
20088 v.AddArg(v0)
20089 return true
20090 }
20091
20092
20093 for {
20094 d := auxIntToInt64(v.AuxInt)
20095 x := v_0
20096 if v_1.Op != OpARM64MOVDconst {
20097 break
20098 }
20099 c := auxIntToInt64(v_1.AuxInt)
20100 v.reset(OpARM64ORconst)
20101 v.AuxInt = int64ToAuxInt(c >> uint64(d))
20102 v.AddArg(x)
20103 return true
20104 }
20105
20106
20107 for {
20108 c := auxIntToInt64(v.AuxInt)
20109 y := v_0
20110 if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
20111 break
20112 }
20113 x := y.Args[0]
20114 if x != v_1 {
20115 break
20116 }
20117 v.copyOf(y)
20118 return true
20119 }
20120 return false
20121 }
20122 func rewriteValueARM64_OpARM64ORshiftRL(v *Value) bool {
20123 v_1 := v.Args[1]
20124 v_0 := v.Args[0]
20125 b := v.Block
20126
20127
20128 for {
20129 d := auxIntToInt64(v.AuxInt)
20130 if v_0.Op != OpARM64MOVDconst {
20131 break
20132 }
20133 c := auxIntToInt64(v_0.AuxInt)
20134 x := v_1
20135 v.reset(OpARM64ORconst)
20136 v.AuxInt = int64ToAuxInt(c)
20137 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
20138 v0.AuxInt = int64ToAuxInt(d)
20139 v0.AddArg(x)
20140 v.AddArg(v0)
20141 return true
20142 }
20143
20144
20145 for {
20146 d := auxIntToInt64(v.AuxInt)
20147 x := v_0
20148 if v_1.Op != OpARM64MOVDconst {
20149 break
20150 }
20151 c := auxIntToInt64(v_1.AuxInt)
20152 v.reset(OpARM64ORconst)
20153 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
20154 v.AddArg(x)
20155 return true
20156 }
20157
20158
20159 for {
20160 c := auxIntToInt64(v.AuxInt)
20161 y := v_0
20162 if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
20163 break
20164 }
20165 x := y.Args[0]
20166 if x != v_1 {
20167 break
20168 }
20169 v.copyOf(y)
20170 return true
20171 }
20172
20173
20174 for {
20175 c := auxIntToInt64(v.AuxInt)
20176 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
20177 break
20178 }
20179 x := v_0.Args[0]
20180 if x != v_1 {
20181 break
20182 }
20183 v.reset(OpARM64RORconst)
20184 v.AuxInt = int64ToAuxInt(c)
20185 v.AddArg(x)
20186 return true
20187 }
20188
20189
20190
20191 for {
20192 t := v.Type
20193 c := auxIntToInt64(v.AuxInt)
20194 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 32-c {
20195 break
20196 }
20197 x := v_0.Args[0]
20198 if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) {
20199 break
20200 }
20201 v.reset(OpARM64RORWconst)
20202 v.AuxInt = int64ToAuxInt(c)
20203 v.AddArg(x)
20204 return true
20205 }
20206
20207
20208
20209 for {
20210 rc := auxIntToInt64(v.AuxInt)
20211 if v_0.Op != OpARM64ANDconst {
20212 break
20213 }
20214 ac := auxIntToInt64(v_0.AuxInt)
20215 x := v_0.Args[0]
20216 if v_1.Op != OpARM64SLLconst {
20217 break
20218 }
20219 lc := auxIntToInt64(v_1.AuxInt)
20220 y := v_1.Args[0]
20221 if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) {
20222 break
20223 }
20224 v.reset(OpARM64BFI)
20225 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
20226 v.AddArg2(x, y)
20227 return true
20228 }
20229
20230
20231
20232 for {
20233 rc := auxIntToInt64(v.AuxInt)
20234 if v_0.Op != OpARM64ANDconst {
20235 break
20236 }
20237 ac := auxIntToInt64(v_0.AuxInt)
20238 y := v_0.Args[0]
20239 if v_1.Op != OpARM64SLLconst {
20240 break
20241 }
20242 lc := auxIntToInt64(v_1.AuxInt)
20243 x := v_1.Args[0]
20244 if !(lc < rc && ac == ^(1<<uint(64-rc)-1)) {
20245 break
20246 }
20247 v.reset(OpARM64BFXIL)
20248 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
20249 v.AddArg2(y, x)
20250 return true
20251 }
20252 return false
20253 }
20254 func rewriteValueARM64_OpARM64ORshiftRO(v *Value) bool {
20255 v_1 := v.Args[1]
20256 v_0 := v.Args[0]
20257 b := v.Block
20258
20259
20260 for {
20261 d := auxIntToInt64(v.AuxInt)
20262 if v_0.Op != OpARM64MOVDconst {
20263 break
20264 }
20265 c := auxIntToInt64(v_0.AuxInt)
20266 x := v_1
20267 v.reset(OpARM64ORconst)
20268 v.AuxInt = int64ToAuxInt(c)
20269 v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
20270 v0.AuxInt = int64ToAuxInt(d)
20271 v0.AddArg(x)
20272 v.AddArg(v0)
20273 return true
20274 }
20275
20276
20277 for {
20278 d := auxIntToInt64(v.AuxInt)
20279 x := v_0
20280 if v_1.Op != OpARM64MOVDconst {
20281 break
20282 }
20283 c := auxIntToInt64(v_1.AuxInt)
20284 v.reset(OpARM64ORconst)
20285 v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
20286 v.AddArg(x)
20287 return true
20288 }
20289
20290
20291 for {
20292 c := auxIntToInt64(v.AuxInt)
20293 y := v_0
20294 if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
20295 break
20296 }
20297 x := y.Args[0]
20298 if x != v_1 {
20299 break
20300 }
20301 v.copyOf(y)
20302 return true
20303 }
20304 return false
20305 }
20306 func rewriteValueARM64_OpARM64REV(v *Value) bool {
20307 v_0 := v.Args[0]
20308
20309
20310 for {
20311 if v_0.Op != OpARM64REV {
20312 break
20313 }
20314 p := v_0.Args[0]
20315 v.copyOf(p)
20316 return true
20317 }
20318 return false
20319 }
20320 func rewriteValueARM64_OpARM64REVW(v *Value) bool {
20321 v_0 := v.Args[0]
20322
20323
20324 for {
20325 if v_0.Op != OpARM64REVW {
20326 break
20327 }
20328 p := v_0.Args[0]
20329 v.copyOf(p)
20330 return true
20331 }
20332 return false
20333 }
20334 func rewriteValueARM64_OpARM64ROR(v *Value) bool {
20335 v_1 := v.Args[1]
20336 v_0 := v.Args[0]
20337
20338
20339 for {
20340 x := v_0
20341 if v_1.Op != OpARM64MOVDconst {
20342 break
20343 }
20344 c := auxIntToInt64(v_1.AuxInt)
20345 v.reset(OpARM64RORconst)
20346 v.AuxInt = int64ToAuxInt(c & 63)
20347 v.AddArg(x)
20348 return true
20349 }
20350 return false
20351 }
20352 func rewriteValueARM64_OpARM64RORW(v *Value) bool {
20353 v_1 := v.Args[1]
20354 v_0 := v.Args[0]
20355
20356
20357 for {
20358 x := v_0
20359 if v_1.Op != OpARM64MOVDconst {
20360 break
20361 }
20362 c := auxIntToInt64(v_1.AuxInt)
20363 v.reset(OpARM64RORWconst)
20364 v.AuxInt = int64ToAuxInt(c & 31)
20365 v.AddArg(x)
20366 return true
20367 }
20368 return false
20369 }
20370 func rewriteValueARM64_OpARM64RORWconst(v *Value) bool {
20371 v_0 := v.Args[0]
20372
20373
20374 for {
20375 c := auxIntToInt64(v.AuxInt)
20376 if v_0.Op != OpARM64RORWconst {
20377 break
20378 }
20379 d := auxIntToInt64(v_0.AuxInt)
20380 x := v_0.Args[0]
20381 v.reset(OpARM64RORWconst)
20382 v.AuxInt = int64ToAuxInt((c + d) & 31)
20383 v.AddArg(x)
20384 return true
20385 }
20386 return false
20387 }
20388 func rewriteValueARM64_OpARM64RORconst(v *Value) bool {
20389 v_0 := v.Args[0]
20390
20391
20392 for {
20393 c := auxIntToInt64(v.AuxInt)
20394 if v_0.Op != OpARM64RORconst {
20395 break
20396 }
20397 d := auxIntToInt64(v_0.AuxInt)
20398 x := v_0.Args[0]
20399 v.reset(OpARM64RORconst)
20400 v.AuxInt = int64ToAuxInt((c + d) & 63)
20401 v.AddArg(x)
20402 return true
20403 }
20404 return false
20405 }
20406 func rewriteValueARM64_OpARM64SBCSflags(v *Value) bool {
20407 v_2 := v.Args[2]
20408 v_1 := v.Args[1]
20409 v_0 := v.Args[0]
20410 b := v.Block
20411 typ := &b.Func.Config.Types
20412
20413
20414 for {
20415 x := v_0
20416 y := v_1
20417 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
20418 break
20419 }
20420 v_2_0 := v_2.Args[0]
20421 if v_2_0.Op != OpARM64NEGSflags {
20422 break
20423 }
20424 v_2_0_0 := v_2_0.Args[0]
20425 if v_2_0_0.Op != OpARM64NEG || v_2_0_0.Type != typ.UInt64 {
20426 break
20427 }
20428 v_2_0_0_0 := v_2_0_0.Args[0]
20429 if v_2_0_0_0.Op != OpARM64NGCzerocarry || v_2_0_0_0.Type != typ.UInt64 {
20430 break
20431 }
20432 bo := v_2_0_0_0.Args[0]
20433 v.reset(OpARM64SBCSflags)
20434 v.AddArg3(x, y, bo)
20435 return true
20436 }
20437
20438
20439 for {
20440 x := v_0
20441 y := v_1
20442 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
20443 break
20444 }
20445 v_2_0 := v_2.Args[0]
20446 if v_2_0.Op != OpARM64NEGSflags {
20447 break
20448 }
20449 v_2_0_0 := v_2_0.Args[0]
20450 if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
20451 break
20452 }
20453 v.reset(OpARM64SUBSflags)
20454 v.AddArg2(x, y)
20455 return true
20456 }
20457 return false
20458 }
20459 func rewriteValueARM64_OpARM64SLL(v *Value) bool {
20460 v_1 := v.Args[1]
20461 v_0 := v.Args[0]
20462
20463
20464 for {
20465 x := v_0
20466 if v_1.Op != OpARM64MOVDconst {
20467 break
20468 }
20469 c := auxIntToInt64(v_1.AuxInt)
20470 v.reset(OpARM64SLLconst)
20471 v.AuxInt = int64ToAuxInt(c & 63)
20472 v.AddArg(x)
20473 return true
20474 }
20475 return false
20476 }
20477 func rewriteValueARM64_OpARM64SLLconst(v *Value) bool {
20478 v_0 := v.Args[0]
20479
20480
20481 for {
20482 c := auxIntToInt64(v.AuxInt)
20483 if v_0.Op != OpARM64MOVDconst {
20484 break
20485 }
20486 d := auxIntToInt64(v_0.AuxInt)
20487 v.reset(OpARM64MOVDconst)
20488 v.AuxInt = int64ToAuxInt(d << uint64(c))
20489 return true
20490 }
20491
20492
20493
20494 for {
20495 c := auxIntToInt64(v.AuxInt)
20496 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
20497 break
20498 }
20499 x := v_0.Args[0]
20500 if !(0 < c && c < 64) {
20501 break
20502 }
20503 v.reset(OpARM64ANDconst)
20504 v.AuxInt = int64ToAuxInt(^(1<<uint(c) - 1))
20505 v.AddArg(x)
20506 return true
20507 }
20508
20509
20510 for {
20511 lc := auxIntToInt64(v.AuxInt)
20512 if v_0.Op != OpARM64MOVWreg {
20513 break
20514 }
20515 x := v_0.Args[0]
20516 v.reset(OpARM64SBFIZ)
20517 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc)))
20518 v.AddArg(x)
20519 return true
20520 }
20521
20522
20523 for {
20524 lc := auxIntToInt64(v.AuxInt)
20525 if v_0.Op != OpARM64MOVHreg {
20526 break
20527 }
20528 x := v_0.Args[0]
20529 v.reset(OpARM64SBFIZ)
20530 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc)))
20531 v.AddArg(x)
20532 return true
20533 }
20534
20535
20536 for {
20537 lc := auxIntToInt64(v.AuxInt)
20538 if v_0.Op != OpARM64MOVBreg {
20539 break
20540 }
20541 x := v_0.Args[0]
20542 v.reset(OpARM64SBFIZ)
20543 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc)))
20544 v.AddArg(x)
20545 return true
20546 }
20547
20548
20549 for {
20550 lc := auxIntToInt64(v.AuxInt)
20551 if v_0.Op != OpARM64MOVWUreg {
20552 break
20553 }
20554 x := v_0.Args[0]
20555 v.reset(OpARM64UBFIZ)
20556 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(32, 64-lc)))
20557 v.AddArg(x)
20558 return true
20559 }
20560
20561
20562 for {
20563 lc := auxIntToInt64(v.AuxInt)
20564 if v_0.Op != OpARM64MOVHUreg {
20565 break
20566 }
20567 x := v_0.Args[0]
20568 v.reset(OpARM64UBFIZ)
20569 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(16, 64-lc)))
20570 v.AddArg(x)
20571 return true
20572 }
20573
20574
20575 for {
20576 lc := auxIntToInt64(v.AuxInt)
20577 if v_0.Op != OpARM64MOVBUreg {
20578 break
20579 }
20580 x := v_0.Args[0]
20581 v.reset(OpARM64UBFIZ)
20582 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc, min(8, 64-lc)))
20583 v.AddArg(x)
20584 return true
20585 }
20586
20587
20588
20589 for {
20590 sc := auxIntToInt64(v.AuxInt)
20591 if v_0.Op != OpARM64ANDconst {
20592 break
20593 }
20594 ac := auxIntToInt64(v_0.AuxInt)
20595 x := v_0.Args[0]
20596 if !(isARM64BFMask(sc, ac, 0)) {
20597 break
20598 }
20599 v.reset(OpARM64UBFIZ)
20600 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
20601 v.AddArg(x)
20602 return true
20603 }
20604
20605
20606
20607 for {
20608 sc := auxIntToInt64(v.AuxInt)
20609 if v_0.Op != OpARM64UBFIZ {
20610 break
20611 }
20612 bfc := auxIntToArm64BitField(v_0.AuxInt)
20613 x := v_0.Args[0]
20614 if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) {
20615 break
20616 }
20617 v.reset(OpARM64UBFIZ)
20618 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()))
20619 v.AddArg(x)
20620 return true
20621 }
20622 return false
20623 }
20624 func rewriteValueARM64_OpARM64SRA(v *Value) bool {
20625 v_1 := v.Args[1]
20626 v_0 := v.Args[0]
20627
20628
20629 for {
20630 x := v_0
20631 if v_1.Op != OpARM64MOVDconst {
20632 break
20633 }
20634 c := auxIntToInt64(v_1.AuxInt)
20635 v.reset(OpARM64SRAconst)
20636 v.AuxInt = int64ToAuxInt(c & 63)
20637 v.AddArg(x)
20638 return true
20639 }
20640 return false
20641 }
20642 func rewriteValueARM64_OpARM64SRAconst(v *Value) bool {
20643 v_0 := v.Args[0]
20644
20645
20646 for {
20647 c := auxIntToInt64(v.AuxInt)
20648 if v_0.Op != OpARM64MOVDconst {
20649 break
20650 }
20651 d := auxIntToInt64(v_0.AuxInt)
20652 v.reset(OpARM64MOVDconst)
20653 v.AuxInt = int64ToAuxInt(d >> uint64(c))
20654 return true
20655 }
20656
20657
20658
20659 for {
20660 rc := auxIntToInt64(v.AuxInt)
20661 if v_0.Op != OpARM64SLLconst {
20662 break
20663 }
20664 lc := auxIntToInt64(v_0.AuxInt)
20665 x := v_0.Args[0]
20666 if !(lc > rc) {
20667 break
20668 }
20669 v.reset(OpARM64SBFIZ)
20670 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
20671 v.AddArg(x)
20672 return true
20673 }
20674
20675
20676
20677 for {
20678 rc := auxIntToInt64(v.AuxInt)
20679 if v_0.Op != OpARM64SLLconst {
20680 break
20681 }
20682 lc := auxIntToInt64(v_0.AuxInt)
20683 x := v_0.Args[0]
20684 if !(lc <= rc) {
20685 break
20686 }
20687 v.reset(OpARM64SBFX)
20688 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
20689 v.AddArg(x)
20690 return true
20691 }
20692
20693
20694
20695 for {
20696 rc := auxIntToInt64(v.AuxInt)
20697 if v_0.Op != OpARM64MOVWreg {
20698 break
20699 }
20700 x := v_0.Args[0]
20701 if !(rc < 32) {
20702 break
20703 }
20704 v.reset(OpARM64SBFX)
20705 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc))
20706 v.AddArg(x)
20707 return true
20708 }
20709
20710
20711
20712 for {
20713 rc := auxIntToInt64(v.AuxInt)
20714 if v_0.Op != OpARM64MOVHreg {
20715 break
20716 }
20717 x := v_0.Args[0]
20718 if !(rc < 16) {
20719 break
20720 }
20721 v.reset(OpARM64SBFX)
20722 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc))
20723 v.AddArg(x)
20724 return true
20725 }
20726
20727
20728
20729 for {
20730 rc := auxIntToInt64(v.AuxInt)
20731 if v_0.Op != OpARM64MOVBreg {
20732 break
20733 }
20734 x := v_0.Args[0]
20735 if !(rc < 8) {
20736 break
20737 }
20738 v.reset(OpARM64SBFX)
20739 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc))
20740 v.AddArg(x)
20741 return true
20742 }
20743
20744
20745
20746 for {
20747 sc := auxIntToInt64(v.AuxInt)
20748 if v_0.Op != OpARM64SBFIZ {
20749 break
20750 }
20751 bfc := auxIntToArm64BitField(v_0.AuxInt)
20752 x := v_0.Args[0]
20753 if !(sc < bfc.getARM64BFlsb()) {
20754 break
20755 }
20756 v.reset(OpARM64SBFIZ)
20757 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
20758 v.AddArg(x)
20759 return true
20760 }
20761
20762
20763
20764 for {
20765 sc := auxIntToInt64(v.AuxInt)
20766 if v_0.Op != OpARM64SBFIZ {
20767 break
20768 }
20769 bfc := auxIntToArm64BitField(v_0.AuxInt)
20770 x := v_0.Args[0]
20771 if !(sc >= bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
20772 break
20773 }
20774 v.reset(OpARM64SBFX)
20775 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
20776 v.AddArg(x)
20777 return true
20778 }
20779 return false
20780 }
20781 func rewriteValueARM64_OpARM64SRL(v *Value) bool {
20782 v_1 := v.Args[1]
20783 v_0 := v.Args[0]
20784
20785
20786 for {
20787 x := v_0
20788 if v_1.Op != OpARM64MOVDconst {
20789 break
20790 }
20791 c := auxIntToInt64(v_1.AuxInt)
20792 v.reset(OpARM64SRLconst)
20793 v.AuxInt = int64ToAuxInt(c & 63)
20794 v.AddArg(x)
20795 return true
20796 }
20797 return false
20798 }
20799 func rewriteValueARM64_OpARM64SRLconst(v *Value) bool {
20800 v_0 := v.Args[0]
20801
20802
20803 for {
20804 c := auxIntToInt64(v.AuxInt)
20805 if v_0.Op != OpARM64MOVDconst {
20806 break
20807 }
20808 d := auxIntToInt64(v_0.AuxInt)
20809 v.reset(OpARM64MOVDconst)
20810 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
20811 return true
20812 }
20813
20814
20815
20816 for {
20817 c := auxIntToInt64(v.AuxInt)
20818 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
20819 break
20820 }
20821 x := v_0.Args[0]
20822 if !(0 < c && c < 64) {
20823 break
20824 }
20825 v.reset(OpARM64ANDconst)
20826 v.AuxInt = int64ToAuxInt(1<<uint(64-c) - 1)
20827 v.AddArg(x)
20828 return true
20829 }
20830
20831
20832
20833 for {
20834 rc := auxIntToInt64(v.AuxInt)
20835 if v_0.Op != OpARM64MOVWUreg {
20836 break
20837 }
20838 if !(rc >= 32) {
20839 break
20840 }
20841 v.reset(OpARM64MOVDconst)
20842 v.AuxInt = int64ToAuxInt(0)
20843 return true
20844 }
20845
20846
20847
20848 for {
20849 rc := auxIntToInt64(v.AuxInt)
20850 if v_0.Op != OpARM64MOVHUreg {
20851 break
20852 }
20853 if !(rc >= 16) {
20854 break
20855 }
20856 v.reset(OpARM64MOVDconst)
20857 v.AuxInt = int64ToAuxInt(0)
20858 return true
20859 }
20860
20861
20862
20863 for {
20864 rc := auxIntToInt64(v.AuxInt)
20865 if v_0.Op != OpARM64MOVBUreg {
20866 break
20867 }
20868 if !(rc >= 8) {
20869 break
20870 }
20871 v.reset(OpARM64MOVDconst)
20872 v.AuxInt = int64ToAuxInt(0)
20873 return true
20874 }
20875
20876
20877
20878 for {
20879 rc := auxIntToInt64(v.AuxInt)
20880 if v_0.Op != OpARM64SLLconst {
20881 break
20882 }
20883 lc := auxIntToInt64(v_0.AuxInt)
20884 x := v_0.Args[0]
20885 if !(lc > rc) {
20886 break
20887 }
20888 v.reset(OpARM64UBFIZ)
20889 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(lc-rc, 64-lc))
20890 v.AddArg(x)
20891 return true
20892 }
20893
20894
20895
20896 for {
20897 rc := auxIntToInt64(v.AuxInt)
20898 if v_0.Op != OpARM64SLLconst {
20899 break
20900 }
20901 lc := auxIntToInt64(v_0.AuxInt)
20902 x := v_0.Args[0]
20903 if !(lc < rc) {
20904 break
20905 }
20906 v.reset(OpARM64UBFX)
20907 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc-lc, 64-rc))
20908 v.AddArg(x)
20909 return true
20910 }
20911
20912
20913
20914 for {
20915 rc := auxIntToInt64(v.AuxInt)
20916 if v_0.Op != OpARM64MOVWUreg {
20917 break
20918 }
20919 x := v_0.Args[0]
20920 if !(rc < 32) {
20921 break
20922 }
20923 v.reset(OpARM64UBFX)
20924 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 32-rc))
20925 v.AddArg(x)
20926 return true
20927 }
20928
20929
20930
20931 for {
20932 rc := auxIntToInt64(v.AuxInt)
20933 if v_0.Op != OpARM64MOVHUreg {
20934 break
20935 }
20936 x := v_0.Args[0]
20937 if !(rc < 16) {
20938 break
20939 }
20940 v.reset(OpARM64UBFX)
20941 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 16-rc))
20942 v.AddArg(x)
20943 return true
20944 }
20945
20946
20947
20948 for {
20949 rc := auxIntToInt64(v.AuxInt)
20950 if v_0.Op != OpARM64MOVBUreg {
20951 break
20952 }
20953 x := v_0.Args[0]
20954 if !(rc < 8) {
20955 break
20956 }
20957 v.reset(OpARM64UBFX)
20958 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(rc, 8-rc))
20959 v.AddArg(x)
20960 return true
20961 }
20962
20963
20964
20965 for {
20966 sc := auxIntToInt64(v.AuxInt)
20967 if v_0.Op != OpARM64ANDconst {
20968 break
20969 }
20970 ac := auxIntToInt64(v_0.AuxInt)
20971 x := v_0.Args[0]
20972 if !(isARM64BFMask(sc, ac, sc)) {
20973 break
20974 }
20975 v.reset(OpARM64UBFX)
20976 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
20977 v.AddArg(x)
20978 return true
20979 }
20980
20981
20982
20983 for {
20984 sc := auxIntToInt64(v.AuxInt)
20985 if v_0.Op != OpARM64UBFX {
20986 break
20987 }
20988 bfc := auxIntToArm64BitField(v_0.AuxInt)
20989 x := v_0.Args[0]
20990 if !(sc < bfc.getARM64BFwidth()) {
20991 break
20992 }
20993 v.reset(OpARM64UBFX)
20994 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc))
20995 v.AddArg(x)
20996 return true
20997 }
20998
20999
21000
21001 for {
21002 sc := auxIntToInt64(v.AuxInt)
21003 if v_0.Op != OpARM64UBFIZ {
21004 break
21005 }
21006 bfc := auxIntToArm64BitField(v_0.AuxInt)
21007 x := v_0.Args[0]
21008 if !(sc == bfc.getARM64BFlsb()) {
21009 break
21010 }
21011 v.reset(OpARM64ANDconst)
21012 v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1)
21013 v.AddArg(x)
21014 return true
21015 }
21016
21017
21018
21019 for {
21020 sc := auxIntToInt64(v.AuxInt)
21021 if v_0.Op != OpARM64UBFIZ {
21022 break
21023 }
21024 bfc := auxIntToArm64BitField(v_0.AuxInt)
21025 x := v_0.Args[0]
21026 if !(sc < bfc.getARM64BFlsb()) {
21027 break
21028 }
21029 v.reset(OpARM64UBFIZ)
21030 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
21031 v.AddArg(x)
21032 return true
21033 }
21034
21035
21036
21037 for {
21038 sc := auxIntToInt64(v.AuxInt)
21039 if v_0.Op != OpARM64UBFIZ {
21040 break
21041 }
21042 bfc := auxIntToArm64BitField(v_0.AuxInt)
21043 x := v_0.Args[0]
21044 if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
21045 break
21046 }
21047 v.reset(OpARM64UBFX)
21048 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
21049 v.AddArg(x)
21050 return true
21051 }
21052 return false
21053 }
21054 func rewriteValueARM64_OpARM64STP(v *Value) bool {
21055 v_3 := v.Args[3]
21056 v_2 := v.Args[2]
21057 v_1 := v.Args[1]
21058 v_0 := v.Args[0]
21059 b := v.Block
21060 config := b.Func.Config
21061
21062
21063
21064 for {
21065 off1 := auxIntToInt32(v.AuxInt)
21066 sym := auxToSym(v.Aux)
21067 if v_0.Op != OpARM64ADDconst {
21068 break
21069 }
21070 off2 := auxIntToInt64(v_0.AuxInt)
21071 ptr := v_0.Args[0]
21072 val1 := v_1
21073 val2 := v_2
21074 mem := v_3
21075 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
21076 break
21077 }
21078 v.reset(OpARM64STP)
21079 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
21080 v.Aux = symToAux(sym)
21081 v.AddArg4(ptr, val1, val2, mem)
21082 return true
21083 }
21084
21085
21086
21087 for {
21088 off1 := auxIntToInt32(v.AuxInt)
21089 sym1 := auxToSym(v.Aux)
21090 if v_0.Op != OpARM64MOVDaddr {
21091 break
21092 }
21093 off2 := auxIntToInt32(v_0.AuxInt)
21094 sym2 := auxToSym(v_0.Aux)
21095 ptr := v_0.Args[0]
21096 val1 := v_1
21097 val2 := v_2
21098 mem := v_3
21099 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
21100 break
21101 }
21102 v.reset(OpARM64STP)
21103 v.AuxInt = int32ToAuxInt(off1 + off2)
21104 v.Aux = symToAux(mergeSym(sym1, sym2))
21105 v.AddArg4(ptr, val1, val2, mem)
21106 return true
21107 }
21108
21109
21110 for {
21111 off := auxIntToInt32(v.AuxInt)
21112 sym := auxToSym(v.Aux)
21113 ptr := v_0
21114 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 || v_2.Op != OpARM64MOVDconst || auxIntToInt64(v_2.AuxInt) != 0 {
21115 break
21116 }
21117 mem := v_3
21118 v.reset(OpARM64MOVQstorezero)
21119 v.AuxInt = int32ToAuxInt(off)
21120 v.Aux = symToAux(sym)
21121 v.AddArg2(ptr, mem)
21122 return true
21123 }
21124 return false
21125 }
21126 func rewriteValueARM64_OpARM64SUB(v *Value) bool {
21127 v_1 := v.Args[1]
21128 v_0 := v.Args[0]
21129 b := v.Block
21130
21131
21132 for {
21133 x := v_0
21134 if v_1.Op != OpARM64MOVDconst {
21135 break
21136 }
21137 c := auxIntToInt64(v_1.AuxInt)
21138 v.reset(OpARM64SUBconst)
21139 v.AuxInt = int64ToAuxInt(c)
21140 v.AddArg(x)
21141 return true
21142 }
21143
21144
21145
21146 for {
21147 a := v_0
21148 l := v_1
21149 if l.Op != OpARM64MUL {
21150 break
21151 }
21152 y := l.Args[1]
21153 x := l.Args[0]
21154 if !(l.Uses == 1 && clobber(l)) {
21155 break
21156 }
21157 v.reset(OpARM64MSUB)
21158 v.AddArg3(a, x, y)
21159 return true
21160 }
21161
21162
21163
21164 for {
21165 a := v_0
21166 l := v_1
21167 if l.Op != OpARM64MNEG {
21168 break
21169 }
21170 y := l.Args[1]
21171 x := l.Args[0]
21172 if !(l.Uses == 1 && clobber(l)) {
21173 break
21174 }
21175 v.reset(OpARM64MADD)
21176 v.AddArg3(a, x, y)
21177 return true
21178 }
21179
21180
21181
21182 for {
21183 a := v_0
21184 l := v_1
21185 if l.Op != OpARM64MULW {
21186 break
21187 }
21188 y := l.Args[1]
21189 x := l.Args[0]
21190 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
21191 break
21192 }
21193 v.reset(OpARM64MSUBW)
21194 v.AddArg3(a, x, y)
21195 return true
21196 }
21197
21198
21199
21200 for {
21201 a := v_0
21202 l := v_1
21203 if l.Op != OpARM64MNEGW {
21204 break
21205 }
21206 y := l.Args[1]
21207 x := l.Args[0]
21208 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
21209 break
21210 }
21211 v.reset(OpARM64MADDW)
21212 v.AddArg3(a, x, y)
21213 return true
21214 }
21215
21216
21217 for {
21218 x := v_0
21219 if x != v_1 {
21220 break
21221 }
21222 v.reset(OpARM64MOVDconst)
21223 v.AuxInt = int64ToAuxInt(0)
21224 return true
21225 }
21226
21227
21228 for {
21229 x := v_0
21230 if v_1.Op != OpARM64SUB {
21231 break
21232 }
21233 z := v_1.Args[1]
21234 y := v_1.Args[0]
21235 v.reset(OpARM64SUB)
21236 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
21237 v0.AddArg2(x, z)
21238 v.AddArg2(v0, y)
21239 return true
21240 }
21241
21242
21243 for {
21244 if v_0.Op != OpARM64SUB {
21245 break
21246 }
21247 y := v_0.Args[1]
21248 x := v_0.Args[0]
21249 z := v_1
21250 v.reset(OpARM64SUB)
21251 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
21252 v0.AddArg2(y, z)
21253 v.AddArg2(x, v0)
21254 return true
21255 }
21256
21257
21258
21259 for {
21260 x0 := v_0
21261 x1 := v_1
21262 if x1.Op != OpARM64SLLconst {
21263 break
21264 }
21265 c := auxIntToInt64(x1.AuxInt)
21266 y := x1.Args[0]
21267 if !(clobberIfDead(x1)) {
21268 break
21269 }
21270 v.reset(OpARM64SUBshiftLL)
21271 v.AuxInt = int64ToAuxInt(c)
21272 v.AddArg2(x0, y)
21273 return true
21274 }
21275
21276
21277
21278 for {
21279 x0 := v_0
21280 x1 := v_1
21281 if x1.Op != OpARM64SRLconst {
21282 break
21283 }
21284 c := auxIntToInt64(x1.AuxInt)
21285 y := x1.Args[0]
21286 if !(clobberIfDead(x1)) {
21287 break
21288 }
21289 v.reset(OpARM64SUBshiftRL)
21290 v.AuxInt = int64ToAuxInt(c)
21291 v.AddArg2(x0, y)
21292 return true
21293 }
21294
21295
21296
21297 for {
21298 x0 := v_0
21299 x1 := v_1
21300 if x1.Op != OpARM64SRAconst {
21301 break
21302 }
21303 c := auxIntToInt64(x1.AuxInt)
21304 y := x1.Args[0]
21305 if !(clobberIfDead(x1)) {
21306 break
21307 }
21308 v.reset(OpARM64SUBshiftRA)
21309 v.AuxInt = int64ToAuxInt(c)
21310 v.AddArg2(x0, y)
21311 return true
21312 }
21313 return false
21314 }
21315 func rewriteValueARM64_OpARM64SUBconst(v *Value) bool {
21316 v_0 := v.Args[0]
21317
21318
21319 for {
21320 if auxIntToInt64(v.AuxInt) != 0 {
21321 break
21322 }
21323 x := v_0
21324 v.copyOf(x)
21325 return true
21326 }
21327
21328
21329 for {
21330 c := auxIntToInt64(v.AuxInt)
21331 if v_0.Op != OpARM64MOVDconst {
21332 break
21333 }
21334 d := auxIntToInt64(v_0.AuxInt)
21335 v.reset(OpARM64MOVDconst)
21336 v.AuxInt = int64ToAuxInt(d - c)
21337 return true
21338 }
21339
21340
21341 for {
21342 c := auxIntToInt64(v.AuxInt)
21343 if v_0.Op != OpARM64SUBconst {
21344 break
21345 }
21346 d := auxIntToInt64(v_0.AuxInt)
21347 x := v_0.Args[0]
21348 v.reset(OpARM64ADDconst)
21349 v.AuxInt = int64ToAuxInt(-c - d)
21350 v.AddArg(x)
21351 return true
21352 }
21353
21354
21355 for {
21356 c := auxIntToInt64(v.AuxInt)
21357 if v_0.Op != OpARM64ADDconst {
21358 break
21359 }
21360 d := auxIntToInt64(v_0.AuxInt)
21361 x := v_0.Args[0]
21362 v.reset(OpARM64ADDconst)
21363 v.AuxInt = int64ToAuxInt(-c + d)
21364 v.AddArg(x)
21365 return true
21366 }
21367 return false
21368 }
21369 func rewriteValueARM64_OpARM64SUBshiftLL(v *Value) bool {
21370 v_1 := v.Args[1]
21371 v_0 := v.Args[0]
21372
21373
21374 for {
21375 d := auxIntToInt64(v.AuxInt)
21376 x := v_0
21377 if v_1.Op != OpARM64MOVDconst {
21378 break
21379 }
21380 c := auxIntToInt64(v_1.AuxInt)
21381 v.reset(OpARM64SUBconst)
21382 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
21383 v.AddArg(x)
21384 return true
21385 }
21386
21387
21388 for {
21389 c := auxIntToInt64(v.AuxInt)
21390 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
21391 break
21392 }
21393 x := v_0.Args[0]
21394 if x != v_1 {
21395 break
21396 }
21397 v.reset(OpARM64MOVDconst)
21398 v.AuxInt = int64ToAuxInt(0)
21399 return true
21400 }
21401 return false
21402 }
21403 func rewriteValueARM64_OpARM64SUBshiftRA(v *Value) bool {
21404 v_1 := v.Args[1]
21405 v_0 := v.Args[0]
21406
21407
21408 for {
21409 d := auxIntToInt64(v.AuxInt)
21410 x := v_0
21411 if v_1.Op != OpARM64MOVDconst {
21412 break
21413 }
21414 c := auxIntToInt64(v_1.AuxInt)
21415 v.reset(OpARM64SUBconst)
21416 v.AuxInt = int64ToAuxInt(c >> uint64(d))
21417 v.AddArg(x)
21418 return true
21419 }
21420
21421
21422 for {
21423 c := auxIntToInt64(v.AuxInt)
21424 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
21425 break
21426 }
21427 x := v_0.Args[0]
21428 if x != v_1 {
21429 break
21430 }
21431 v.reset(OpARM64MOVDconst)
21432 v.AuxInt = int64ToAuxInt(0)
21433 return true
21434 }
21435 return false
21436 }
21437 func rewriteValueARM64_OpARM64SUBshiftRL(v *Value) bool {
21438 v_1 := v.Args[1]
21439 v_0 := v.Args[0]
21440
21441
21442 for {
21443 d := auxIntToInt64(v.AuxInt)
21444 x := v_0
21445 if v_1.Op != OpARM64MOVDconst {
21446 break
21447 }
21448 c := auxIntToInt64(v_1.AuxInt)
21449 v.reset(OpARM64SUBconst)
21450 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
21451 v.AddArg(x)
21452 return true
21453 }
21454
21455
21456 for {
21457 c := auxIntToInt64(v.AuxInt)
21458 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
21459 break
21460 }
21461 x := v_0.Args[0]
21462 if x != v_1 {
21463 break
21464 }
21465 v.reset(OpARM64MOVDconst)
21466 v.AuxInt = int64ToAuxInt(0)
21467 return true
21468 }
21469 return false
21470 }
21471 func rewriteValueARM64_OpARM64TST(v *Value) bool {
21472 v_1 := v.Args[1]
21473 v_0 := v.Args[0]
21474
21475
21476 for {
21477 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21478 x := v_0
21479 if v_1.Op != OpARM64MOVDconst {
21480 continue
21481 }
21482 c := auxIntToInt64(v_1.AuxInt)
21483 v.reset(OpARM64TSTconst)
21484 v.AuxInt = int64ToAuxInt(c)
21485 v.AddArg(x)
21486 return true
21487 }
21488 break
21489 }
21490
21491
21492
21493 for {
21494 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21495 x0 := v_0
21496 x1 := v_1
21497 if x1.Op != OpARM64SLLconst {
21498 continue
21499 }
21500 c := auxIntToInt64(x1.AuxInt)
21501 y := x1.Args[0]
21502 if !(clobberIfDead(x1)) {
21503 continue
21504 }
21505 v.reset(OpARM64TSTshiftLL)
21506 v.AuxInt = int64ToAuxInt(c)
21507 v.AddArg2(x0, y)
21508 return true
21509 }
21510 break
21511 }
21512
21513
21514
21515 for {
21516 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21517 x0 := v_0
21518 x1 := v_1
21519 if x1.Op != OpARM64SRLconst {
21520 continue
21521 }
21522 c := auxIntToInt64(x1.AuxInt)
21523 y := x1.Args[0]
21524 if !(clobberIfDead(x1)) {
21525 continue
21526 }
21527 v.reset(OpARM64TSTshiftRL)
21528 v.AuxInt = int64ToAuxInt(c)
21529 v.AddArg2(x0, y)
21530 return true
21531 }
21532 break
21533 }
21534
21535
21536
21537 for {
21538 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21539 x0 := v_0
21540 x1 := v_1
21541 if x1.Op != OpARM64SRAconst {
21542 continue
21543 }
21544 c := auxIntToInt64(x1.AuxInt)
21545 y := x1.Args[0]
21546 if !(clobberIfDead(x1)) {
21547 continue
21548 }
21549 v.reset(OpARM64TSTshiftRA)
21550 v.AuxInt = int64ToAuxInt(c)
21551 v.AddArg2(x0, y)
21552 return true
21553 }
21554 break
21555 }
21556
21557
21558
21559 for {
21560 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21561 x0 := v_0
21562 x1 := v_1
21563 if x1.Op != OpARM64RORconst {
21564 continue
21565 }
21566 c := auxIntToInt64(x1.AuxInt)
21567 y := x1.Args[0]
21568 if !(clobberIfDead(x1)) {
21569 continue
21570 }
21571 v.reset(OpARM64TSTshiftRO)
21572 v.AuxInt = int64ToAuxInt(c)
21573 v.AddArg2(x0, y)
21574 return true
21575 }
21576 break
21577 }
21578 return false
21579 }
21580 func rewriteValueARM64_OpARM64TSTW(v *Value) bool {
21581 v_1 := v.Args[1]
21582 v_0 := v.Args[0]
21583
21584
21585 for {
21586 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21587 x := v_0
21588 if v_1.Op != OpARM64MOVDconst {
21589 continue
21590 }
21591 c := auxIntToInt64(v_1.AuxInt)
21592 v.reset(OpARM64TSTWconst)
21593 v.AuxInt = int32ToAuxInt(int32(c))
21594 v.AddArg(x)
21595 return true
21596 }
21597 break
21598 }
21599 return false
21600 }
21601 func rewriteValueARM64_OpARM64TSTWconst(v *Value) bool {
21602 v_0 := v.Args[0]
21603
21604
21605 for {
21606 y := auxIntToInt32(v.AuxInt)
21607 if v_0.Op != OpARM64MOVDconst {
21608 break
21609 }
21610 x := auxIntToInt64(v_0.AuxInt)
21611 v.reset(OpARM64FlagConstant)
21612 v.AuxInt = flagConstantToAuxInt(logicFlags32(int32(x) & y))
21613 return true
21614 }
21615 return false
21616 }
21617 func rewriteValueARM64_OpARM64TSTconst(v *Value) bool {
21618 v_0 := v.Args[0]
21619
21620
21621 for {
21622 y := auxIntToInt64(v.AuxInt)
21623 if v_0.Op != OpARM64MOVDconst {
21624 break
21625 }
21626 x := auxIntToInt64(v_0.AuxInt)
21627 v.reset(OpARM64FlagConstant)
21628 v.AuxInt = flagConstantToAuxInt(logicFlags64(x & y))
21629 return true
21630 }
21631 return false
21632 }
21633 func rewriteValueARM64_OpARM64TSTshiftLL(v *Value) bool {
21634 v_1 := v.Args[1]
21635 v_0 := v.Args[0]
21636 b := v.Block
21637
21638
21639 for {
21640 d := auxIntToInt64(v.AuxInt)
21641 if v_0.Op != OpARM64MOVDconst {
21642 break
21643 }
21644 c := auxIntToInt64(v_0.AuxInt)
21645 x := v_1
21646 v.reset(OpARM64TSTconst)
21647 v.AuxInt = int64ToAuxInt(c)
21648 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
21649 v0.AuxInt = int64ToAuxInt(d)
21650 v0.AddArg(x)
21651 v.AddArg(v0)
21652 return true
21653 }
21654
21655
21656 for {
21657 d := auxIntToInt64(v.AuxInt)
21658 x := v_0
21659 if v_1.Op != OpARM64MOVDconst {
21660 break
21661 }
21662 c := auxIntToInt64(v_1.AuxInt)
21663 v.reset(OpARM64TSTconst)
21664 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
21665 v.AddArg(x)
21666 return true
21667 }
21668 return false
21669 }
21670 func rewriteValueARM64_OpARM64TSTshiftRA(v *Value) bool {
21671 v_1 := v.Args[1]
21672 v_0 := v.Args[0]
21673 b := v.Block
21674
21675
21676 for {
21677 d := auxIntToInt64(v.AuxInt)
21678 if v_0.Op != OpARM64MOVDconst {
21679 break
21680 }
21681 c := auxIntToInt64(v_0.AuxInt)
21682 x := v_1
21683 v.reset(OpARM64TSTconst)
21684 v.AuxInt = int64ToAuxInt(c)
21685 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
21686 v0.AuxInt = int64ToAuxInt(d)
21687 v0.AddArg(x)
21688 v.AddArg(v0)
21689 return true
21690 }
21691
21692
21693 for {
21694 d := auxIntToInt64(v.AuxInt)
21695 x := v_0
21696 if v_1.Op != OpARM64MOVDconst {
21697 break
21698 }
21699 c := auxIntToInt64(v_1.AuxInt)
21700 v.reset(OpARM64TSTconst)
21701 v.AuxInt = int64ToAuxInt(c >> uint64(d))
21702 v.AddArg(x)
21703 return true
21704 }
21705 return false
21706 }
21707 func rewriteValueARM64_OpARM64TSTshiftRL(v *Value) bool {
21708 v_1 := v.Args[1]
21709 v_0 := v.Args[0]
21710 b := v.Block
21711
21712
21713 for {
21714 d := auxIntToInt64(v.AuxInt)
21715 if v_0.Op != OpARM64MOVDconst {
21716 break
21717 }
21718 c := auxIntToInt64(v_0.AuxInt)
21719 x := v_1
21720 v.reset(OpARM64TSTconst)
21721 v.AuxInt = int64ToAuxInt(c)
21722 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
21723 v0.AuxInt = int64ToAuxInt(d)
21724 v0.AddArg(x)
21725 v.AddArg(v0)
21726 return true
21727 }
21728
21729
21730 for {
21731 d := auxIntToInt64(v.AuxInt)
21732 x := v_0
21733 if v_1.Op != OpARM64MOVDconst {
21734 break
21735 }
21736 c := auxIntToInt64(v_1.AuxInt)
21737 v.reset(OpARM64TSTconst)
21738 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
21739 v.AddArg(x)
21740 return true
21741 }
21742 return false
21743 }
21744 func rewriteValueARM64_OpARM64TSTshiftRO(v *Value) bool {
21745 v_1 := v.Args[1]
21746 v_0 := v.Args[0]
21747 b := v.Block
21748
21749
21750 for {
21751 d := auxIntToInt64(v.AuxInt)
21752 if v_0.Op != OpARM64MOVDconst {
21753 break
21754 }
21755 c := auxIntToInt64(v_0.AuxInt)
21756 x := v_1
21757 v.reset(OpARM64TSTconst)
21758 v.AuxInt = int64ToAuxInt(c)
21759 v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
21760 v0.AuxInt = int64ToAuxInt(d)
21761 v0.AddArg(x)
21762 v.AddArg(v0)
21763 return true
21764 }
21765
21766
21767 for {
21768 d := auxIntToInt64(v.AuxInt)
21769 x := v_0
21770 if v_1.Op != OpARM64MOVDconst {
21771 break
21772 }
21773 c := auxIntToInt64(v_1.AuxInt)
21774 v.reset(OpARM64TSTconst)
21775 v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
21776 v.AddArg(x)
21777 return true
21778 }
21779 return false
21780 }
21781 func rewriteValueARM64_OpARM64UBFIZ(v *Value) bool {
21782 v_0 := v.Args[0]
21783
21784
21785
21786 for {
21787 bfc := auxIntToArm64BitField(v.AuxInt)
21788 if v_0.Op != OpARM64SLLconst {
21789 break
21790 }
21791 sc := auxIntToInt64(v_0.AuxInt)
21792 x := v_0.Args[0]
21793 if !(sc < bfc.getARM64BFwidth()) {
21794 break
21795 }
21796 v.reset(OpARM64UBFIZ)
21797 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()-sc))
21798 v.AddArg(x)
21799 return true
21800 }
21801 return false
21802 }
21803 func rewriteValueARM64_OpARM64UBFX(v *Value) bool {
21804 v_0 := v.Args[0]
21805
21806
21807
21808 for {
21809 bfc := auxIntToArm64BitField(v.AuxInt)
21810 if v_0.Op != OpARM64SRLconst {
21811 break
21812 }
21813 sc := auxIntToInt64(v_0.AuxInt)
21814 x := v_0.Args[0]
21815 if !(sc+bfc.getARM64BFwidth()+bfc.getARM64BFlsb() < 64) {
21816 break
21817 }
21818 v.reset(OpARM64UBFX)
21819 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()+sc, bfc.getARM64BFwidth()))
21820 v.AddArg(x)
21821 return true
21822 }
21823
21824
21825
21826 for {
21827 bfc := auxIntToArm64BitField(v.AuxInt)
21828 if v_0.Op != OpARM64SLLconst {
21829 break
21830 }
21831 sc := auxIntToInt64(v_0.AuxInt)
21832 x := v_0.Args[0]
21833 if !(sc == bfc.getARM64BFlsb()) {
21834 break
21835 }
21836 v.reset(OpARM64ANDconst)
21837 v.AuxInt = int64ToAuxInt(1<<uint(bfc.getARM64BFwidth()) - 1)
21838 v.AddArg(x)
21839 return true
21840 }
21841
21842
21843
21844 for {
21845 bfc := auxIntToArm64BitField(v.AuxInt)
21846 if v_0.Op != OpARM64SLLconst {
21847 break
21848 }
21849 sc := auxIntToInt64(v_0.AuxInt)
21850 x := v_0.Args[0]
21851 if !(sc < bfc.getARM64BFlsb()) {
21852 break
21853 }
21854 v.reset(OpARM64UBFX)
21855 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.getARM64BFlsb()-sc, bfc.getARM64BFwidth()))
21856 v.AddArg(x)
21857 return true
21858 }
21859
21860
21861
21862 for {
21863 bfc := auxIntToArm64BitField(v.AuxInt)
21864 if v_0.Op != OpARM64SLLconst {
21865 break
21866 }
21867 sc := auxIntToInt64(v_0.AuxInt)
21868 x := v_0.Args[0]
21869 if !(sc > bfc.getARM64BFlsb() && sc < bfc.getARM64BFlsb()+bfc.getARM64BFwidth()) {
21870 break
21871 }
21872 v.reset(OpARM64UBFIZ)
21873 v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc-bfc.getARM64BFlsb(), bfc.getARM64BFlsb()+bfc.getARM64BFwidth()-sc))
21874 v.AddArg(x)
21875 return true
21876 }
21877 return false
21878 }
21879 func rewriteValueARM64_OpARM64UDIV(v *Value) bool {
21880 v_1 := v.Args[1]
21881 v_0 := v.Args[0]
21882
21883
21884 for {
21885 x := v_0
21886 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
21887 break
21888 }
21889 v.copyOf(x)
21890 return true
21891 }
21892
21893
21894
21895 for {
21896 x := v_0
21897 if v_1.Op != OpARM64MOVDconst {
21898 break
21899 }
21900 c := auxIntToInt64(v_1.AuxInt)
21901 if !(isPowerOfTwo64(c)) {
21902 break
21903 }
21904 v.reset(OpARM64SRLconst)
21905 v.AuxInt = int64ToAuxInt(log64(c))
21906 v.AddArg(x)
21907 return true
21908 }
21909
21910
21911
21912 for {
21913 if v_0.Op != OpARM64MOVDconst {
21914 break
21915 }
21916 c := auxIntToInt64(v_0.AuxInt)
21917 if v_1.Op != OpARM64MOVDconst {
21918 break
21919 }
21920 d := auxIntToInt64(v_1.AuxInt)
21921 if !(d != 0) {
21922 break
21923 }
21924 v.reset(OpARM64MOVDconst)
21925 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
21926 return true
21927 }
21928 return false
21929 }
21930 func rewriteValueARM64_OpARM64UDIVW(v *Value) bool {
21931 v_1 := v.Args[1]
21932 v_0 := v.Args[0]
21933
21934
21935
21936 for {
21937 x := v_0
21938 if v_1.Op != OpARM64MOVDconst {
21939 break
21940 }
21941 c := auxIntToInt64(v_1.AuxInt)
21942 if !(uint32(c) == 1) {
21943 break
21944 }
21945 v.copyOf(x)
21946 return true
21947 }
21948
21949
21950
21951 for {
21952 x := v_0
21953 if v_1.Op != OpARM64MOVDconst {
21954 break
21955 }
21956 c := auxIntToInt64(v_1.AuxInt)
21957 if !(isPowerOfTwo64(c) && is32Bit(c)) {
21958 break
21959 }
21960 v.reset(OpARM64SRLconst)
21961 v.AuxInt = int64ToAuxInt(log64(c))
21962 v.AddArg(x)
21963 return true
21964 }
21965
21966
21967
21968 for {
21969 if v_0.Op != OpARM64MOVDconst {
21970 break
21971 }
21972 c := auxIntToInt64(v_0.AuxInt)
21973 if v_1.Op != OpARM64MOVDconst {
21974 break
21975 }
21976 d := auxIntToInt64(v_1.AuxInt)
21977 if !(d != 0) {
21978 break
21979 }
21980 v.reset(OpARM64MOVDconst)
21981 v.AuxInt = int64ToAuxInt(int64(uint32(c) / uint32(d)))
21982 return true
21983 }
21984 return false
21985 }
21986 func rewriteValueARM64_OpARM64UMOD(v *Value) bool {
21987 v_1 := v.Args[1]
21988 v_0 := v.Args[0]
21989 b := v.Block
21990 typ := &b.Func.Config.Types
21991
21992
21993 for {
21994 if v.Type != typ.UInt64 {
21995 break
21996 }
21997 x := v_0
21998 y := v_1
21999 v.reset(OpARM64MSUB)
22000 v.Type = typ.UInt64
22001 v0 := b.NewValue0(v.Pos, OpARM64UDIV, typ.UInt64)
22002 v0.AddArg2(x, y)
22003 v.AddArg3(x, y, v0)
22004 return true
22005 }
22006
22007
22008 for {
22009 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
22010 break
22011 }
22012 v.reset(OpARM64MOVDconst)
22013 v.AuxInt = int64ToAuxInt(0)
22014 return true
22015 }
22016
22017
22018
22019 for {
22020 x := v_0
22021 if v_1.Op != OpARM64MOVDconst {
22022 break
22023 }
22024 c := auxIntToInt64(v_1.AuxInt)
22025 if !(isPowerOfTwo64(c)) {
22026 break
22027 }
22028 v.reset(OpARM64ANDconst)
22029 v.AuxInt = int64ToAuxInt(c - 1)
22030 v.AddArg(x)
22031 return true
22032 }
22033
22034
22035
22036 for {
22037 if v_0.Op != OpARM64MOVDconst {
22038 break
22039 }
22040 c := auxIntToInt64(v_0.AuxInt)
22041 if v_1.Op != OpARM64MOVDconst {
22042 break
22043 }
22044 d := auxIntToInt64(v_1.AuxInt)
22045 if !(d != 0) {
22046 break
22047 }
22048 v.reset(OpARM64MOVDconst)
22049 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
22050 return true
22051 }
22052 return false
22053 }
22054 func rewriteValueARM64_OpARM64UMODW(v *Value) bool {
22055 v_1 := v.Args[1]
22056 v_0 := v.Args[0]
22057 b := v.Block
22058 typ := &b.Func.Config.Types
22059
22060
22061 for {
22062 if v.Type != typ.UInt32 {
22063 break
22064 }
22065 x := v_0
22066 y := v_1
22067 v.reset(OpARM64MSUBW)
22068 v.Type = typ.UInt32
22069 v0 := b.NewValue0(v.Pos, OpARM64UDIVW, typ.UInt32)
22070 v0.AddArg2(x, y)
22071 v.AddArg3(x, y, v0)
22072 return true
22073 }
22074
22075
22076
22077 for {
22078 if v_1.Op != OpARM64MOVDconst {
22079 break
22080 }
22081 c := auxIntToInt64(v_1.AuxInt)
22082 if !(uint32(c) == 1) {
22083 break
22084 }
22085 v.reset(OpARM64MOVDconst)
22086 v.AuxInt = int64ToAuxInt(0)
22087 return true
22088 }
22089
22090
22091
22092 for {
22093 x := v_0
22094 if v_1.Op != OpARM64MOVDconst {
22095 break
22096 }
22097 c := auxIntToInt64(v_1.AuxInt)
22098 if !(isPowerOfTwo64(c) && is32Bit(c)) {
22099 break
22100 }
22101 v.reset(OpARM64ANDconst)
22102 v.AuxInt = int64ToAuxInt(c - 1)
22103 v.AddArg(x)
22104 return true
22105 }
22106
22107
22108
22109 for {
22110 if v_0.Op != OpARM64MOVDconst {
22111 break
22112 }
22113 c := auxIntToInt64(v_0.AuxInt)
22114 if v_1.Op != OpARM64MOVDconst {
22115 break
22116 }
22117 d := auxIntToInt64(v_1.AuxInt)
22118 if !(d != 0) {
22119 break
22120 }
22121 v.reset(OpARM64MOVDconst)
22122 v.AuxInt = int64ToAuxInt(int64(uint32(c) % uint32(d)))
22123 return true
22124 }
22125 return false
22126 }
22127 func rewriteValueARM64_OpARM64XOR(v *Value) bool {
22128 v_1 := v.Args[1]
22129 v_0 := v.Args[0]
22130 b := v.Block
22131 typ := &b.Func.Config.Types
22132
22133
22134 for {
22135 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22136 x := v_0
22137 if v_1.Op != OpARM64MOVDconst {
22138 continue
22139 }
22140 c := auxIntToInt64(v_1.AuxInt)
22141 v.reset(OpARM64XORconst)
22142 v.AuxInt = int64ToAuxInt(c)
22143 v.AddArg(x)
22144 return true
22145 }
22146 break
22147 }
22148
22149
22150 for {
22151 x := v_0
22152 if x != v_1 {
22153 break
22154 }
22155 v.reset(OpARM64MOVDconst)
22156 v.AuxInt = int64ToAuxInt(0)
22157 return true
22158 }
22159
22160
22161 for {
22162 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22163 x := v_0
22164 if v_1.Op != OpARM64MVN {
22165 continue
22166 }
22167 y := v_1.Args[0]
22168 v.reset(OpARM64EON)
22169 v.AddArg2(x, y)
22170 return true
22171 }
22172 break
22173 }
22174
22175
22176
22177 for {
22178 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22179 x0 := v_0
22180 x1 := v_1
22181 if x1.Op != OpARM64SLLconst {
22182 continue
22183 }
22184 c := auxIntToInt64(x1.AuxInt)
22185 y := x1.Args[0]
22186 if !(clobberIfDead(x1)) {
22187 continue
22188 }
22189 v.reset(OpARM64XORshiftLL)
22190 v.AuxInt = int64ToAuxInt(c)
22191 v.AddArg2(x0, y)
22192 return true
22193 }
22194 break
22195 }
22196
22197
22198
22199 for {
22200 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22201 x0 := v_0
22202 x1 := v_1
22203 if x1.Op != OpARM64SRLconst {
22204 continue
22205 }
22206 c := auxIntToInt64(x1.AuxInt)
22207 y := x1.Args[0]
22208 if !(clobberIfDead(x1)) {
22209 continue
22210 }
22211 v.reset(OpARM64XORshiftRL)
22212 v.AuxInt = int64ToAuxInt(c)
22213 v.AddArg2(x0, y)
22214 return true
22215 }
22216 break
22217 }
22218
22219
22220
22221 for {
22222 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22223 x0 := v_0
22224 x1 := v_1
22225 if x1.Op != OpARM64SRAconst {
22226 continue
22227 }
22228 c := auxIntToInt64(x1.AuxInt)
22229 y := x1.Args[0]
22230 if !(clobberIfDead(x1)) {
22231 continue
22232 }
22233 v.reset(OpARM64XORshiftRA)
22234 v.AuxInt = int64ToAuxInt(c)
22235 v.AddArg2(x0, y)
22236 return true
22237 }
22238 break
22239 }
22240
22241
22242
22243 for {
22244 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22245 x0 := v_0
22246 x1 := v_1
22247 if x1.Op != OpARM64RORconst {
22248 continue
22249 }
22250 c := auxIntToInt64(x1.AuxInt)
22251 y := x1.Args[0]
22252 if !(clobberIfDead(x1)) {
22253 continue
22254 }
22255 v.reset(OpARM64XORshiftRO)
22256 v.AuxInt = int64ToAuxInt(c)
22257 v.AddArg2(x0, y)
22258 return true
22259 }
22260 break
22261 }
22262
22263
22264
22265 for {
22266 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22267 if v_0.Op != OpARM64SLL {
22268 continue
22269 }
22270 _ = v_0.Args[1]
22271 x := v_0.Args[0]
22272 v_0_1 := v_0.Args[1]
22273 if v_0_1.Op != OpARM64ANDconst {
22274 continue
22275 }
22276 t := v_0_1.Type
22277 if auxIntToInt64(v_0_1.AuxInt) != 63 {
22278 continue
22279 }
22280 y := v_0_1.Args[0]
22281 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
22282 continue
22283 }
22284 cc := auxIntToOp(v_1.AuxInt)
22285 _ = v_1.Args[1]
22286 v_1_0 := v_1.Args[0]
22287 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 {
22288 continue
22289 }
22290 _ = v_1_0.Args[1]
22291 if x != v_1_0.Args[0] {
22292 continue
22293 }
22294 v_1_0_1 := v_1_0.Args[1]
22295 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
22296 continue
22297 }
22298 _ = v_1_0_1.Args[1]
22299 v_1_0_1_0 := v_1_0_1.Args[0]
22300 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 64 {
22301 continue
22302 }
22303 v_1_0_1_1 := v_1_0_1.Args[1]
22304 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 63 || y != v_1_0_1_1.Args[0] {
22305 continue
22306 }
22307 v_1_1 := v_1.Args[1]
22308 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
22309 continue
22310 }
22311 v_1_1_0 := v_1_1.Args[0]
22312 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
22313 continue
22314 }
22315 _ = v_1_1_0.Args[1]
22316 v_1_1_0_0 := v_1_1_0.Args[0]
22317 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 64 {
22318 continue
22319 }
22320 v_1_1_0_1 := v_1_1_0.Args[1]
22321 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
22322 continue
22323 }
22324 v.reset(OpARM64ROR)
22325 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
22326 v0.AddArg(y)
22327 v.AddArg2(x, v0)
22328 return true
22329 }
22330 break
22331 }
22332
22333
22334
22335 for {
22336 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22337 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 {
22338 continue
22339 }
22340 _ = v_0.Args[1]
22341 x := v_0.Args[0]
22342 v_0_1 := v_0.Args[1]
22343 if v_0_1.Op != OpARM64ANDconst {
22344 continue
22345 }
22346 t := v_0_1.Type
22347 if auxIntToInt64(v_0_1.AuxInt) != 63 {
22348 continue
22349 }
22350 y := v_0_1.Args[0]
22351 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
22352 continue
22353 }
22354 cc := auxIntToOp(v_1.AuxInt)
22355 _ = v_1.Args[1]
22356 v_1_0 := v_1.Args[0]
22357 if v_1_0.Op != OpARM64SLL {
22358 continue
22359 }
22360 _ = v_1_0.Args[1]
22361 if x != v_1_0.Args[0] {
22362 continue
22363 }
22364 v_1_0_1 := v_1_0.Args[1]
22365 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
22366 continue
22367 }
22368 _ = v_1_0_1.Args[1]
22369 v_1_0_1_0 := v_1_0_1.Args[0]
22370 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 64 {
22371 continue
22372 }
22373 v_1_0_1_1 := v_1_0_1.Args[1]
22374 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 63 || y != v_1_0_1_1.Args[0] {
22375 continue
22376 }
22377 v_1_1 := v_1.Args[1]
22378 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
22379 continue
22380 }
22381 v_1_1_0 := v_1_1.Args[0]
22382 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
22383 continue
22384 }
22385 _ = v_1_1_0.Args[1]
22386 v_1_1_0_0 := v_1_1_0.Args[0]
22387 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 64 {
22388 continue
22389 }
22390 v_1_1_0_1 := v_1_1_0.Args[1]
22391 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
22392 continue
22393 }
22394 v.reset(OpARM64ROR)
22395 v.AddArg2(x, y)
22396 return true
22397 }
22398 break
22399 }
22400
22401
22402
22403 for {
22404 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22405 if v_0.Op != OpARM64SLL {
22406 continue
22407 }
22408 _ = v_0.Args[1]
22409 x := v_0.Args[0]
22410 v_0_1 := v_0.Args[1]
22411 if v_0_1.Op != OpARM64ANDconst {
22412 continue
22413 }
22414 t := v_0_1.Type
22415 if auxIntToInt64(v_0_1.AuxInt) != 31 {
22416 continue
22417 }
22418 y := v_0_1.Args[0]
22419 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
22420 continue
22421 }
22422 cc := auxIntToOp(v_1.AuxInt)
22423 _ = v_1.Args[1]
22424 v_1_0 := v_1.Args[0]
22425 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 {
22426 continue
22427 }
22428 _ = v_1_0.Args[1]
22429 v_1_0_0 := v_1_0.Args[0]
22430 if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] {
22431 continue
22432 }
22433 v_1_0_1 := v_1_0.Args[1]
22434 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
22435 continue
22436 }
22437 _ = v_1_0_1.Args[1]
22438 v_1_0_1_0 := v_1_0_1.Args[0]
22439 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 32 {
22440 continue
22441 }
22442 v_1_0_1_1 := v_1_0_1.Args[1]
22443 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 31 || y != v_1_0_1_1.Args[0] {
22444 continue
22445 }
22446 v_1_1 := v_1.Args[1]
22447 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
22448 continue
22449 }
22450 v_1_1_0 := v_1_1.Args[0]
22451 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
22452 continue
22453 }
22454 _ = v_1_1_0.Args[1]
22455 v_1_1_0_0 := v_1_1_0.Args[0]
22456 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 32 {
22457 continue
22458 }
22459 v_1_1_0_1 := v_1_1_0.Args[1]
22460 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
22461 continue
22462 }
22463 v.reset(OpARM64RORW)
22464 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
22465 v0.AddArg(y)
22466 v.AddArg2(x, v0)
22467 return true
22468 }
22469 break
22470 }
22471
22472
22473
22474 for {
22475 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22476 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 {
22477 continue
22478 }
22479 _ = v_0.Args[1]
22480 v_0_0 := v_0.Args[0]
22481 if v_0_0.Op != OpARM64MOVWUreg {
22482 continue
22483 }
22484 x := v_0_0.Args[0]
22485 v_0_1 := v_0.Args[1]
22486 if v_0_1.Op != OpARM64ANDconst {
22487 continue
22488 }
22489 t := v_0_1.Type
22490 if auxIntToInt64(v_0_1.AuxInt) != 31 {
22491 continue
22492 }
22493 y := v_0_1.Args[0]
22494 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
22495 continue
22496 }
22497 cc := auxIntToOp(v_1.AuxInt)
22498 _ = v_1.Args[1]
22499 v_1_0 := v_1.Args[0]
22500 if v_1_0.Op != OpARM64SLL {
22501 continue
22502 }
22503 _ = v_1_0.Args[1]
22504 if x != v_1_0.Args[0] {
22505 continue
22506 }
22507 v_1_0_1 := v_1_0.Args[1]
22508 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
22509 continue
22510 }
22511 _ = v_1_0_1.Args[1]
22512 v_1_0_1_0 := v_1_0_1.Args[0]
22513 if v_1_0_1_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_0_1_0.AuxInt) != 32 {
22514 continue
22515 }
22516 v_1_0_1_1 := v_1_0_1.Args[1]
22517 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || auxIntToInt64(v_1_0_1_1.AuxInt) != 31 || y != v_1_0_1_1.Args[0] {
22518 continue
22519 }
22520 v_1_1 := v_1.Args[1]
22521 if v_1_1.Op != OpARM64CMPconst || auxIntToInt64(v_1_1.AuxInt) != 64 {
22522 continue
22523 }
22524 v_1_1_0 := v_1_1.Args[0]
22525 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
22526 continue
22527 }
22528 _ = v_1_1_0.Args[1]
22529 v_1_1_0_0 := v_1_1_0.Args[0]
22530 if v_1_1_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_1_1_0_0.AuxInt) != 32 {
22531 continue
22532 }
22533 v_1_1_0_1 := v_1_1_0.Args[1]
22534 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || auxIntToInt64(v_1_1_0_1.AuxInt) != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
22535 continue
22536 }
22537 v.reset(OpARM64RORW)
22538 v.AddArg2(x, y)
22539 return true
22540 }
22541 break
22542 }
22543 return false
22544 }
22545 func rewriteValueARM64_OpARM64XORconst(v *Value) bool {
22546 v_0 := v.Args[0]
22547
22548
22549 for {
22550 if auxIntToInt64(v.AuxInt) != 0 {
22551 break
22552 }
22553 x := v_0
22554 v.copyOf(x)
22555 return true
22556 }
22557
22558
22559 for {
22560 if auxIntToInt64(v.AuxInt) != -1 {
22561 break
22562 }
22563 x := v_0
22564 v.reset(OpARM64MVN)
22565 v.AddArg(x)
22566 return true
22567 }
22568
22569
22570 for {
22571 c := auxIntToInt64(v.AuxInt)
22572 if v_0.Op != OpARM64MOVDconst {
22573 break
22574 }
22575 d := auxIntToInt64(v_0.AuxInt)
22576 v.reset(OpARM64MOVDconst)
22577 v.AuxInt = int64ToAuxInt(c ^ d)
22578 return true
22579 }
22580
22581
22582 for {
22583 c := auxIntToInt64(v.AuxInt)
22584 if v_0.Op != OpARM64XORconst {
22585 break
22586 }
22587 d := auxIntToInt64(v_0.AuxInt)
22588 x := v_0.Args[0]
22589 v.reset(OpARM64XORconst)
22590 v.AuxInt = int64ToAuxInt(c ^ d)
22591 v.AddArg(x)
22592 return true
22593 }
22594 return false
22595 }
22596 func rewriteValueARM64_OpARM64XORshiftLL(v *Value) bool {
22597 v_1 := v.Args[1]
22598 v_0 := v.Args[0]
22599 b := v.Block
22600 typ := &b.Func.Config.Types
22601
22602
22603 for {
22604 d := auxIntToInt64(v.AuxInt)
22605 if v_0.Op != OpARM64MOVDconst {
22606 break
22607 }
22608 c := auxIntToInt64(v_0.AuxInt)
22609 x := v_1
22610 v.reset(OpARM64XORconst)
22611 v.AuxInt = int64ToAuxInt(c)
22612 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
22613 v0.AuxInt = int64ToAuxInt(d)
22614 v0.AddArg(x)
22615 v.AddArg(v0)
22616 return true
22617 }
22618
22619
22620 for {
22621 d := auxIntToInt64(v.AuxInt)
22622 x := v_0
22623 if v_1.Op != OpARM64MOVDconst {
22624 break
22625 }
22626 c := auxIntToInt64(v_1.AuxInt)
22627 v.reset(OpARM64XORconst)
22628 v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
22629 v.AddArg(x)
22630 return true
22631 }
22632
22633
22634 for {
22635 c := auxIntToInt64(v.AuxInt)
22636 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
22637 break
22638 }
22639 x := v_0.Args[0]
22640 if x != v_1 {
22641 break
22642 }
22643 v.reset(OpARM64MOVDconst)
22644 v.AuxInt = int64ToAuxInt(0)
22645 return true
22646 }
22647
22648
22649 for {
22650 c := auxIntToInt64(v.AuxInt)
22651 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
22652 break
22653 }
22654 x := v_0.Args[0]
22655 if x != v_1 {
22656 break
22657 }
22658 v.reset(OpARM64RORconst)
22659 v.AuxInt = int64ToAuxInt(64 - c)
22660 v.AddArg(x)
22661 return true
22662 }
22663
22664
22665
22666 for {
22667 t := v.Type
22668 c := auxIntToInt64(v.AuxInt)
22669 if v_0.Op != OpARM64UBFX {
22670 break
22671 }
22672 bfc := auxIntToArm64BitField(v_0.AuxInt)
22673 x := v_0.Args[0]
22674 if x != v_1 || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
22675 break
22676 }
22677 v.reset(OpARM64RORWconst)
22678 v.AuxInt = int64ToAuxInt(32 - c)
22679 v.AddArg(x)
22680 return true
22681 }
22682
22683
22684 for {
22685 if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
22686 break
22687 }
22688 x := v_0.Args[0]
22689 if x != v_1 {
22690 break
22691 }
22692 v.reset(OpARM64REV16W)
22693 v.AddArg(x)
22694 return true
22695 }
22696
22697
22698
22699 for {
22700 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
22701 break
22702 }
22703 v_0_0 := v_0.Args[0]
22704 if v_0_0.Op != OpARM64ANDconst {
22705 break
22706 }
22707 c1 := auxIntToInt64(v_0_0.AuxInt)
22708 x := v_0_0.Args[0]
22709 if v_1.Op != OpARM64ANDconst {
22710 break
22711 }
22712 c2 := auxIntToInt64(v_1.AuxInt)
22713 if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
22714 break
22715 }
22716 v.reset(OpARM64REV16W)
22717 v.AddArg(x)
22718 return true
22719 }
22720
22721
22722
22723 for {
22724 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
22725 break
22726 }
22727 v_0_0 := v_0.Args[0]
22728 if v_0_0.Op != OpARM64ANDconst {
22729 break
22730 }
22731 c1 := auxIntToInt64(v_0_0.AuxInt)
22732 x := v_0_0.Args[0]
22733 if v_1.Op != OpARM64ANDconst {
22734 break
22735 }
22736 c2 := auxIntToInt64(v_1.AuxInt)
22737 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
22738 break
22739 }
22740 v.reset(OpARM64REV16)
22741 v.AddArg(x)
22742 return true
22743 }
22744
22745
22746
22747 for {
22748 if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
22749 break
22750 }
22751 v_0_0 := v_0.Args[0]
22752 if v_0_0.Op != OpARM64ANDconst {
22753 break
22754 }
22755 c1 := auxIntToInt64(v_0_0.AuxInt)
22756 x := v_0_0.Args[0]
22757 if v_1.Op != OpARM64ANDconst {
22758 break
22759 }
22760 c2 := auxIntToInt64(v_1.AuxInt)
22761 if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
22762 break
22763 }
22764 v.reset(OpARM64REV16)
22765 v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
22766 v0.AuxInt = int64ToAuxInt(0xffffffff)
22767 v0.AddArg(x)
22768 v.AddArg(v0)
22769 return true
22770 }
22771
22772
22773 for {
22774 c := auxIntToInt64(v.AuxInt)
22775 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
22776 break
22777 }
22778 x := v_0.Args[0]
22779 x2 := v_1
22780 v.reset(OpARM64EXTRconst)
22781 v.AuxInt = int64ToAuxInt(64 - c)
22782 v.AddArg2(x2, x)
22783 return true
22784 }
22785
22786
22787
22788 for {
22789 t := v.Type
22790 c := auxIntToInt64(v.AuxInt)
22791 if v_0.Op != OpARM64UBFX {
22792 break
22793 }
22794 bfc := auxIntToArm64BitField(v_0.AuxInt)
22795 x := v_0.Args[0]
22796 x2 := v_1
22797 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
22798 break
22799 }
22800 v.reset(OpARM64EXTRWconst)
22801 v.AuxInt = int64ToAuxInt(32 - c)
22802 v.AddArg2(x2, x)
22803 return true
22804 }
22805 return false
22806 }
22807 func rewriteValueARM64_OpARM64XORshiftRA(v *Value) bool {
22808 v_1 := v.Args[1]
22809 v_0 := v.Args[0]
22810 b := v.Block
22811
22812
22813 for {
22814 d := auxIntToInt64(v.AuxInt)
22815 if v_0.Op != OpARM64MOVDconst {
22816 break
22817 }
22818 c := auxIntToInt64(v_0.AuxInt)
22819 x := v_1
22820 v.reset(OpARM64XORconst)
22821 v.AuxInt = int64ToAuxInt(c)
22822 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
22823 v0.AuxInt = int64ToAuxInt(d)
22824 v0.AddArg(x)
22825 v.AddArg(v0)
22826 return true
22827 }
22828
22829
22830 for {
22831 d := auxIntToInt64(v.AuxInt)
22832 x := v_0
22833 if v_1.Op != OpARM64MOVDconst {
22834 break
22835 }
22836 c := auxIntToInt64(v_1.AuxInt)
22837 v.reset(OpARM64XORconst)
22838 v.AuxInt = int64ToAuxInt(c >> uint64(d))
22839 v.AddArg(x)
22840 return true
22841 }
22842
22843
22844 for {
22845 c := auxIntToInt64(v.AuxInt)
22846 if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
22847 break
22848 }
22849 x := v_0.Args[0]
22850 if x != v_1 {
22851 break
22852 }
22853 v.reset(OpARM64MOVDconst)
22854 v.AuxInt = int64ToAuxInt(0)
22855 return true
22856 }
22857 return false
22858 }
22859 func rewriteValueARM64_OpARM64XORshiftRL(v *Value) bool {
22860 v_1 := v.Args[1]
22861 v_0 := v.Args[0]
22862 b := v.Block
22863
22864
22865 for {
22866 d := auxIntToInt64(v.AuxInt)
22867 if v_0.Op != OpARM64MOVDconst {
22868 break
22869 }
22870 c := auxIntToInt64(v_0.AuxInt)
22871 x := v_1
22872 v.reset(OpARM64XORconst)
22873 v.AuxInt = int64ToAuxInt(c)
22874 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
22875 v0.AuxInt = int64ToAuxInt(d)
22876 v0.AddArg(x)
22877 v.AddArg(v0)
22878 return true
22879 }
22880
22881
22882 for {
22883 d := auxIntToInt64(v.AuxInt)
22884 x := v_0
22885 if v_1.Op != OpARM64MOVDconst {
22886 break
22887 }
22888 c := auxIntToInt64(v_1.AuxInt)
22889 v.reset(OpARM64XORconst)
22890 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
22891 v.AddArg(x)
22892 return true
22893 }
22894
22895
22896 for {
22897 c := auxIntToInt64(v.AuxInt)
22898 if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
22899 break
22900 }
22901 x := v_0.Args[0]
22902 if x != v_1 {
22903 break
22904 }
22905 v.reset(OpARM64MOVDconst)
22906 v.AuxInt = int64ToAuxInt(0)
22907 return true
22908 }
22909
22910
22911 for {
22912 c := auxIntToInt64(v.AuxInt)
22913 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
22914 break
22915 }
22916 x := v_0.Args[0]
22917 if x != v_1 {
22918 break
22919 }
22920 v.reset(OpARM64RORconst)
22921 v.AuxInt = int64ToAuxInt(c)
22922 v.AddArg(x)
22923 return true
22924 }
22925
22926
22927
22928 for {
22929 t := v.Type
22930 c := auxIntToInt64(v.AuxInt)
22931 if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != 32-c {
22932 break
22933 }
22934 x := v_0.Args[0]
22935 if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) {
22936 break
22937 }
22938 v.reset(OpARM64RORWconst)
22939 v.AuxInt = int64ToAuxInt(c)
22940 v.AddArg(x)
22941 return true
22942 }
22943 return false
22944 }
22945 func rewriteValueARM64_OpARM64XORshiftRO(v *Value) bool {
22946 v_1 := v.Args[1]
22947 v_0 := v.Args[0]
22948 b := v.Block
22949
22950
22951 for {
22952 d := auxIntToInt64(v.AuxInt)
22953 if v_0.Op != OpARM64MOVDconst {
22954 break
22955 }
22956 c := auxIntToInt64(v_0.AuxInt)
22957 x := v_1
22958 v.reset(OpARM64XORconst)
22959 v.AuxInt = int64ToAuxInt(c)
22960 v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
22961 v0.AuxInt = int64ToAuxInt(d)
22962 v0.AddArg(x)
22963 v.AddArg(v0)
22964 return true
22965 }
22966
22967
22968 for {
22969 d := auxIntToInt64(v.AuxInt)
22970 x := v_0
22971 if v_1.Op != OpARM64MOVDconst {
22972 break
22973 }
22974 c := auxIntToInt64(v_1.AuxInt)
22975 v.reset(OpARM64XORconst)
22976 v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
22977 v.AddArg(x)
22978 return true
22979 }
22980
22981
22982 for {
22983 c := auxIntToInt64(v.AuxInt)
22984 if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
22985 break
22986 }
22987 x := v_0.Args[0]
22988 if x != v_1 {
22989 break
22990 }
22991 v.reset(OpARM64MOVDconst)
22992 v.AuxInt = int64ToAuxInt(0)
22993 return true
22994 }
22995 return false
22996 }
22997 func rewriteValueARM64_OpAddr(v *Value) bool {
22998 v_0 := v.Args[0]
22999
23000
23001 for {
23002 sym := auxToSym(v.Aux)
23003 base := v_0
23004 v.reset(OpARM64MOVDaddr)
23005 v.Aux = symToAux(sym)
23006 v.AddArg(base)
23007 return true
23008 }
23009 }
23010 func rewriteValueARM64_OpAtomicAnd32(v *Value) bool {
23011 v_2 := v.Args[2]
23012 v_1 := v.Args[1]
23013 v_0 := v.Args[0]
23014 b := v.Block
23015 typ := &b.Func.Config.Types
23016
23017
23018 for {
23019 ptr := v_0
23020 val := v_1
23021 mem := v_2
23022 v.reset(OpSelect1)
23023 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd32, types.NewTuple(typ.UInt32, types.TypeMem))
23024 v0.AddArg3(ptr, val, mem)
23025 v.AddArg(v0)
23026 return true
23027 }
23028 }
23029 func rewriteValueARM64_OpAtomicAnd32Variant(v *Value) bool {
23030 v_2 := v.Args[2]
23031 v_1 := v.Args[1]
23032 v_0 := v.Args[0]
23033 b := v.Block
23034 typ := &b.Func.Config.Types
23035
23036
23037 for {
23038 ptr := v_0
23039 val := v_1
23040 mem := v_2
23041 v.reset(OpSelect1)
23042 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd32Variant, types.NewTuple(typ.UInt32, types.TypeMem))
23043 v0.AddArg3(ptr, val, mem)
23044 v.AddArg(v0)
23045 return true
23046 }
23047 }
23048 func rewriteValueARM64_OpAtomicAnd8(v *Value) bool {
23049 v_2 := v.Args[2]
23050 v_1 := v.Args[1]
23051 v_0 := v.Args[0]
23052 b := v.Block
23053 typ := &b.Func.Config.Types
23054
23055
23056 for {
23057 ptr := v_0
23058 val := v_1
23059 mem := v_2
23060 v.reset(OpSelect1)
23061 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem))
23062 v0.AddArg3(ptr, val, mem)
23063 v.AddArg(v0)
23064 return true
23065 }
23066 }
23067 func rewriteValueARM64_OpAtomicAnd8Variant(v *Value) bool {
23068 v_2 := v.Args[2]
23069 v_1 := v.Args[1]
23070 v_0 := v.Args[0]
23071 b := v.Block
23072 typ := &b.Func.Config.Types
23073
23074
23075 for {
23076 ptr := v_0
23077 val := v_1
23078 mem := v_2
23079 v.reset(OpSelect1)
23080 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8Variant, types.NewTuple(typ.UInt8, types.TypeMem))
23081 v0.AddArg3(ptr, val, mem)
23082 v.AddArg(v0)
23083 return true
23084 }
23085 }
23086 func rewriteValueARM64_OpAtomicOr32(v *Value) bool {
23087 v_2 := v.Args[2]
23088 v_1 := v.Args[1]
23089 v_0 := v.Args[0]
23090 b := v.Block
23091 typ := &b.Func.Config.Types
23092
23093
23094 for {
23095 ptr := v_0
23096 val := v_1
23097 mem := v_2
23098 v.reset(OpSelect1)
23099 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr32, types.NewTuple(typ.UInt32, types.TypeMem))
23100 v0.AddArg3(ptr, val, mem)
23101 v.AddArg(v0)
23102 return true
23103 }
23104 }
23105 func rewriteValueARM64_OpAtomicOr32Variant(v *Value) bool {
23106 v_2 := v.Args[2]
23107 v_1 := v.Args[1]
23108 v_0 := v.Args[0]
23109 b := v.Block
23110 typ := &b.Func.Config.Types
23111
23112
23113 for {
23114 ptr := v_0
23115 val := v_1
23116 mem := v_2
23117 v.reset(OpSelect1)
23118 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr32Variant, types.NewTuple(typ.UInt32, types.TypeMem))
23119 v0.AddArg3(ptr, val, mem)
23120 v.AddArg(v0)
23121 return true
23122 }
23123 }
23124 func rewriteValueARM64_OpAtomicOr8(v *Value) bool {
23125 v_2 := v.Args[2]
23126 v_1 := v.Args[1]
23127 v_0 := v.Args[0]
23128 b := v.Block
23129 typ := &b.Func.Config.Types
23130
23131
23132 for {
23133 ptr := v_0
23134 val := v_1
23135 mem := v_2
23136 v.reset(OpSelect1)
23137 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem))
23138 v0.AddArg3(ptr, val, mem)
23139 v.AddArg(v0)
23140 return true
23141 }
23142 }
23143 func rewriteValueARM64_OpAtomicOr8Variant(v *Value) bool {
23144 v_2 := v.Args[2]
23145 v_1 := v.Args[1]
23146 v_0 := v.Args[0]
23147 b := v.Block
23148 typ := &b.Func.Config.Types
23149
23150
23151 for {
23152 ptr := v_0
23153 val := v_1
23154 mem := v_2
23155 v.reset(OpSelect1)
23156 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8Variant, types.NewTuple(typ.UInt8, types.TypeMem))
23157 v0.AddArg3(ptr, val, mem)
23158 v.AddArg(v0)
23159 return true
23160 }
23161 }
23162 func rewriteValueARM64_OpAvg64u(v *Value) bool {
23163 v_1 := v.Args[1]
23164 v_0 := v.Args[0]
23165 b := v.Block
23166
23167
23168 for {
23169 t := v.Type
23170 x := v_0
23171 y := v_1
23172 v.reset(OpARM64ADD)
23173 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
23174 v0.AuxInt = int64ToAuxInt(1)
23175 v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
23176 v1.AddArg2(x, y)
23177 v0.AddArg(v1)
23178 v.AddArg2(v0, y)
23179 return true
23180 }
23181 }
23182 func rewriteValueARM64_OpBitLen32(v *Value) bool {
23183 v_0 := v.Args[0]
23184 b := v.Block
23185 typ := &b.Func.Config.Types
23186
23187
23188 for {
23189 x := v_0
23190 v.reset(OpARM64SUB)
23191 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23192 v0.AuxInt = int64ToAuxInt(32)
23193 v1 := b.NewValue0(v.Pos, OpARM64CLZW, typ.Int)
23194 v1.AddArg(x)
23195 v.AddArg2(v0, v1)
23196 return true
23197 }
23198 }
23199 func rewriteValueARM64_OpBitLen64(v *Value) bool {
23200 v_0 := v.Args[0]
23201 b := v.Block
23202 typ := &b.Func.Config.Types
23203
23204
23205 for {
23206 x := v_0
23207 v.reset(OpARM64SUB)
23208 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23209 v0.AuxInt = int64ToAuxInt(64)
23210 v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
23211 v1.AddArg(x)
23212 v.AddArg2(v0, v1)
23213 return true
23214 }
23215 }
23216 func rewriteValueARM64_OpBitRev16(v *Value) bool {
23217 v_0 := v.Args[0]
23218 b := v.Block
23219 typ := &b.Func.Config.Types
23220
23221
23222 for {
23223 x := v_0
23224 v.reset(OpARM64SRLconst)
23225 v.AuxInt = int64ToAuxInt(48)
23226 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
23227 v0.AddArg(x)
23228 v.AddArg(v0)
23229 return true
23230 }
23231 }
23232 func rewriteValueARM64_OpBitRev8(v *Value) bool {
23233 v_0 := v.Args[0]
23234 b := v.Block
23235 typ := &b.Func.Config.Types
23236
23237
23238 for {
23239 x := v_0
23240 v.reset(OpARM64SRLconst)
23241 v.AuxInt = int64ToAuxInt(56)
23242 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
23243 v0.AddArg(x)
23244 v.AddArg(v0)
23245 return true
23246 }
23247 }
23248 func rewriteValueARM64_OpCondSelect(v *Value) bool {
23249 v_2 := v.Args[2]
23250 v_1 := v.Args[1]
23251 v_0 := v.Args[0]
23252 b := v.Block
23253
23254
23255
23256 for {
23257 x := v_0
23258 y := v_1
23259 boolval := v_2
23260 if !(flagArg(boolval) != nil) {
23261 break
23262 }
23263 v.reset(OpARM64CSEL)
23264 v.AuxInt = opToAuxInt(boolval.Op)
23265 v.AddArg3(x, y, flagArg(boolval))
23266 return true
23267 }
23268
23269
23270
23271 for {
23272 x := v_0
23273 y := v_1
23274 boolval := v_2
23275 if !(flagArg(boolval) == nil) {
23276 break
23277 }
23278 v.reset(OpARM64CSEL)
23279 v.AuxInt = opToAuxInt(OpARM64NotEqual)
23280 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
23281 v0.AuxInt = int32ToAuxInt(0)
23282 v0.AddArg(boolval)
23283 v.AddArg3(x, y, v0)
23284 return true
23285 }
23286 return false
23287 }
23288 func rewriteValueARM64_OpConst16(v *Value) bool {
23289
23290
23291 for {
23292 val := auxIntToInt16(v.AuxInt)
23293 v.reset(OpARM64MOVDconst)
23294 v.AuxInt = int64ToAuxInt(int64(val))
23295 return true
23296 }
23297 }
23298 func rewriteValueARM64_OpConst32(v *Value) bool {
23299
23300
23301 for {
23302 val := auxIntToInt32(v.AuxInt)
23303 v.reset(OpARM64MOVDconst)
23304 v.AuxInt = int64ToAuxInt(int64(val))
23305 return true
23306 }
23307 }
23308 func rewriteValueARM64_OpConst32F(v *Value) bool {
23309
23310
23311 for {
23312 val := auxIntToFloat32(v.AuxInt)
23313 v.reset(OpARM64FMOVSconst)
23314 v.AuxInt = float64ToAuxInt(float64(val))
23315 return true
23316 }
23317 }
23318 func rewriteValueARM64_OpConst64(v *Value) bool {
23319
23320
23321 for {
23322 val := auxIntToInt64(v.AuxInt)
23323 v.reset(OpARM64MOVDconst)
23324 v.AuxInt = int64ToAuxInt(int64(val))
23325 return true
23326 }
23327 }
23328 func rewriteValueARM64_OpConst64F(v *Value) bool {
23329
23330
23331 for {
23332 val := auxIntToFloat64(v.AuxInt)
23333 v.reset(OpARM64FMOVDconst)
23334 v.AuxInt = float64ToAuxInt(float64(val))
23335 return true
23336 }
23337 }
23338 func rewriteValueARM64_OpConst8(v *Value) bool {
23339
23340
23341 for {
23342 val := auxIntToInt8(v.AuxInt)
23343 v.reset(OpARM64MOVDconst)
23344 v.AuxInt = int64ToAuxInt(int64(val))
23345 return true
23346 }
23347 }
23348 func rewriteValueARM64_OpConstBool(v *Value) bool {
23349
23350
23351 for {
23352 t := auxIntToBool(v.AuxInt)
23353 v.reset(OpARM64MOVDconst)
23354 v.AuxInt = int64ToAuxInt(b2i(t))
23355 return true
23356 }
23357 }
23358 func rewriteValueARM64_OpConstNil(v *Value) bool {
23359
23360
23361 for {
23362 v.reset(OpARM64MOVDconst)
23363 v.AuxInt = int64ToAuxInt(0)
23364 return true
23365 }
23366 }
23367 func rewriteValueARM64_OpCtz16(v *Value) bool {
23368 v_0 := v.Args[0]
23369 b := v.Block
23370 typ := &b.Func.Config.Types
23371
23372
23373 for {
23374 t := v.Type
23375 x := v_0
23376 v.reset(OpARM64CLZW)
23377 v.Type = t
23378 v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
23379 v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
23380 v1.AuxInt = int64ToAuxInt(0x10000)
23381 v1.AddArg(x)
23382 v0.AddArg(v1)
23383 v.AddArg(v0)
23384 return true
23385 }
23386 }
23387 func rewriteValueARM64_OpCtz32(v *Value) bool {
23388 v_0 := v.Args[0]
23389 b := v.Block
23390
23391
23392 for {
23393 t := v.Type
23394 x := v_0
23395 v.reset(OpARM64CLZW)
23396 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
23397 v0.AddArg(x)
23398 v.AddArg(v0)
23399 return true
23400 }
23401 }
23402 func rewriteValueARM64_OpCtz64(v *Value) bool {
23403 v_0 := v.Args[0]
23404 b := v.Block
23405
23406
23407 for {
23408 t := v.Type
23409 x := v_0
23410 v.reset(OpARM64CLZ)
23411 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
23412 v0.AddArg(x)
23413 v.AddArg(v0)
23414 return true
23415 }
23416 }
23417 func rewriteValueARM64_OpCtz8(v *Value) bool {
23418 v_0 := v.Args[0]
23419 b := v.Block
23420 typ := &b.Func.Config.Types
23421
23422
23423 for {
23424 t := v.Type
23425 x := v_0
23426 v.reset(OpARM64CLZW)
23427 v.Type = t
23428 v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
23429 v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
23430 v1.AuxInt = int64ToAuxInt(0x100)
23431 v1.AddArg(x)
23432 v0.AddArg(v1)
23433 v.AddArg(v0)
23434 return true
23435 }
23436 }
23437 func rewriteValueARM64_OpDiv16(v *Value) bool {
23438 v_1 := v.Args[1]
23439 v_0 := v.Args[0]
23440 b := v.Block
23441 typ := &b.Func.Config.Types
23442
23443
23444 for {
23445 if auxIntToBool(v.AuxInt) != false {
23446 break
23447 }
23448 x := v_0
23449 y := v_1
23450 v.reset(OpARM64DIVW)
23451 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
23452 v0.AddArg(x)
23453 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
23454 v1.AddArg(y)
23455 v.AddArg2(v0, v1)
23456 return true
23457 }
23458 return false
23459 }
23460 func rewriteValueARM64_OpDiv16u(v *Value) bool {
23461 v_1 := v.Args[1]
23462 v_0 := v.Args[0]
23463 b := v.Block
23464 typ := &b.Func.Config.Types
23465
23466
23467 for {
23468 x := v_0
23469 y := v_1
23470 v.reset(OpARM64UDIVW)
23471 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
23472 v0.AddArg(x)
23473 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
23474 v1.AddArg(y)
23475 v.AddArg2(v0, v1)
23476 return true
23477 }
23478 }
23479 func rewriteValueARM64_OpDiv32(v *Value) bool {
23480 v_1 := v.Args[1]
23481 v_0 := v.Args[0]
23482
23483
23484 for {
23485 if auxIntToBool(v.AuxInt) != false {
23486 break
23487 }
23488 x := v_0
23489 y := v_1
23490 v.reset(OpARM64DIVW)
23491 v.AddArg2(x, y)
23492 return true
23493 }
23494 return false
23495 }
23496 func rewriteValueARM64_OpDiv64(v *Value) bool {
23497 v_1 := v.Args[1]
23498 v_0 := v.Args[0]
23499
23500
23501 for {
23502 if auxIntToBool(v.AuxInt) != false {
23503 break
23504 }
23505 x := v_0
23506 y := v_1
23507 v.reset(OpARM64DIV)
23508 v.AddArg2(x, y)
23509 return true
23510 }
23511 return false
23512 }
23513 func rewriteValueARM64_OpDiv8(v *Value) bool {
23514 v_1 := v.Args[1]
23515 v_0 := v.Args[0]
23516 b := v.Block
23517 typ := &b.Func.Config.Types
23518
23519
23520 for {
23521 x := v_0
23522 y := v_1
23523 v.reset(OpARM64DIVW)
23524 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
23525 v0.AddArg(x)
23526 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
23527 v1.AddArg(y)
23528 v.AddArg2(v0, v1)
23529 return true
23530 }
23531 }
23532 func rewriteValueARM64_OpDiv8u(v *Value) bool {
23533 v_1 := v.Args[1]
23534 v_0 := v.Args[0]
23535 b := v.Block
23536 typ := &b.Func.Config.Types
23537
23538
23539 for {
23540 x := v_0
23541 y := v_1
23542 v.reset(OpARM64UDIVW)
23543 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
23544 v0.AddArg(x)
23545 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
23546 v1.AddArg(y)
23547 v.AddArg2(v0, v1)
23548 return true
23549 }
23550 }
23551 func rewriteValueARM64_OpEq16(v *Value) bool {
23552 v_1 := v.Args[1]
23553 v_0 := v.Args[0]
23554 b := v.Block
23555 typ := &b.Func.Config.Types
23556
23557
23558 for {
23559 x := v_0
23560 y := v_1
23561 v.reset(OpARM64Equal)
23562 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23563 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
23564 v1.AddArg(x)
23565 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
23566 v2.AddArg(y)
23567 v0.AddArg2(v1, v2)
23568 v.AddArg(v0)
23569 return true
23570 }
23571 }
23572 func rewriteValueARM64_OpEq32(v *Value) bool {
23573 v_1 := v.Args[1]
23574 v_0 := v.Args[0]
23575 b := v.Block
23576
23577
23578 for {
23579 x := v_0
23580 y := v_1
23581 v.reset(OpARM64Equal)
23582 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23583 v0.AddArg2(x, y)
23584 v.AddArg(v0)
23585 return true
23586 }
23587 }
23588 func rewriteValueARM64_OpEq32F(v *Value) bool {
23589 v_1 := v.Args[1]
23590 v_0 := v.Args[0]
23591 b := v.Block
23592
23593
23594 for {
23595 x := v_0
23596 y := v_1
23597 v.reset(OpARM64Equal)
23598 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
23599 v0.AddArg2(x, y)
23600 v.AddArg(v0)
23601 return true
23602 }
23603 }
23604 func rewriteValueARM64_OpEq64(v *Value) bool {
23605 v_1 := v.Args[1]
23606 v_0 := v.Args[0]
23607 b := v.Block
23608
23609
23610 for {
23611 x := v_0
23612 y := v_1
23613 v.reset(OpARM64Equal)
23614 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
23615 v0.AddArg2(x, y)
23616 v.AddArg(v0)
23617 return true
23618 }
23619 }
23620 func rewriteValueARM64_OpEq64F(v *Value) bool {
23621 v_1 := v.Args[1]
23622 v_0 := v.Args[0]
23623 b := v.Block
23624
23625
23626 for {
23627 x := v_0
23628 y := v_1
23629 v.reset(OpARM64Equal)
23630 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
23631 v0.AddArg2(x, y)
23632 v.AddArg(v0)
23633 return true
23634 }
23635 }
23636 func rewriteValueARM64_OpEq8(v *Value) bool {
23637 v_1 := v.Args[1]
23638 v_0 := v.Args[0]
23639 b := v.Block
23640 typ := &b.Func.Config.Types
23641
23642
23643 for {
23644 x := v_0
23645 y := v_1
23646 v.reset(OpARM64Equal)
23647 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23648 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
23649 v1.AddArg(x)
23650 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
23651 v2.AddArg(y)
23652 v0.AddArg2(v1, v2)
23653 v.AddArg(v0)
23654 return true
23655 }
23656 }
23657 func rewriteValueARM64_OpEqB(v *Value) bool {
23658 v_1 := v.Args[1]
23659 v_0 := v.Args[0]
23660 b := v.Block
23661 typ := &b.Func.Config.Types
23662
23663
23664 for {
23665 x := v_0
23666 y := v_1
23667 v.reset(OpARM64XOR)
23668 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23669 v0.AuxInt = int64ToAuxInt(1)
23670 v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
23671 v1.AddArg2(x, y)
23672 v.AddArg2(v0, v1)
23673 return true
23674 }
23675 }
23676 func rewriteValueARM64_OpEqPtr(v *Value) bool {
23677 v_1 := v.Args[1]
23678 v_0 := v.Args[0]
23679 b := v.Block
23680
23681
23682 for {
23683 x := v_0
23684 y := v_1
23685 v.reset(OpARM64Equal)
23686 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
23687 v0.AddArg2(x, y)
23688 v.AddArg(v0)
23689 return true
23690 }
23691 }
23692 func rewriteValueARM64_OpFMA(v *Value) bool {
23693 v_2 := v.Args[2]
23694 v_1 := v.Args[1]
23695 v_0 := v.Args[0]
23696
23697
23698 for {
23699 x := v_0
23700 y := v_1
23701 z := v_2
23702 v.reset(OpARM64FMADDD)
23703 v.AddArg3(z, x, y)
23704 return true
23705 }
23706 }
23707 func rewriteValueARM64_OpHmul32(v *Value) bool {
23708 v_1 := v.Args[1]
23709 v_0 := v.Args[0]
23710 b := v.Block
23711 typ := &b.Func.Config.Types
23712
23713
23714 for {
23715 x := v_0
23716 y := v_1
23717 v.reset(OpARM64SRAconst)
23718 v.AuxInt = int64ToAuxInt(32)
23719 v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
23720 v0.AddArg2(x, y)
23721 v.AddArg(v0)
23722 return true
23723 }
23724 }
23725 func rewriteValueARM64_OpHmul32u(v *Value) bool {
23726 v_1 := v.Args[1]
23727 v_0 := v.Args[0]
23728 b := v.Block
23729 typ := &b.Func.Config.Types
23730
23731
23732 for {
23733 x := v_0
23734 y := v_1
23735 v.reset(OpARM64SRAconst)
23736 v.AuxInt = int64ToAuxInt(32)
23737 v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
23738 v0.AddArg2(x, y)
23739 v.AddArg(v0)
23740 return true
23741 }
23742 }
23743 func rewriteValueARM64_OpIsInBounds(v *Value) bool {
23744 v_1 := v.Args[1]
23745 v_0 := v.Args[0]
23746 b := v.Block
23747
23748
23749 for {
23750 idx := v_0
23751 len := v_1
23752 v.reset(OpARM64LessThanU)
23753 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
23754 v0.AddArg2(idx, len)
23755 v.AddArg(v0)
23756 return true
23757 }
23758 }
23759 func rewriteValueARM64_OpIsNonNil(v *Value) bool {
23760 v_0 := v.Args[0]
23761 b := v.Block
23762
23763
23764 for {
23765 ptr := v_0
23766 v.reset(OpARM64NotEqual)
23767 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
23768 v0.AuxInt = int64ToAuxInt(0)
23769 v0.AddArg(ptr)
23770 v.AddArg(v0)
23771 return true
23772 }
23773 }
23774 func rewriteValueARM64_OpIsSliceInBounds(v *Value) bool {
23775 v_1 := v.Args[1]
23776 v_0 := v.Args[0]
23777 b := v.Block
23778
23779
23780 for {
23781 idx := v_0
23782 len := v_1
23783 v.reset(OpARM64LessEqualU)
23784 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
23785 v0.AddArg2(idx, len)
23786 v.AddArg(v0)
23787 return true
23788 }
23789 }
23790 func rewriteValueARM64_OpLeq16(v *Value) bool {
23791 v_1 := v.Args[1]
23792 v_0 := v.Args[0]
23793 b := v.Block
23794 typ := &b.Func.Config.Types
23795
23796
23797 for {
23798 x := v_0
23799 y := v_1
23800 v.reset(OpARM64LessEqual)
23801 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23802 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
23803 v1.AddArg(x)
23804 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
23805 v2.AddArg(y)
23806 v0.AddArg2(v1, v2)
23807 v.AddArg(v0)
23808 return true
23809 }
23810 }
23811 func rewriteValueARM64_OpLeq16U(v *Value) bool {
23812 v_1 := v.Args[1]
23813 v_0 := v.Args[0]
23814 b := v.Block
23815 typ := &b.Func.Config.Types
23816
23817
23818 for {
23819 x := v_0
23820 zero := v_1
23821 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
23822 break
23823 }
23824 v.reset(OpEq16)
23825 v.AddArg2(x, zero)
23826 return true
23827 }
23828
23829
23830 for {
23831 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
23832 break
23833 }
23834 x := v_1
23835 v.reset(OpNeq16)
23836 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23837 v0.AuxInt = int64ToAuxInt(0)
23838 v.AddArg2(v0, x)
23839 return true
23840 }
23841
23842
23843 for {
23844 x := v_0
23845 y := v_1
23846 v.reset(OpARM64LessEqualU)
23847 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23848 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
23849 v1.AddArg(x)
23850 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
23851 v2.AddArg(y)
23852 v0.AddArg2(v1, v2)
23853 v.AddArg(v0)
23854 return true
23855 }
23856 }
23857 func rewriteValueARM64_OpLeq32(v *Value) bool {
23858 v_1 := v.Args[1]
23859 v_0 := v.Args[0]
23860 b := v.Block
23861
23862
23863 for {
23864 x := v_0
23865 y := v_1
23866 v.reset(OpARM64LessEqual)
23867 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23868 v0.AddArg2(x, y)
23869 v.AddArg(v0)
23870 return true
23871 }
23872 }
23873 func rewriteValueARM64_OpLeq32F(v *Value) bool {
23874 v_1 := v.Args[1]
23875 v_0 := v.Args[0]
23876 b := v.Block
23877
23878
23879 for {
23880 x := v_0
23881 y := v_1
23882 v.reset(OpARM64LessEqualF)
23883 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
23884 v0.AddArg2(x, y)
23885 v.AddArg(v0)
23886 return true
23887 }
23888 }
23889 func rewriteValueARM64_OpLeq32U(v *Value) bool {
23890 v_1 := v.Args[1]
23891 v_0 := v.Args[0]
23892 b := v.Block
23893 typ := &b.Func.Config.Types
23894
23895
23896 for {
23897 x := v_0
23898 zero := v_1
23899 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
23900 break
23901 }
23902 v.reset(OpEq32)
23903 v.AddArg2(x, zero)
23904 return true
23905 }
23906
23907
23908 for {
23909 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
23910 break
23911 }
23912 x := v_1
23913 v.reset(OpNeq32)
23914 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23915 v0.AuxInt = int64ToAuxInt(0)
23916 v.AddArg2(v0, x)
23917 return true
23918 }
23919
23920
23921 for {
23922 x := v_0
23923 y := v_1
23924 v.reset(OpARM64LessEqualU)
23925 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
23926 v0.AddArg2(x, y)
23927 v.AddArg(v0)
23928 return true
23929 }
23930 }
23931 func rewriteValueARM64_OpLeq64(v *Value) bool {
23932 v_1 := v.Args[1]
23933 v_0 := v.Args[0]
23934 b := v.Block
23935
23936
23937 for {
23938 x := v_0
23939 y := v_1
23940 v.reset(OpARM64LessEqual)
23941 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
23942 v0.AddArg2(x, y)
23943 v.AddArg(v0)
23944 return true
23945 }
23946 }
23947 func rewriteValueARM64_OpLeq64F(v *Value) bool {
23948 v_1 := v.Args[1]
23949 v_0 := v.Args[0]
23950 b := v.Block
23951
23952
23953 for {
23954 x := v_0
23955 y := v_1
23956 v.reset(OpARM64LessEqualF)
23957 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
23958 v0.AddArg2(x, y)
23959 v.AddArg(v0)
23960 return true
23961 }
23962 }
23963 func rewriteValueARM64_OpLeq64U(v *Value) bool {
23964 v_1 := v.Args[1]
23965 v_0 := v.Args[0]
23966 b := v.Block
23967 typ := &b.Func.Config.Types
23968
23969
23970 for {
23971 x := v_0
23972 zero := v_1
23973 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
23974 break
23975 }
23976 v.reset(OpEq64)
23977 v.AddArg2(x, zero)
23978 return true
23979 }
23980
23981
23982 for {
23983 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
23984 break
23985 }
23986 x := v_1
23987 v.reset(OpNeq64)
23988 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
23989 v0.AuxInt = int64ToAuxInt(0)
23990 v.AddArg2(v0, x)
23991 return true
23992 }
23993
23994
23995 for {
23996 x := v_0
23997 y := v_1
23998 v.reset(OpARM64LessEqualU)
23999 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
24000 v0.AddArg2(x, y)
24001 v.AddArg(v0)
24002 return true
24003 }
24004 }
24005 func rewriteValueARM64_OpLeq8(v *Value) bool {
24006 v_1 := v.Args[1]
24007 v_0 := v.Args[0]
24008 b := v.Block
24009 typ := &b.Func.Config.Types
24010
24011
24012 for {
24013 x := v_0
24014 y := v_1
24015 v.reset(OpARM64LessEqual)
24016 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24017 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
24018 v1.AddArg(x)
24019 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
24020 v2.AddArg(y)
24021 v0.AddArg2(v1, v2)
24022 v.AddArg(v0)
24023 return true
24024 }
24025 }
24026 func rewriteValueARM64_OpLeq8U(v *Value) bool {
24027 v_1 := v.Args[1]
24028 v_0 := v.Args[0]
24029 b := v.Block
24030 typ := &b.Func.Config.Types
24031
24032
24033 for {
24034 x := v_0
24035 zero := v_1
24036 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
24037 break
24038 }
24039 v.reset(OpEq8)
24040 v.AddArg2(x, zero)
24041 return true
24042 }
24043
24044
24045 for {
24046 if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
24047 break
24048 }
24049 x := v_1
24050 v.reset(OpNeq8)
24051 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
24052 v0.AuxInt = int64ToAuxInt(0)
24053 v.AddArg2(v0, x)
24054 return true
24055 }
24056
24057
24058 for {
24059 x := v_0
24060 y := v_1
24061 v.reset(OpARM64LessEqualU)
24062 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24063 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
24064 v1.AddArg(x)
24065 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
24066 v2.AddArg(y)
24067 v0.AddArg2(v1, v2)
24068 v.AddArg(v0)
24069 return true
24070 }
24071 }
24072 func rewriteValueARM64_OpLess16(v *Value) bool {
24073 v_1 := v.Args[1]
24074 v_0 := v.Args[0]
24075 b := v.Block
24076 typ := &b.Func.Config.Types
24077
24078
24079 for {
24080 x := v_0
24081 y := v_1
24082 v.reset(OpARM64LessThan)
24083 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24084 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
24085 v1.AddArg(x)
24086 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
24087 v2.AddArg(y)
24088 v0.AddArg2(v1, v2)
24089 v.AddArg(v0)
24090 return true
24091 }
24092 }
24093 func rewriteValueARM64_OpLess16U(v *Value) bool {
24094 v_1 := v.Args[1]
24095 v_0 := v.Args[0]
24096 b := v.Block
24097 typ := &b.Func.Config.Types
24098
24099
24100 for {
24101 zero := v_0
24102 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
24103 break
24104 }
24105 x := v_1
24106 v.reset(OpNeq16)
24107 v.AddArg2(zero, x)
24108 return true
24109 }
24110
24111
24112 for {
24113 x := v_0
24114 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
24115 break
24116 }
24117 v.reset(OpEq16)
24118 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
24119 v0.AuxInt = int64ToAuxInt(0)
24120 v.AddArg2(x, v0)
24121 return true
24122 }
24123
24124
24125 for {
24126 x := v_0
24127 y := v_1
24128 v.reset(OpARM64LessThanU)
24129 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24130 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
24131 v1.AddArg(x)
24132 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
24133 v2.AddArg(y)
24134 v0.AddArg2(v1, v2)
24135 v.AddArg(v0)
24136 return true
24137 }
24138 }
24139 func rewriteValueARM64_OpLess32(v *Value) bool {
24140 v_1 := v.Args[1]
24141 v_0 := v.Args[0]
24142 b := v.Block
24143
24144
24145 for {
24146 x := v_0
24147 y := v_1
24148 v.reset(OpARM64LessThan)
24149 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24150 v0.AddArg2(x, y)
24151 v.AddArg(v0)
24152 return true
24153 }
24154 }
24155 func rewriteValueARM64_OpLess32F(v *Value) bool {
24156 v_1 := v.Args[1]
24157 v_0 := v.Args[0]
24158 b := v.Block
24159
24160
24161 for {
24162 x := v_0
24163 y := v_1
24164 v.reset(OpARM64LessThanF)
24165 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
24166 v0.AddArg2(x, y)
24167 v.AddArg(v0)
24168 return true
24169 }
24170 }
24171 func rewriteValueARM64_OpLess32U(v *Value) bool {
24172 v_1 := v.Args[1]
24173 v_0 := v.Args[0]
24174 b := v.Block
24175 typ := &b.Func.Config.Types
24176
24177
24178 for {
24179 zero := v_0
24180 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
24181 break
24182 }
24183 x := v_1
24184 v.reset(OpNeq32)
24185 v.AddArg2(zero, x)
24186 return true
24187 }
24188
24189
24190 for {
24191 x := v_0
24192 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
24193 break
24194 }
24195 v.reset(OpEq32)
24196 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
24197 v0.AuxInt = int64ToAuxInt(0)
24198 v.AddArg2(x, v0)
24199 return true
24200 }
24201
24202
24203 for {
24204 x := v_0
24205 y := v_1
24206 v.reset(OpARM64LessThanU)
24207 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24208 v0.AddArg2(x, y)
24209 v.AddArg(v0)
24210 return true
24211 }
24212 }
24213 func rewriteValueARM64_OpLess64(v *Value) bool {
24214 v_1 := v.Args[1]
24215 v_0 := v.Args[0]
24216 b := v.Block
24217
24218
24219 for {
24220 x := v_0
24221 y := v_1
24222 v.reset(OpARM64LessThan)
24223 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
24224 v0.AddArg2(x, y)
24225 v.AddArg(v0)
24226 return true
24227 }
24228 }
24229 func rewriteValueARM64_OpLess64F(v *Value) bool {
24230 v_1 := v.Args[1]
24231 v_0 := v.Args[0]
24232 b := v.Block
24233
24234
24235 for {
24236 x := v_0
24237 y := v_1
24238 v.reset(OpARM64LessThanF)
24239 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
24240 v0.AddArg2(x, y)
24241 v.AddArg(v0)
24242 return true
24243 }
24244 }
24245 func rewriteValueARM64_OpLess64U(v *Value) bool {
24246 v_1 := v.Args[1]
24247 v_0 := v.Args[0]
24248 b := v.Block
24249 typ := &b.Func.Config.Types
24250
24251
24252 for {
24253 zero := v_0
24254 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
24255 break
24256 }
24257 x := v_1
24258 v.reset(OpNeq64)
24259 v.AddArg2(zero, x)
24260 return true
24261 }
24262
24263
24264 for {
24265 x := v_0
24266 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
24267 break
24268 }
24269 v.reset(OpEq64)
24270 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
24271 v0.AuxInt = int64ToAuxInt(0)
24272 v.AddArg2(x, v0)
24273 return true
24274 }
24275
24276
24277 for {
24278 x := v_0
24279 y := v_1
24280 v.reset(OpARM64LessThanU)
24281 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
24282 v0.AddArg2(x, y)
24283 v.AddArg(v0)
24284 return true
24285 }
24286 }
24287 func rewriteValueARM64_OpLess8(v *Value) bool {
24288 v_1 := v.Args[1]
24289 v_0 := v.Args[0]
24290 b := v.Block
24291 typ := &b.Func.Config.Types
24292
24293
24294 for {
24295 x := v_0
24296 y := v_1
24297 v.reset(OpARM64LessThan)
24298 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24299 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
24300 v1.AddArg(x)
24301 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
24302 v2.AddArg(y)
24303 v0.AddArg2(v1, v2)
24304 v.AddArg(v0)
24305 return true
24306 }
24307 }
24308 func rewriteValueARM64_OpLess8U(v *Value) bool {
24309 v_1 := v.Args[1]
24310 v_0 := v.Args[0]
24311 b := v.Block
24312 typ := &b.Func.Config.Types
24313
24314
24315 for {
24316 zero := v_0
24317 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
24318 break
24319 }
24320 x := v_1
24321 v.reset(OpNeq8)
24322 v.AddArg2(zero, x)
24323 return true
24324 }
24325
24326
24327 for {
24328 x := v_0
24329 if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
24330 break
24331 }
24332 v.reset(OpEq8)
24333 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
24334 v0.AuxInt = int64ToAuxInt(0)
24335 v.AddArg2(x, v0)
24336 return true
24337 }
24338
24339
24340 for {
24341 x := v_0
24342 y := v_1
24343 v.reset(OpARM64LessThanU)
24344 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
24345 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
24346 v1.AddArg(x)
24347 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
24348 v2.AddArg(y)
24349 v0.AddArg2(v1, v2)
24350 v.AddArg(v0)
24351 return true
24352 }
24353 }
24354 func rewriteValueARM64_OpLoad(v *Value) bool {
24355 v_1 := v.Args[1]
24356 v_0 := v.Args[0]
24357
24358
24359
24360 for {
24361 t := v.Type
24362 ptr := v_0
24363 mem := v_1
24364 if !(t.IsBoolean()) {
24365 break
24366 }
24367 v.reset(OpARM64MOVBUload)
24368 v.AddArg2(ptr, mem)
24369 return true
24370 }
24371
24372
24373
24374 for {
24375 t := v.Type
24376 ptr := v_0
24377 mem := v_1
24378 if !(is8BitInt(t) && isSigned(t)) {
24379 break
24380 }
24381 v.reset(OpARM64MOVBload)
24382 v.AddArg2(ptr, mem)
24383 return true
24384 }
24385
24386
24387
24388 for {
24389 t := v.Type
24390 ptr := v_0
24391 mem := v_1
24392 if !(is8BitInt(t) && !isSigned(t)) {
24393 break
24394 }
24395 v.reset(OpARM64MOVBUload)
24396 v.AddArg2(ptr, mem)
24397 return true
24398 }
24399
24400
24401
24402 for {
24403 t := v.Type
24404 ptr := v_0
24405 mem := v_1
24406 if !(is16BitInt(t) && isSigned(t)) {
24407 break
24408 }
24409 v.reset(OpARM64MOVHload)
24410 v.AddArg2(ptr, mem)
24411 return true
24412 }
24413
24414
24415
24416 for {
24417 t := v.Type
24418 ptr := v_0
24419 mem := v_1
24420 if !(is16BitInt(t) && !isSigned(t)) {
24421 break
24422 }
24423 v.reset(OpARM64MOVHUload)
24424 v.AddArg2(ptr, mem)
24425 return true
24426 }
24427
24428
24429
24430 for {
24431 t := v.Type
24432 ptr := v_0
24433 mem := v_1
24434 if !(is32BitInt(t) && isSigned(t)) {
24435 break
24436 }
24437 v.reset(OpARM64MOVWload)
24438 v.AddArg2(ptr, mem)
24439 return true
24440 }
24441
24442
24443
24444 for {
24445 t := v.Type
24446 ptr := v_0
24447 mem := v_1
24448 if !(is32BitInt(t) && !isSigned(t)) {
24449 break
24450 }
24451 v.reset(OpARM64MOVWUload)
24452 v.AddArg2(ptr, mem)
24453 return true
24454 }
24455
24456
24457
24458 for {
24459 t := v.Type
24460 ptr := v_0
24461 mem := v_1
24462 if !(is64BitInt(t) || isPtr(t)) {
24463 break
24464 }
24465 v.reset(OpARM64MOVDload)
24466 v.AddArg2(ptr, mem)
24467 return true
24468 }
24469
24470
24471
24472 for {
24473 t := v.Type
24474 ptr := v_0
24475 mem := v_1
24476 if !(is32BitFloat(t)) {
24477 break
24478 }
24479 v.reset(OpARM64FMOVSload)
24480 v.AddArg2(ptr, mem)
24481 return true
24482 }
24483
24484
24485
24486 for {
24487 t := v.Type
24488 ptr := v_0
24489 mem := v_1
24490 if !(is64BitFloat(t)) {
24491 break
24492 }
24493 v.reset(OpARM64FMOVDload)
24494 v.AddArg2(ptr, mem)
24495 return true
24496 }
24497 return false
24498 }
24499 func rewriteValueARM64_OpLocalAddr(v *Value) bool {
24500 v_0 := v.Args[0]
24501
24502
24503 for {
24504 sym := auxToSym(v.Aux)
24505 base := v_0
24506 v.reset(OpARM64MOVDaddr)
24507 v.Aux = symToAux(sym)
24508 v.AddArg(base)
24509 return true
24510 }
24511 }
24512 func rewriteValueARM64_OpLsh16x16(v *Value) bool {
24513 v_1 := v.Args[1]
24514 v_0 := v.Args[0]
24515 b := v.Block
24516 typ := &b.Func.Config.Types
24517
24518
24519 for {
24520 t := v.Type
24521 x := v_0
24522 y := v_1
24523 v.reset(OpARM64CSEL)
24524 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24525 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24526 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
24527 v1.AddArg(y)
24528 v0.AddArg2(x, v1)
24529 v2 := b.NewValue0(v.Pos, OpConst64, t)
24530 v2.AuxInt = int64ToAuxInt(0)
24531 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24532 v3.AuxInt = int64ToAuxInt(64)
24533 v3.AddArg(v1)
24534 v.AddArg3(v0, v2, v3)
24535 return true
24536 }
24537 }
24538 func rewriteValueARM64_OpLsh16x32(v *Value) bool {
24539 v_1 := v.Args[1]
24540 v_0 := v.Args[0]
24541 b := v.Block
24542 typ := &b.Func.Config.Types
24543
24544
24545 for {
24546 t := v.Type
24547 x := v_0
24548 y := v_1
24549 v.reset(OpARM64CSEL)
24550 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24551 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24552 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
24553 v1.AddArg(y)
24554 v0.AddArg2(x, v1)
24555 v2 := b.NewValue0(v.Pos, OpConst64, t)
24556 v2.AuxInt = int64ToAuxInt(0)
24557 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24558 v3.AuxInt = int64ToAuxInt(64)
24559 v3.AddArg(v1)
24560 v.AddArg3(v0, v2, v3)
24561 return true
24562 }
24563 }
24564 func rewriteValueARM64_OpLsh16x64(v *Value) bool {
24565 v_1 := v.Args[1]
24566 v_0 := v.Args[0]
24567 b := v.Block
24568
24569
24570 for {
24571 t := v.Type
24572 x := v_0
24573 y := v_1
24574 v.reset(OpARM64CSEL)
24575 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24576 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24577 v0.AddArg2(x, y)
24578 v1 := b.NewValue0(v.Pos, OpConst64, t)
24579 v1.AuxInt = int64ToAuxInt(0)
24580 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24581 v2.AuxInt = int64ToAuxInt(64)
24582 v2.AddArg(y)
24583 v.AddArg3(v0, v1, v2)
24584 return true
24585 }
24586 }
24587 func rewriteValueARM64_OpLsh16x8(v *Value) bool {
24588 v_1 := v.Args[1]
24589 v_0 := v.Args[0]
24590 b := v.Block
24591 typ := &b.Func.Config.Types
24592
24593
24594 for {
24595 t := v.Type
24596 x := v_0
24597 y := v_1
24598 v.reset(OpARM64CSEL)
24599 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24600 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24601 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
24602 v1.AddArg(y)
24603 v0.AddArg2(x, v1)
24604 v2 := b.NewValue0(v.Pos, OpConst64, t)
24605 v2.AuxInt = int64ToAuxInt(0)
24606 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24607 v3.AuxInt = int64ToAuxInt(64)
24608 v3.AddArg(v1)
24609 v.AddArg3(v0, v2, v3)
24610 return true
24611 }
24612 }
24613 func rewriteValueARM64_OpLsh32x16(v *Value) bool {
24614 v_1 := v.Args[1]
24615 v_0 := v.Args[0]
24616 b := v.Block
24617 typ := &b.Func.Config.Types
24618
24619
24620 for {
24621 t := v.Type
24622 x := v_0
24623 y := v_1
24624 v.reset(OpARM64CSEL)
24625 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24626 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24627 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
24628 v1.AddArg(y)
24629 v0.AddArg2(x, v1)
24630 v2 := b.NewValue0(v.Pos, OpConst64, t)
24631 v2.AuxInt = int64ToAuxInt(0)
24632 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24633 v3.AuxInt = int64ToAuxInt(64)
24634 v3.AddArg(v1)
24635 v.AddArg3(v0, v2, v3)
24636 return true
24637 }
24638 }
24639 func rewriteValueARM64_OpLsh32x32(v *Value) bool {
24640 v_1 := v.Args[1]
24641 v_0 := v.Args[0]
24642 b := v.Block
24643 typ := &b.Func.Config.Types
24644
24645
24646 for {
24647 t := v.Type
24648 x := v_0
24649 y := v_1
24650 v.reset(OpARM64CSEL)
24651 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24652 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24653 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
24654 v1.AddArg(y)
24655 v0.AddArg2(x, v1)
24656 v2 := b.NewValue0(v.Pos, OpConst64, t)
24657 v2.AuxInt = int64ToAuxInt(0)
24658 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24659 v3.AuxInt = int64ToAuxInt(64)
24660 v3.AddArg(v1)
24661 v.AddArg3(v0, v2, v3)
24662 return true
24663 }
24664 }
24665 func rewriteValueARM64_OpLsh32x64(v *Value) bool {
24666 v_1 := v.Args[1]
24667 v_0 := v.Args[0]
24668 b := v.Block
24669
24670
24671 for {
24672 t := v.Type
24673 x := v_0
24674 y := v_1
24675 v.reset(OpARM64CSEL)
24676 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24677 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24678 v0.AddArg2(x, y)
24679 v1 := b.NewValue0(v.Pos, OpConst64, t)
24680 v1.AuxInt = int64ToAuxInt(0)
24681 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24682 v2.AuxInt = int64ToAuxInt(64)
24683 v2.AddArg(y)
24684 v.AddArg3(v0, v1, v2)
24685 return true
24686 }
24687 }
24688 func rewriteValueARM64_OpLsh32x8(v *Value) bool {
24689 v_1 := v.Args[1]
24690 v_0 := v.Args[0]
24691 b := v.Block
24692 typ := &b.Func.Config.Types
24693
24694
24695 for {
24696 t := v.Type
24697 x := v_0
24698 y := v_1
24699 v.reset(OpARM64CSEL)
24700 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24701 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24702 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
24703 v1.AddArg(y)
24704 v0.AddArg2(x, v1)
24705 v2 := b.NewValue0(v.Pos, OpConst64, t)
24706 v2.AuxInt = int64ToAuxInt(0)
24707 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24708 v3.AuxInt = int64ToAuxInt(64)
24709 v3.AddArg(v1)
24710 v.AddArg3(v0, v2, v3)
24711 return true
24712 }
24713 }
24714 func rewriteValueARM64_OpLsh64x16(v *Value) bool {
24715 v_1 := v.Args[1]
24716 v_0 := v.Args[0]
24717 b := v.Block
24718 typ := &b.Func.Config.Types
24719
24720
24721 for {
24722 t := v.Type
24723 x := v_0
24724 y := v_1
24725 v.reset(OpARM64CSEL)
24726 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24727 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24728 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
24729 v1.AddArg(y)
24730 v0.AddArg2(x, v1)
24731 v2 := b.NewValue0(v.Pos, OpConst64, t)
24732 v2.AuxInt = int64ToAuxInt(0)
24733 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24734 v3.AuxInt = int64ToAuxInt(64)
24735 v3.AddArg(v1)
24736 v.AddArg3(v0, v2, v3)
24737 return true
24738 }
24739 }
24740 func rewriteValueARM64_OpLsh64x32(v *Value) bool {
24741 v_1 := v.Args[1]
24742 v_0 := v.Args[0]
24743 b := v.Block
24744 typ := &b.Func.Config.Types
24745
24746
24747 for {
24748 t := v.Type
24749 x := v_0
24750 y := v_1
24751 v.reset(OpARM64CSEL)
24752 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24753 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24754 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
24755 v1.AddArg(y)
24756 v0.AddArg2(x, v1)
24757 v2 := b.NewValue0(v.Pos, OpConst64, t)
24758 v2.AuxInt = int64ToAuxInt(0)
24759 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24760 v3.AuxInt = int64ToAuxInt(64)
24761 v3.AddArg(v1)
24762 v.AddArg3(v0, v2, v3)
24763 return true
24764 }
24765 }
24766 func rewriteValueARM64_OpLsh64x64(v *Value) bool {
24767 v_1 := v.Args[1]
24768 v_0 := v.Args[0]
24769 b := v.Block
24770
24771
24772 for {
24773 t := v.Type
24774 x := v_0
24775 y := v_1
24776 v.reset(OpARM64CSEL)
24777 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24778 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24779 v0.AddArg2(x, y)
24780 v1 := b.NewValue0(v.Pos, OpConst64, t)
24781 v1.AuxInt = int64ToAuxInt(0)
24782 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24783 v2.AuxInt = int64ToAuxInt(64)
24784 v2.AddArg(y)
24785 v.AddArg3(v0, v1, v2)
24786 return true
24787 }
24788 }
24789 func rewriteValueARM64_OpLsh64x8(v *Value) bool {
24790 v_1 := v.Args[1]
24791 v_0 := v.Args[0]
24792 b := v.Block
24793 typ := &b.Func.Config.Types
24794
24795
24796 for {
24797 t := v.Type
24798 x := v_0
24799 y := v_1
24800 v.reset(OpARM64CSEL)
24801 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24802 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24803 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
24804 v1.AddArg(y)
24805 v0.AddArg2(x, v1)
24806 v2 := b.NewValue0(v.Pos, OpConst64, t)
24807 v2.AuxInt = int64ToAuxInt(0)
24808 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24809 v3.AuxInt = int64ToAuxInt(64)
24810 v3.AddArg(v1)
24811 v.AddArg3(v0, v2, v3)
24812 return true
24813 }
24814 }
24815 func rewriteValueARM64_OpLsh8x16(v *Value) bool {
24816 v_1 := v.Args[1]
24817 v_0 := v.Args[0]
24818 b := v.Block
24819 typ := &b.Func.Config.Types
24820
24821
24822 for {
24823 t := v.Type
24824 x := v_0
24825 y := v_1
24826 v.reset(OpARM64CSEL)
24827 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24828 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24829 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
24830 v1.AddArg(y)
24831 v0.AddArg2(x, v1)
24832 v2 := b.NewValue0(v.Pos, OpConst64, t)
24833 v2.AuxInt = int64ToAuxInt(0)
24834 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24835 v3.AuxInt = int64ToAuxInt(64)
24836 v3.AddArg(v1)
24837 v.AddArg3(v0, v2, v3)
24838 return true
24839 }
24840 }
24841 func rewriteValueARM64_OpLsh8x32(v *Value) bool {
24842 v_1 := v.Args[1]
24843 v_0 := v.Args[0]
24844 b := v.Block
24845 typ := &b.Func.Config.Types
24846
24847
24848 for {
24849 t := v.Type
24850 x := v_0
24851 y := v_1
24852 v.reset(OpARM64CSEL)
24853 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24854 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24855 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
24856 v1.AddArg(y)
24857 v0.AddArg2(x, v1)
24858 v2 := b.NewValue0(v.Pos, OpConst64, t)
24859 v2.AuxInt = int64ToAuxInt(0)
24860 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24861 v3.AuxInt = int64ToAuxInt(64)
24862 v3.AddArg(v1)
24863 v.AddArg3(v0, v2, v3)
24864 return true
24865 }
24866 }
24867 func rewriteValueARM64_OpLsh8x64(v *Value) bool {
24868 v_1 := v.Args[1]
24869 v_0 := v.Args[0]
24870 b := v.Block
24871
24872
24873 for {
24874 t := v.Type
24875 x := v_0
24876 y := v_1
24877 v.reset(OpARM64CSEL)
24878 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24879 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24880 v0.AddArg2(x, y)
24881 v1 := b.NewValue0(v.Pos, OpConst64, t)
24882 v1.AuxInt = int64ToAuxInt(0)
24883 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24884 v2.AuxInt = int64ToAuxInt(64)
24885 v2.AddArg(y)
24886 v.AddArg3(v0, v1, v2)
24887 return true
24888 }
24889 }
24890 func rewriteValueARM64_OpLsh8x8(v *Value) bool {
24891 v_1 := v.Args[1]
24892 v_0 := v.Args[0]
24893 b := v.Block
24894 typ := &b.Func.Config.Types
24895
24896
24897 for {
24898 t := v.Type
24899 x := v_0
24900 y := v_1
24901 v.reset(OpARM64CSEL)
24902 v.AuxInt = opToAuxInt(OpARM64LessThanU)
24903 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
24904 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
24905 v1.AddArg(y)
24906 v0.AddArg2(x, v1)
24907 v2 := b.NewValue0(v.Pos, OpConst64, t)
24908 v2.AuxInt = int64ToAuxInt(0)
24909 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
24910 v3.AuxInt = int64ToAuxInt(64)
24911 v3.AddArg(v1)
24912 v.AddArg3(v0, v2, v3)
24913 return true
24914 }
24915 }
24916 func rewriteValueARM64_OpMod16(v *Value) bool {
24917 v_1 := v.Args[1]
24918 v_0 := v.Args[0]
24919 b := v.Block
24920 typ := &b.Func.Config.Types
24921
24922
24923 for {
24924 x := v_0
24925 y := v_1
24926 v.reset(OpARM64MODW)
24927 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
24928 v0.AddArg(x)
24929 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
24930 v1.AddArg(y)
24931 v.AddArg2(v0, v1)
24932 return true
24933 }
24934 }
24935 func rewriteValueARM64_OpMod16u(v *Value) bool {
24936 v_1 := v.Args[1]
24937 v_0 := v.Args[0]
24938 b := v.Block
24939 typ := &b.Func.Config.Types
24940
24941
24942 for {
24943 x := v_0
24944 y := v_1
24945 v.reset(OpARM64UMODW)
24946 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
24947 v0.AddArg(x)
24948 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
24949 v1.AddArg(y)
24950 v.AddArg2(v0, v1)
24951 return true
24952 }
24953 }
24954 func rewriteValueARM64_OpMod32(v *Value) bool {
24955 v_1 := v.Args[1]
24956 v_0 := v.Args[0]
24957
24958
24959 for {
24960 x := v_0
24961 y := v_1
24962 v.reset(OpARM64MODW)
24963 v.AddArg2(x, y)
24964 return true
24965 }
24966 }
24967 func rewriteValueARM64_OpMod64(v *Value) bool {
24968 v_1 := v.Args[1]
24969 v_0 := v.Args[0]
24970
24971
24972 for {
24973 x := v_0
24974 y := v_1
24975 v.reset(OpARM64MOD)
24976 v.AddArg2(x, y)
24977 return true
24978 }
24979 }
24980 func rewriteValueARM64_OpMod8(v *Value) bool {
24981 v_1 := v.Args[1]
24982 v_0 := v.Args[0]
24983 b := v.Block
24984 typ := &b.Func.Config.Types
24985
24986
24987 for {
24988 x := v_0
24989 y := v_1
24990 v.reset(OpARM64MODW)
24991 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
24992 v0.AddArg(x)
24993 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
24994 v1.AddArg(y)
24995 v.AddArg2(v0, v1)
24996 return true
24997 }
24998 }
24999 func rewriteValueARM64_OpMod8u(v *Value) bool {
25000 v_1 := v.Args[1]
25001 v_0 := v.Args[0]
25002 b := v.Block
25003 typ := &b.Func.Config.Types
25004
25005
25006 for {
25007 x := v_0
25008 y := v_1
25009 v.reset(OpARM64UMODW)
25010 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
25011 v0.AddArg(x)
25012 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
25013 v1.AddArg(y)
25014 v.AddArg2(v0, v1)
25015 return true
25016 }
25017 }
25018 func rewriteValueARM64_OpMove(v *Value) bool {
25019 v_2 := v.Args[2]
25020 v_1 := v.Args[1]
25021 v_0 := v.Args[0]
25022 b := v.Block
25023 config := b.Func.Config
25024 typ := &b.Func.Config.Types
25025
25026
25027 for {
25028 if auxIntToInt64(v.AuxInt) != 0 {
25029 break
25030 }
25031 mem := v_2
25032 v.copyOf(mem)
25033 return true
25034 }
25035
25036
25037 for {
25038 if auxIntToInt64(v.AuxInt) != 1 {
25039 break
25040 }
25041 dst := v_0
25042 src := v_1
25043 mem := v_2
25044 v.reset(OpARM64MOVBstore)
25045 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
25046 v0.AddArg2(src, mem)
25047 v.AddArg3(dst, v0, mem)
25048 return true
25049 }
25050
25051
25052 for {
25053 if auxIntToInt64(v.AuxInt) != 2 {
25054 break
25055 }
25056 dst := v_0
25057 src := v_1
25058 mem := v_2
25059 v.reset(OpARM64MOVHstore)
25060 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
25061 v0.AddArg2(src, mem)
25062 v.AddArg3(dst, v0, mem)
25063 return true
25064 }
25065
25066
25067 for {
25068 if auxIntToInt64(v.AuxInt) != 4 {
25069 break
25070 }
25071 dst := v_0
25072 src := v_1
25073 mem := v_2
25074 v.reset(OpARM64MOVWstore)
25075 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
25076 v0.AddArg2(src, mem)
25077 v.AddArg3(dst, v0, mem)
25078 return true
25079 }
25080
25081
25082 for {
25083 if auxIntToInt64(v.AuxInt) != 8 {
25084 break
25085 }
25086 dst := v_0
25087 src := v_1
25088 mem := v_2
25089 v.reset(OpARM64MOVDstore)
25090 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25091 v0.AddArg2(src, mem)
25092 v.AddArg3(dst, v0, mem)
25093 return true
25094 }
25095
25096
25097 for {
25098 if auxIntToInt64(v.AuxInt) != 3 {
25099 break
25100 }
25101 dst := v_0
25102 src := v_1
25103 mem := v_2
25104 v.reset(OpARM64MOVBstore)
25105 v.AuxInt = int32ToAuxInt(2)
25106 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
25107 v0.AuxInt = int32ToAuxInt(2)
25108 v0.AddArg2(src, mem)
25109 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
25110 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
25111 v2.AddArg2(src, mem)
25112 v1.AddArg3(dst, v2, mem)
25113 v.AddArg3(dst, v0, v1)
25114 return true
25115 }
25116
25117
25118 for {
25119 if auxIntToInt64(v.AuxInt) != 5 {
25120 break
25121 }
25122 dst := v_0
25123 src := v_1
25124 mem := v_2
25125 v.reset(OpARM64MOVBstore)
25126 v.AuxInt = int32ToAuxInt(4)
25127 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
25128 v0.AuxInt = int32ToAuxInt(4)
25129 v0.AddArg2(src, mem)
25130 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
25131 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
25132 v2.AddArg2(src, mem)
25133 v1.AddArg3(dst, v2, mem)
25134 v.AddArg3(dst, v0, v1)
25135 return true
25136 }
25137
25138
25139 for {
25140 if auxIntToInt64(v.AuxInt) != 6 {
25141 break
25142 }
25143 dst := v_0
25144 src := v_1
25145 mem := v_2
25146 v.reset(OpARM64MOVHstore)
25147 v.AuxInt = int32ToAuxInt(4)
25148 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
25149 v0.AuxInt = int32ToAuxInt(4)
25150 v0.AddArg2(src, mem)
25151 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
25152 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
25153 v2.AddArg2(src, mem)
25154 v1.AddArg3(dst, v2, mem)
25155 v.AddArg3(dst, v0, v1)
25156 return true
25157 }
25158
25159
25160 for {
25161 if auxIntToInt64(v.AuxInt) != 7 {
25162 break
25163 }
25164 dst := v_0
25165 src := v_1
25166 mem := v_2
25167 v.reset(OpARM64MOVBstore)
25168 v.AuxInt = int32ToAuxInt(6)
25169 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
25170 v0.AuxInt = int32ToAuxInt(6)
25171 v0.AddArg2(src, mem)
25172 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
25173 v1.AuxInt = int32ToAuxInt(4)
25174 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
25175 v2.AuxInt = int32ToAuxInt(4)
25176 v2.AddArg2(src, mem)
25177 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
25178 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
25179 v4.AddArg2(src, mem)
25180 v3.AddArg3(dst, v4, mem)
25181 v1.AddArg3(dst, v2, v3)
25182 v.AddArg3(dst, v0, v1)
25183 return true
25184 }
25185
25186
25187 for {
25188 if auxIntToInt64(v.AuxInt) != 12 {
25189 break
25190 }
25191 dst := v_0
25192 src := v_1
25193 mem := v_2
25194 v.reset(OpARM64MOVWstore)
25195 v.AuxInt = int32ToAuxInt(8)
25196 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
25197 v0.AuxInt = int32ToAuxInt(8)
25198 v0.AddArg2(src, mem)
25199 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
25200 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25201 v2.AddArg2(src, mem)
25202 v1.AddArg3(dst, v2, mem)
25203 v.AddArg3(dst, v0, v1)
25204 return true
25205 }
25206
25207
25208 for {
25209 if auxIntToInt64(v.AuxInt) != 16 {
25210 break
25211 }
25212 dst := v_0
25213 src := v_1
25214 mem := v_2
25215 v.reset(OpARM64MOVDstore)
25216 v.AuxInt = int32ToAuxInt(8)
25217 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25218 v0.AuxInt = int32ToAuxInt(8)
25219 v0.AddArg2(src, mem)
25220 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
25221 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25222 v2.AddArg2(src, mem)
25223 v1.AddArg3(dst, v2, mem)
25224 v.AddArg3(dst, v0, v1)
25225 return true
25226 }
25227
25228
25229 for {
25230 if auxIntToInt64(v.AuxInt) != 24 {
25231 break
25232 }
25233 dst := v_0
25234 src := v_1
25235 mem := v_2
25236 v.reset(OpARM64MOVDstore)
25237 v.AuxInt = int32ToAuxInt(16)
25238 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25239 v0.AuxInt = int32ToAuxInt(16)
25240 v0.AddArg2(src, mem)
25241 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
25242 v1.AuxInt = int32ToAuxInt(8)
25243 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25244 v2.AuxInt = int32ToAuxInt(8)
25245 v2.AddArg2(src, mem)
25246 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
25247 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25248 v4.AddArg2(src, mem)
25249 v3.AddArg3(dst, v4, mem)
25250 v1.AddArg3(dst, v2, v3)
25251 v.AddArg3(dst, v0, v1)
25252 return true
25253 }
25254
25255
25256
25257 for {
25258 s := auxIntToInt64(v.AuxInt)
25259 dst := v_0
25260 src := v_1
25261 mem := v_2
25262 if !(s%8 != 0 && s > 8) {
25263 break
25264 }
25265 v.reset(OpMove)
25266 v.AuxInt = int64ToAuxInt(s % 8)
25267 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
25268 v0.AuxInt = int64ToAuxInt(s - s%8)
25269 v0.AddArg(dst)
25270 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
25271 v1.AuxInt = int64ToAuxInt(s - s%8)
25272 v1.AddArg(src)
25273 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
25274 v2.AuxInt = int64ToAuxInt(s - s%8)
25275 v2.AddArg3(dst, src, mem)
25276 v.AddArg3(v0, v1, v2)
25277 return true
25278 }
25279
25280
25281
25282 for {
25283 s := auxIntToInt64(v.AuxInt)
25284 dst := v_0
25285 src := v_1
25286 mem := v_2
25287 if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice && logLargeCopy(v, s)) {
25288 break
25289 }
25290 v.reset(OpARM64MOVDstore)
25291 v.AuxInt = int32ToAuxInt(int32(s - 8))
25292 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
25293 v0.AuxInt = int32ToAuxInt(int32(s - 8))
25294 v0.AddArg2(src, mem)
25295 v1 := b.NewValue0(v.Pos, OpARM64DUFFCOPY, types.TypeMem)
25296 v1.AuxInt = int64ToAuxInt(8 * (64 - (s-8)/16))
25297 v1.AddArg3(dst, src, mem)
25298 v.AddArg3(dst, v0, v1)
25299 return true
25300 }
25301
25302
25303
25304 for {
25305 s := auxIntToInt64(v.AuxInt)
25306 dst := v_0
25307 src := v_1
25308 mem := v_2
25309 if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
25310 break
25311 }
25312 v.reset(OpARM64DUFFCOPY)
25313 v.AuxInt = int64ToAuxInt(8 * (64 - s/16))
25314 v.AddArg3(dst, src, mem)
25315 return true
25316 }
25317
25318
25319
25320 for {
25321 s := auxIntToInt64(v.AuxInt)
25322 dst := v_0
25323 src := v_1
25324 mem := v_2
25325 if !(s > 24 && s%8 == 0 && logLargeCopy(v, s)) {
25326 break
25327 }
25328 v.reset(OpARM64LoweredMove)
25329 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
25330 v0.AuxInt = int64ToAuxInt(s - 8)
25331 v0.AddArg(src)
25332 v.AddArg4(dst, src, v0, mem)
25333 return true
25334 }
25335 return false
25336 }
25337 func rewriteValueARM64_OpNeq16(v *Value) bool {
25338 v_1 := v.Args[1]
25339 v_0 := v.Args[0]
25340 b := v.Block
25341 typ := &b.Func.Config.Types
25342
25343
25344 for {
25345 x := v_0
25346 y := v_1
25347 v.reset(OpARM64NotEqual)
25348 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
25349 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
25350 v1.AddArg(x)
25351 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
25352 v2.AddArg(y)
25353 v0.AddArg2(v1, v2)
25354 v.AddArg(v0)
25355 return true
25356 }
25357 }
25358 func rewriteValueARM64_OpNeq32(v *Value) bool {
25359 v_1 := v.Args[1]
25360 v_0 := v.Args[0]
25361 b := v.Block
25362
25363
25364 for {
25365 x := v_0
25366 y := v_1
25367 v.reset(OpARM64NotEqual)
25368 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
25369 v0.AddArg2(x, y)
25370 v.AddArg(v0)
25371 return true
25372 }
25373 }
25374 func rewriteValueARM64_OpNeq32F(v *Value) bool {
25375 v_1 := v.Args[1]
25376 v_0 := v.Args[0]
25377 b := v.Block
25378
25379
25380 for {
25381 x := v_0
25382 y := v_1
25383 v.reset(OpARM64NotEqual)
25384 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
25385 v0.AddArg2(x, y)
25386 v.AddArg(v0)
25387 return true
25388 }
25389 }
25390 func rewriteValueARM64_OpNeq64(v *Value) bool {
25391 v_1 := v.Args[1]
25392 v_0 := v.Args[0]
25393 b := v.Block
25394
25395
25396 for {
25397 x := v_0
25398 y := v_1
25399 v.reset(OpARM64NotEqual)
25400 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
25401 v0.AddArg2(x, y)
25402 v.AddArg(v0)
25403 return true
25404 }
25405 }
25406 func rewriteValueARM64_OpNeq64F(v *Value) bool {
25407 v_1 := v.Args[1]
25408 v_0 := v.Args[0]
25409 b := v.Block
25410
25411
25412 for {
25413 x := v_0
25414 y := v_1
25415 v.reset(OpARM64NotEqual)
25416 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
25417 v0.AddArg2(x, y)
25418 v.AddArg(v0)
25419 return true
25420 }
25421 }
25422 func rewriteValueARM64_OpNeq8(v *Value) bool {
25423 v_1 := v.Args[1]
25424 v_0 := v.Args[0]
25425 b := v.Block
25426 typ := &b.Func.Config.Types
25427
25428
25429 for {
25430 x := v_0
25431 y := v_1
25432 v.reset(OpARM64NotEqual)
25433 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
25434 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
25435 v1.AddArg(x)
25436 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
25437 v2.AddArg(y)
25438 v0.AddArg2(v1, v2)
25439 v.AddArg(v0)
25440 return true
25441 }
25442 }
25443 func rewriteValueARM64_OpNeqPtr(v *Value) bool {
25444 v_1 := v.Args[1]
25445 v_0 := v.Args[0]
25446 b := v.Block
25447
25448
25449 for {
25450 x := v_0
25451 y := v_1
25452 v.reset(OpARM64NotEqual)
25453 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
25454 v0.AddArg2(x, y)
25455 v.AddArg(v0)
25456 return true
25457 }
25458 }
25459 func rewriteValueARM64_OpNot(v *Value) bool {
25460 v_0 := v.Args[0]
25461 b := v.Block
25462 typ := &b.Func.Config.Types
25463
25464
25465 for {
25466 x := v_0
25467 v.reset(OpARM64XOR)
25468 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
25469 v0.AuxInt = int64ToAuxInt(1)
25470 v.AddArg2(v0, x)
25471 return true
25472 }
25473 }
25474 func rewriteValueARM64_OpOffPtr(v *Value) bool {
25475 v_0 := v.Args[0]
25476
25477
25478
25479 for {
25480 off := auxIntToInt64(v.AuxInt)
25481 ptr := v_0
25482 if ptr.Op != OpSP || !(is32Bit(off)) {
25483 break
25484 }
25485 v.reset(OpARM64MOVDaddr)
25486 v.AuxInt = int32ToAuxInt(int32(off))
25487 v.AddArg(ptr)
25488 return true
25489 }
25490
25491
25492 for {
25493 off := auxIntToInt64(v.AuxInt)
25494 ptr := v_0
25495 v.reset(OpARM64ADDconst)
25496 v.AuxInt = int64ToAuxInt(off)
25497 v.AddArg(ptr)
25498 return true
25499 }
25500 }
25501 func rewriteValueARM64_OpPanicBounds(v *Value) bool {
25502 v_2 := v.Args[2]
25503 v_1 := v.Args[1]
25504 v_0 := v.Args[0]
25505
25506
25507
25508 for {
25509 kind := auxIntToInt64(v.AuxInt)
25510 x := v_0
25511 y := v_1
25512 mem := v_2
25513 if !(boundsABI(kind) == 0) {
25514 break
25515 }
25516 v.reset(OpARM64LoweredPanicBoundsA)
25517 v.AuxInt = int64ToAuxInt(kind)
25518 v.AddArg3(x, y, mem)
25519 return true
25520 }
25521
25522
25523
25524 for {
25525 kind := auxIntToInt64(v.AuxInt)
25526 x := v_0
25527 y := v_1
25528 mem := v_2
25529 if !(boundsABI(kind) == 1) {
25530 break
25531 }
25532 v.reset(OpARM64LoweredPanicBoundsB)
25533 v.AuxInt = int64ToAuxInt(kind)
25534 v.AddArg3(x, y, mem)
25535 return true
25536 }
25537
25538
25539
25540 for {
25541 kind := auxIntToInt64(v.AuxInt)
25542 x := v_0
25543 y := v_1
25544 mem := v_2
25545 if !(boundsABI(kind) == 2) {
25546 break
25547 }
25548 v.reset(OpARM64LoweredPanicBoundsC)
25549 v.AuxInt = int64ToAuxInt(kind)
25550 v.AddArg3(x, y, mem)
25551 return true
25552 }
25553 return false
25554 }
25555 func rewriteValueARM64_OpPopCount16(v *Value) bool {
25556 v_0 := v.Args[0]
25557 b := v.Block
25558 typ := &b.Func.Config.Types
25559
25560
25561 for {
25562 t := v.Type
25563 x := v_0
25564 v.reset(OpARM64FMOVDfpgp)
25565 v.Type = t
25566 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
25567 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
25568 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
25569 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25570 v3.AddArg(x)
25571 v2.AddArg(v3)
25572 v1.AddArg(v2)
25573 v0.AddArg(v1)
25574 v.AddArg(v0)
25575 return true
25576 }
25577 }
25578 func rewriteValueARM64_OpPopCount32(v *Value) bool {
25579 v_0 := v.Args[0]
25580 b := v.Block
25581 typ := &b.Func.Config.Types
25582
25583
25584 for {
25585 t := v.Type
25586 x := v_0
25587 v.reset(OpARM64FMOVDfpgp)
25588 v.Type = t
25589 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
25590 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
25591 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
25592 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
25593 v3.AddArg(x)
25594 v2.AddArg(v3)
25595 v1.AddArg(v2)
25596 v0.AddArg(v1)
25597 v.AddArg(v0)
25598 return true
25599 }
25600 }
25601 func rewriteValueARM64_OpPopCount64(v *Value) bool {
25602 v_0 := v.Args[0]
25603 b := v.Block
25604 typ := &b.Func.Config.Types
25605
25606
25607 for {
25608 t := v.Type
25609 x := v_0
25610 v.reset(OpARM64FMOVDfpgp)
25611 v.Type = t
25612 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
25613 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
25614 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
25615 v2.AddArg(x)
25616 v1.AddArg(v2)
25617 v0.AddArg(v1)
25618 v.AddArg(v0)
25619 return true
25620 }
25621 }
25622 func rewriteValueARM64_OpPrefetchCache(v *Value) bool {
25623 v_1 := v.Args[1]
25624 v_0 := v.Args[0]
25625
25626
25627 for {
25628 addr := v_0
25629 mem := v_1
25630 v.reset(OpARM64PRFM)
25631 v.AuxInt = int64ToAuxInt(0)
25632 v.AddArg2(addr, mem)
25633 return true
25634 }
25635 }
25636 func rewriteValueARM64_OpPrefetchCacheStreamed(v *Value) bool {
25637 v_1 := v.Args[1]
25638 v_0 := v.Args[0]
25639
25640
25641 for {
25642 addr := v_0
25643 mem := v_1
25644 v.reset(OpARM64PRFM)
25645 v.AuxInt = int64ToAuxInt(1)
25646 v.AddArg2(addr, mem)
25647 return true
25648 }
25649 }
25650 func rewriteValueARM64_OpPubBarrier(v *Value) bool {
25651 v_0 := v.Args[0]
25652
25653
25654 for {
25655 mem := v_0
25656 v.reset(OpARM64DMB)
25657 v.AuxInt = int64ToAuxInt(0xe)
25658 v.AddArg(mem)
25659 return true
25660 }
25661 }
25662 func rewriteValueARM64_OpRotateLeft16(v *Value) bool {
25663 v_1 := v.Args[1]
25664 v_0 := v.Args[0]
25665 b := v.Block
25666 typ := &b.Func.Config.Types
25667
25668
25669 for {
25670 t := v.Type
25671 x := v_0
25672 if v_1.Op != OpARM64MOVDconst {
25673 break
25674 }
25675 c := auxIntToInt64(v_1.AuxInt)
25676 v.reset(OpOr16)
25677 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
25678 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
25679 v1.AuxInt = int64ToAuxInt(c & 15)
25680 v0.AddArg2(x, v1)
25681 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
25682 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
25683 v3.AuxInt = int64ToAuxInt(-c & 15)
25684 v2.AddArg2(x, v3)
25685 v.AddArg2(v0, v2)
25686 return true
25687 }
25688 return false
25689 }
25690 func rewriteValueARM64_OpRotateLeft32(v *Value) bool {
25691 v_1 := v.Args[1]
25692 v_0 := v.Args[0]
25693 b := v.Block
25694
25695
25696 for {
25697 x := v_0
25698 y := v_1
25699 v.reset(OpARM64RORW)
25700 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
25701 v0.AddArg(y)
25702 v.AddArg2(x, v0)
25703 return true
25704 }
25705 }
25706 func rewriteValueARM64_OpRotateLeft64(v *Value) bool {
25707 v_1 := v.Args[1]
25708 v_0 := v.Args[0]
25709 b := v.Block
25710
25711
25712 for {
25713 x := v_0
25714 y := v_1
25715 v.reset(OpARM64ROR)
25716 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
25717 v0.AddArg(y)
25718 v.AddArg2(x, v0)
25719 return true
25720 }
25721 }
25722 func rewriteValueARM64_OpRotateLeft8(v *Value) bool {
25723 v_1 := v.Args[1]
25724 v_0 := v.Args[0]
25725 b := v.Block
25726 typ := &b.Func.Config.Types
25727
25728
25729 for {
25730 t := v.Type
25731 x := v_0
25732 if v_1.Op != OpARM64MOVDconst {
25733 break
25734 }
25735 c := auxIntToInt64(v_1.AuxInt)
25736 v.reset(OpOr8)
25737 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
25738 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
25739 v1.AuxInt = int64ToAuxInt(c & 7)
25740 v0.AddArg2(x, v1)
25741 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
25742 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
25743 v3.AuxInt = int64ToAuxInt(-c & 7)
25744 v2.AddArg2(x, v3)
25745 v.AddArg2(v0, v2)
25746 return true
25747 }
25748 return false
25749 }
25750 func rewriteValueARM64_OpRsh16Ux16(v *Value) bool {
25751 v_1 := v.Args[1]
25752 v_0 := v.Args[0]
25753 b := v.Block
25754 typ := &b.Func.Config.Types
25755
25756
25757 for {
25758 t := v.Type
25759 x := v_0
25760 y := v_1
25761 v.reset(OpARM64CSEL)
25762 v.AuxInt = opToAuxInt(OpARM64LessThanU)
25763 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
25764 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25765 v1.AddArg(x)
25766 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25767 v2.AddArg(y)
25768 v0.AddArg2(v1, v2)
25769 v3 := b.NewValue0(v.Pos, OpConst64, t)
25770 v3.AuxInt = int64ToAuxInt(0)
25771 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25772 v4.AuxInt = int64ToAuxInt(64)
25773 v4.AddArg(v2)
25774 v.AddArg3(v0, v3, v4)
25775 return true
25776 }
25777 }
25778 func rewriteValueARM64_OpRsh16Ux32(v *Value) bool {
25779 v_1 := v.Args[1]
25780 v_0 := v.Args[0]
25781 b := v.Block
25782 typ := &b.Func.Config.Types
25783
25784
25785 for {
25786 t := v.Type
25787 x := v_0
25788 y := v_1
25789 v.reset(OpARM64CSEL)
25790 v.AuxInt = opToAuxInt(OpARM64LessThanU)
25791 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
25792 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25793 v1.AddArg(x)
25794 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
25795 v2.AddArg(y)
25796 v0.AddArg2(v1, v2)
25797 v3 := b.NewValue0(v.Pos, OpConst64, t)
25798 v3.AuxInt = int64ToAuxInt(0)
25799 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25800 v4.AuxInt = int64ToAuxInt(64)
25801 v4.AddArg(v2)
25802 v.AddArg3(v0, v3, v4)
25803 return true
25804 }
25805 }
25806 func rewriteValueARM64_OpRsh16Ux64(v *Value) bool {
25807 v_1 := v.Args[1]
25808 v_0 := v.Args[0]
25809 b := v.Block
25810 typ := &b.Func.Config.Types
25811
25812
25813 for {
25814 t := v.Type
25815 x := v_0
25816 y := v_1
25817 v.reset(OpARM64CSEL)
25818 v.AuxInt = opToAuxInt(OpARM64LessThanU)
25819 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
25820 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25821 v1.AddArg(x)
25822 v0.AddArg2(v1, y)
25823 v2 := b.NewValue0(v.Pos, OpConst64, t)
25824 v2.AuxInt = int64ToAuxInt(0)
25825 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25826 v3.AuxInt = int64ToAuxInt(64)
25827 v3.AddArg(y)
25828 v.AddArg3(v0, v2, v3)
25829 return true
25830 }
25831 }
25832 func rewriteValueARM64_OpRsh16Ux8(v *Value) bool {
25833 v_1 := v.Args[1]
25834 v_0 := v.Args[0]
25835 b := v.Block
25836 typ := &b.Func.Config.Types
25837
25838
25839 for {
25840 t := v.Type
25841 x := v_0
25842 y := v_1
25843 v.reset(OpARM64CSEL)
25844 v.AuxInt = opToAuxInt(OpARM64LessThanU)
25845 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
25846 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25847 v1.AddArg(x)
25848 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
25849 v2.AddArg(y)
25850 v0.AddArg2(v1, v2)
25851 v3 := b.NewValue0(v.Pos, OpConst64, t)
25852 v3.AuxInt = int64ToAuxInt(0)
25853 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25854 v4.AuxInt = int64ToAuxInt(64)
25855 v4.AddArg(v2)
25856 v.AddArg3(v0, v3, v4)
25857 return true
25858 }
25859 }
25860 func rewriteValueARM64_OpRsh16x16(v *Value) bool {
25861 v_1 := v.Args[1]
25862 v_0 := v.Args[0]
25863 b := v.Block
25864 typ := &b.Func.Config.Types
25865
25866
25867 for {
25868 x := v_0
25869 y := v_1
25870 v.reset(OpARM64SRA)
25871 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
25872 v0.AddArg(x)
25873 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
25874 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
25875 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25876 v2.AddArg(y)
25877 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
25878 v3.AuxInt = int64ToAuxInt(63)
25879 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25880 v4.AuxInt = int64ToAuxInt(64)
25881 v4.AddArg(v2)
25882 v1.AddArg3(v2, v3, v4)
25883 v.AddArg2(v0, v1)
25884 return true
25885 }
25886 }
25887 func rewriteValueARM64_OpRsh16x32(v *Value) bool {
25888 v_1 := v.Args[1]
25889 v_0 := v.Args[0]
25890 b := v.Block
25891 typ := &b.Func.Config.Types
25892
25893
25894 for {
25895 x := v_0
25896 y := v_1
25897 v.reset(OpARM64SRA)
25898 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
25899 v0.AddArg(x)
25900 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
25901 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
25902 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
25903 v2.AddArg(y)
25904 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
25905 v3.AuxInt = int64ToAuxInt(63)
25906 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25907 v4.AuxInt = int64ToAuxInt(64)
25908 v4.AddArg(v2)
25909 v1.AddArg3(v2, v3, v4)
25910 v.AddArg2(v0, v1)
25911 return true
25912 }
25913 }
25914 func rewriteValueARM64_OpRsh16x64(v *Value) bool {
25915 v_1 := v.Args[1]
25916 v_0 := v.Args[0]
25917 b := v.Block
25918 typ := &b.Func.Config.Types
25919
25920
25921 for {
25922 x := v_0
25923 y := v_1
25924 v.reset(OpARM64SRA)
25925 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
25926 v0.AddArg(x)
25927 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
25928 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
25929 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
25930 v2.AuxInt = int64ToAuxInt(63)
25931 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25932 v3.AuxInt = int64ToAuxInt(64)
25933 v3.AddArg(y)
25934 v1.AddArg3(y, v2, v3)
25935 v.AddArg2(v0, v1)
25936 return true
25937 }
25938 }
25939 func rewriteValueARM64_OpRsh16x8(v *Value) bool {
25940 v_1 := v.Args[1]
25941 v_0 := v.Args[0]
25942 b := v.Block
25943 typ := &b.Func.Config.Types
25944
25945
25946 for {
25947 x := v_0
25948 y := v_1
25949 v.reset(OpARM64SRA)
25950 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
25951 v0.AddArg(x)
25952 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
25953 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
25954 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
25955 v2.AddArg(y)
25956 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
25957 v3.AuxInt = int64ToAuxInt(63)
25958 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25959 v4.AuxInt = int64ToAuxInt(64)
25960 v4.AddArg(v2)
25961 v1.AddArg3(v2, v3, v4)
25962 v.AddArg2(v0, v1)
25963 return true
25964 }
25965 }
25966 func rewriteValueARM64_OpRsh32Ux16(v *Value) bool {
25967 v_1 := v.Args[1]
25968 v_0 := v.Args[0]
25969 b := v.Block
25970 typ := &b.Func.Config.Types
25971
25972
25973 for {
25974 t := v.Type
25975 x := v_0
25976 y := v_1
25977 v.reset(OpARM64CSEL)
25978 v.AuxInt = opToAuxInt(OpARM64LessThanU)
25979 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
25980 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
25981 v1.AddArg(x)
25982 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
25983 v2.AddArg(y)
25984 v0.AddArg2(v1, v2)
25985 v3 := b.NewValue0(v.Pos, OpConst64, t)
25986 v3.AuxInt = int64ToAuxInt(0)
25987 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
25988 v4.AuxInt = int64ToAuxInt(64)
25989 v4.AddArg(v2)
25990 v.AddArg3(v0, v3, v4)
25991 return true
25992 }
25993 }
25994 func rewriteValueARM64_OpRsh32Ux32(v *Value) bool {
25995 v_1 := v.Args[1]
25996 v_0 := v.Args[0]
25997 b := v.Block
25998 typ := &b.Func.Config.Types
25999
26000
26001 for {
26002 t := v.Type
26003 x := v_0
26004 y := v_1
26005 v.reset(OpARM64CSEL)
26006 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26007 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26008 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26009 v1.AddArg(x)
26010 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26011 v2.AddArg(y)
26012 v0.AddArg2(v1, v2)
26013 v3 := b.NewValue0(v.Pos, OpConst64, t)
26014 v3.AuxInt = int64ToAuxInt(0)
26015 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26016 v4.AuxInt = int64ToAuxInt(64)
26017 v4.AddArg(v2)
26018 v.AddArg3(v0, v3, v4)
26019 return true
26020 }
26021 }
26022 func rewriteValueARM64_OpRsh32Ux64(v *Value) bool {
26023 v_1 := v.Args[1]
26024 v_0 := v.Args[0]
26025 b := v.Block
26026 typ := &b.Func.Config.Types
26027
26028
26029 for {
26030 t := v.Type
26031 x := v_0
26032 y := v_1
26033 v.reset(OpARM64CSEL)
26034 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26035 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26036 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26037 v1.AddArg(x)
26038 v0.AddArg2(v1, y)
26039 v2 := b.NewValue0(v.Pos, OpConst64, t)
26040 v2.AuxInt = int64ToAuxInt(0)
26041 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26042 v3.AuxInt = int64ToAuxInt(64)
26043 v3.AddArg(y)
26044 v.AddArg3(v0, v2, v3)
26045 return true
26046 }
26047 }
26048 func rewriteValueARM64_OpRsh32Ux8(v *Value) bool {
26049 v_1 := v.Args[1]
26050 v_0 := v.Args[0]
26051 b := v.Block
26052 typ := &b.Func.Config.Types
26053
26054
26055 for {
26056 t := v.Type
26057 x := v_0
26058 y := v_1
26059 v.reset(OpARM64CSEL)
26060 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26061 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26062 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26063 v1.AddArg(x)
26064 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26065 v2.AddArg(y)
26066 v0.AddArg2(v1, v2)
26067 v3 := b.NewValue0(v.Pos, OpConst64, t)
26068 v3.AuxInt = int64ToAuxInt(0)
26069 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26070 v4.AuxInt = int64ToAuxInt(64)
26071 v4.AddArg(v2)
26072 v.AddArg3(v0, v3, v4)
26073 return true
26074 }
26075 }
26076 func rewriteValueARM64_OpRsh32x16(v *Value) bool {
26077 v_1 := v.Args[1]
26078 v_0 := v.Args[0]
26079 b := v.Block
26080 typ := &b.Func.Config.Types
26081
26082
26083 for {
26084 x := v_0
26085 y := v_1
26086 v.reset(OpARM64SRA)
26087 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
26088 v0.AddArg(x)
26089 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26090 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26091 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
26092 v2.AddArg(y)
26093 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
26094 v3.AuxInt = int64ToAuxInt(63)
26095 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26096 v4.AuxInt = int64ToAuxInt(64)
26097 v4.AddArg(v2)
26098 v1.AddArg3(v2, v3, v4)
26099 v.AddArg2(v0, v1)
26100 return true
26101 }
26102 }
26103 func rewriteValueARM64_OpRsh32x32(v *Value) bool {
26104 v_1 := v.Args[1]
26105 v_0 := v.Args[0]
26106 b := v.Block
26107 typ := &b.Func.Config.Types
26108
26109
26110 for {
26111 x := v_0
26112 y := v_1
26113 v.reset(OpARM64SRA)
26114 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
26115 v0.AddArg(x)
26116 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26117 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26118 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26119 v2.AddArg(y)
26120 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
26121 v3.AuxInt = int64ToAuxInt(63)
26122 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26123 v4.AuxInt = int64ToAuxInt(64)
26124 v4.AddArg(v2)
26125 v1.AddArg3(v2, v3, v4)
26126 v.AddArg2(v0, v1)
26127 return true
26128 }
26129 }
26130 func rewriteValueARM64_OpRsh32x64(v *Value) bool {
26131 v_1 := v.Args[1]
26132 v_0 := v.Args[0]
26133 b := v.Block
26134 typ := &b.Func.Config.Types
26135
26136
26137 for {
26138 x := v_0
26139 y := v_1
26140 v.reset(OpARM64SRA)
26141 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
26142 v0.AddArg(x)
26143 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26144 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26145 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
26146 v2.AuxInt = int64ToAuxInt(63)
26147 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26148 v3.AuxInt = int64ToAuxInt(64)
26149 v3.AddArg(y)
26150 v1.AddArg3(y, v2, v3)
26151 v.AddArg2(v0, v1)
26152 return true
26153 }
26154 }
26155 func rewriteValueARM64_OpRsh32x8(v *Value) bool {
26156 v_1 := v.Args[1]
26157 v_0 := v.Args[0]
26158 b := v.Block
26159 typ := &b.Func.Config.Types
26160
26161
26162 for {
26163 x := v_0
26164 y := v_1
26165 v.reset(OpARM64SRA)
26166 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
26167 v0.AddArg(x)
26168 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26169 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26170 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26171 v2.AddArg(y)
26172 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
26173 v3.AuxInt = int64ToAuxInt(63)
26174 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26175 v4.AuxInt = int64ToAuxInt(64)
26176 v4.AddArg(v2)
26177 v1.AddArg3(v2, v3, v4)
26178 v.AddArg2(v0, v1)
26179 return true
26180 }
26181 }
26182 func rewriteValueARM64_OpRsh64Ux16(v *Value) bool {
26183 v_1 := v.Args[1]
26184 v_0 := v.Args[0]
26185 b := v.Block
26186 typ := &b.Func.Config.Types
26187
26188
26189 for {
26190 t := v.Type
26191 x := v_0
26192 y := v_1
26193 v.reset(OpARM64CSEL)
26194 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26195 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26196 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
26197 v1.AddArg(y)
26198 v0.AddArg2(x, v1)
26199 v2 := b.NewValue0(v.Pos, OpConst64, t)
26200 v2.AuxInt = int64ToAuxInt(0)
26201 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26202 v3.AuxInt = int64ToAuxInt(64)
26203 v3.AddArg(v1)
26204 v.AddArg3(v0, v2, v3)
26205 return true
26206 }
26207 }
26208 func rewriteValueARM64_OpRsh64Ux32(v *Value) bool {
26209 v_1 := v.Args[1]
26210 v_0 := v.Args[0]
26211 b := v.Block
26212 typ := &b.Func.Config.Types
26213
26214
26215 for {
26216 t := v.Type
26217 x := v_0
26218 y := v_1
26219 v.reset(OpARM64CSEL)
26220 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26221 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26222 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26223 v1.AddArg(y)
26224 v0.AddArg2(x, v1)
26225 v2 := b.NewValue0(v.Pos, OpConst64, t)
26226 v2.AuxInt = int64ToAuxInt(0)
26227 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26228 v3.AuxInt = int64ToAuxInt(64)
26229 v3.AddArg(v1)
26230 v.AddArg3(v0, v2, v3)
26231 return true
26232 }
26233 }
26234 func rewriteValueARM64_OpRsh64Ux64(v *Value) bool {
26235 v_1 := v.Args[1]
26236 v_0 := v.Args[0]
26237 b := v.Block
26238
26239
26240 for {
26241 t := v.Type
26242 x := v_0
26243 y := v_1
26244 v.reset(OpARM64CSEL)
26245 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26246 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26247 v0.AddArg2(x, y)
26248 v1 := b.NewValue0(v.Pos, OpConst64, t)
26249 v1.AuxInt = int64ToAuxInt(0)
26250 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26251 v2.AuxInt = int64ToAuxInt(64)
26252 v2.AddArg(y)
26253 v.AddArg3(v0, v1, v2)
26254 return true
26255 }
26256 }
26257 func rewriteValueARM64_OpRsh64Ux8(v *Value) bool {
26258 v_1 := v.Args[1]
26259 v_0 := v.Args[0]
26260 b := v.Block
26261 typ := &b.Func.Config.Types
26262
26263
26264 for {
26265 t := v.Type
26266 x := v_0
26267 y := v_1
26268 v.reset(OpARM64CSEL)
26269 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26270 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26271 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26272 v1.AddArg(y)
26273 v0.AddArg2(x, v1)
26274 v2 := b.NewValue0(v.Pos, OpConst64, t)
26275 v2.AuxInt = int64ToAuxInt(0)
26276 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26277 v3.AuxInt = int64ToAuxInt(64)
26278 v3.AddArg(v1)
26279 v.AddArg3(v0, v2, v3)
26280 return true
26281 }
26282 }
26283 func rewriteValueARM64_OpRsh64x16(v *Value) bool {
26284 v_1 := v.Args[1]
26285 v_0 := v.Args[0]
26286 b := v.Block
26287 typ := &b.Func.Config.Types
26288
26289
26290 for {
26291 x := v_0
26292 y := v_1
26293 v.reset(OpARM64SRA)
26294 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26295 v0.AuxInt = opToAuxInt(OpARM64LessThanU)
26296 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
26297 v1.AddArg(y)
26298 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
26299 v2.AuxInt = int64ToAuxInt(63)
26300 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26301 v3.AuxInt = int64ToAuxInt(64)
26302 v3.AddArg(v1)
26303 v0.AddArg3(v1, v2, v3)
26304 v.AddArg2(x, v0)
26305 return true
26306 }
26307 }
26308 func rewriteValueARM64_OpRsh64x32(v *Value) bool {
26309 v_1 := v.Args[1]
26310 v_0 := v.Args[0]
26311 b := v.Block
26312 typ := &b.Func.Config.Types
26313
26314
26315 for {
26316 x := v_0
26317 y := v_1
26318 v.reset(OpARM64SRA)
26319 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26320 v0.AuxInt = opToAuxInt(OpARM64LessThanU)
26321 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26322 v1.AddArg(y)
26323 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
26324 v2.AuxInt = int64ToAuxInt(63)
26325 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26326 v3.AuxInt = int64ToAuxInt(64)
26327 v3.AddArg(v1)
26328 v0.AddArg3(v1, v2, v3)
26329 v.AddArg2(x, v0)
26330 return true
26331 }
26332 }
26333 func rewriteValueARM64_OpRsh64x64(v *Value) bool {
26334 v_1 := v.Args[1]
26335 v_0 := v.Args[0]
26336 b := v.Block
26337
26338
26339 for {
26340 x := v_0
26341 y := v_1
26342 v.reset(OpARM64SRA)
26343 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26344 v0.AuxInt = opToAuxInt(OpARM64LessThanU)
26345 v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
26346 v1.AuxInt = int64ToAuxInt(63)
26347 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26348 v2.AuxInt = int64ToAuxInt(64)
26349 v2.AddArg(y)
26350 v0.AddArg3(y, v1, v2)
26351 v.AddArg2(x, v0)
26352 return true
26353 }
26354 }
26355 func rewriteValueARM64_OpRsh64x8(v *Value) bool {
26356 v_1 := v.Args[1]
26357 v_0 := v.Args[0]
26358 b := v.Block
26359 typ := &b.Func.Config.Types
26360
26361
26362 for {
26363 x := v_0
26364 y := v_1
26365 v.reset(OpARM64SRA)
26366 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26367 v0.AuxInt = opToAuxInt(OpARM64LessThanU)
26368 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26369 v1.AddArg(y)
26370 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
26371 v2.AuxInt = int64ToAuxInt(63)
26372 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26373 v3.AuxInt = int64ToAuxInt(64)
26374 v3.AddArg(v1)
26375 v0.AddArg3(v1, v2, v3)
26376 v.AddArg2(x, v0)
26377 return true
26378 }
26379 }
26380 func rewriteValueARM64_OpRsh8Ux16(v *Value) bool {
26381 v_1 := v.Args[1]
26382 v_0 := v.Args[0]
26383 b := v.Block
26384 typ := &b.Func.Config.Types
26385
26386
26387 for {
26388 t := v.Type
26389 x := v_0
26390 y := v_1
26391 v.reset(OpARM64CSEL)
26392 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26393 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26394 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26395 v1.AddArg(x)
26396 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
26397 v2.AddArg(y)
26398 v0.AddArg2(v1, v2)
26399 v3 := b.NewValue0(v.Pos, OpConst64, t)
26400 v3.AuxInt = int64ToAuxInt(0)
26401 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26402 v4.AuxInt = int64ToAuxInt(64)
26403 v4.AddArg(v2)
26404 v.AddArg3(v0, v3, v4)
26405 return true
26406 }
26407 }
26408 func rewriteValueARM64_OpRsh8Ux32(v *Value) bool {
26409 v_1 := v.Args[1]
26410 v_0 := v.Args[0]
26411 b := v.Block
26412 typ := &b.Func.Config.Types
26413
26414
26415 for {
26416 t := v.Type
26417 x := v_0
26418 y := v_1
26419 v.reset(OpARM64CSEL)
26420 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26421 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26422 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26423 v1.AddArg(x)
26424 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26425 v2.AddArg(y)
26426 v0.AddArg2(v1, v2)
26427 v3 := b.NewValue0(v.Pos, OpConst64, t)
26428 v3.AuxInt = int64ToAuxInt(0)
26429 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26430 v4.AuxInt = int64ToAuxInt(64)
26431 v4.AddArg(v2)
26432 v.AddArg3(v0, v3, v4)
26433 return true
26434 }
26435 }
26436 func rewriteValueARM64_OpRsh8Ux64(v *Value) bool {
26437 v_1 := v.Args[1]
26438 v_0 := v.Args[0]
26439 b := v.Block
26440 typ := &b.Func.Config.Types
26441
26442
26443 for {
26444 t := v.Type
26445 x := v_0
26446 y := v_1
26447 v.reset(OpARM64CSEL)
26448 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26449 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26450 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26451 v1.AddArg(x)
26452 v0.AddArg2(v1, y)
26453 v2 := b.NewValue0(v.Pos, OpConst64, t)
26454 v2.AuxInt = int64ToAuxInt(0)
26455 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26456 v3.AuxInt = int64ToAuxInt(64)
26457 v3.AddArg(y)
26458 v.AddArg3(v0, v2, v3)
26459 return true
26460 }
26461 }
26462 func rewriteValueARM64_OpRsh8Ux8(v *Value) bool {
26463 v_1 := v.Args[1]
26464 v_0 := v.Args[0]
26465 b := v.Block
26466 typ := &b.Func.Config.Types
26467
26468
26469 for {
26470 t := v.Type
26471 x := v_0
26472 y := v_1
26473 v.reset(OpARM64CSEL)
26474 v.AuxInt = opToAuxInt(OpARM64LessThanU)
26475 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
26476 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26477 v1.AddArg(x)
26478 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26479 v2.AddArg(y)
26480 v0.AddArg2(v1, v2)
26481 v3 := b.NewValue0(v.Pos, OpConst64, t)
26482 v3.AuxInt = int64ToAuxInt(0)
26483 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26484 v4.AuxInt = int64ToAuxInt(64)
26485 v4.AddArg(v2)
26486 v.AddArg3(v0, v3, v4)
26487 return true
26488 }
26489 }
26490 func rewriteValueARM64_OpRsh8x16(v *Value) bool {
26491 v_1 := v.Args[1]
26492 v_0 := v.Args[0]
26493 b := v.Block
26494 typ := &b.Func.Config.Types
26495
26496
26497 for {
26498 x := v_0
26499 y := v_1
26500 v.reset(OpARM64SRA)
26501 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
26502 v0.AddArg(x)
26503 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26504 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26505 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
26506 v2.AddArg(y)
26507 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
26508 v3.AuxInt = int64ToAuxInt(63)
26509 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26510 v4.AuxInt = int64ToAuxInt(64)
26511 v4.AddArg(v2)
26512 v1.AddArg3(v2, v3, v4)
26513 v.AddArg2(v0, v1)
26514 return true
26515 }
26516 }
26517 func rewriteValueARM64_OpRsh8x32(v *Value) bool {
26518 v_1 := v.Args[1]
26519 v_0 := v.Args[0]
26520 b := v.Block
26521 typ := &b.Func.Config.Types
26522
26523
26524 for {
26525 x := v_0
26526 y := v_1
26527 v.reset(OpARM64SRA)
26528 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
26529 v0.AddArg(x)
26530 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26531 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26532 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
26533 v2.AddArg(y)
26534 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
26535 v3.AuxInt = int64ToAuxInt(63)
26536 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26537 v4.AuxInt = int64ToAuxInt(64)
26538 v4.AddArg(v2)
26539 v1.AddArg3(v2, v3, v4)
26540 v.AddArg2(v0, v1)
26541 return true
26542 }
26543 }
26544 func rewriteValueARM64_OpRsh8x64(v *Value) bool {
26545 v_1 := v.Args[1]
26546 v_0 := v.Args[0]
26547 b := v.Block
26548 typ := &b.Func.Config.Types
26549
26550
26551 for {
26552 x := v_0
26553 y := v_1
26554 v.reset(OpARM64SRA)
26555 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
26556 v0.AddArg(x)
26557 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26558 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26559 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
26560 v2.AuxInt = int64ToAuxInt(63)
26561 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26562 v3.AuxInt = int64ToAuxInt(64)
26563 v3.AddArg(y)
26564 v1.AddArg3(y, v2, v3)
26565 v.AddArg2(v0, v1)
26566 return true
26567 }
26568 }
26569 func rewriteValueARM64_OpRsh8x8(v *Value) bool {
26570 v_1 := v.Args[1]
26571 v_0 := v.Args[0]
26572 b := v.Block
26573 typ := &b.Func.Config.Types
26574
26575
26576 for {
26577 x := v_0
26578 y := v_1
26579 v.reset(OpARM64SRA)
26580 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
26581 v0.AddArg(x)
26582 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
26583 v1.AuxInt = opToAuxInt(OpARM64LessThanU)
26584 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
26585 v2.AddArg(y)
26586 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
26587 v3.AuxInt = int64ToAuxInt(63)
26588 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
26589 v4.AuxInt = int64ToAuxInt(64)
26590 v4.AddArg(v2)
26591 v1.AddArg3(v2, v3, v4)
26592 v.AddArg2(v0, v1)
26593 return true
26594 }
26595 }
26596 func rewriteValueARM64_OpSelect0(v *Value) bool {
26597 v_0 := v.Args[0]
26598 b := v.Block
26599 typ := &b.Func.Config.Types
26600
26601
26602 for {
26603 if v_0.Op != OpAdd64carry {
26604 break
26605 }
26606 c := v_0.Args[2]
26607 x := v_0.Args[0]
26608 y := v_0.Args[1]
26609 v.reset(OpSelect0)
26610 v.Type = typ.UInt64
26611 v0 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26612 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
26613 v2 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26614 v2.AuxInt = int64ToAuxInt(-1)
26615 v2.AddArg(c)
26616 v1.AddArg(v2)
26617 v0.AddArg3(x, y, v1)
26618 v.AddArg(v0)
26619 return true
26620 }
26621
26622
26623 for {
26624 if v_0.Op != OpSub64borrow {
26625 break
26626 }
26627 bo := v_0.Args[2]
26628 x := v_0.Args[0]
26629 y := v_0.Args[1]
26630 v.reset(OpSelect0)
26631 v.Type = typ.UInt64
26632 v0 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26633 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
26634 v2 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26635 v2.AddArg(bo)
26636 v1.AddArg(v2)
26637 v0.AddArg3(x, y, v1)
26638 v.AddArg(v0)
26639 return true
26640 }
26641 return false
26642 }
26643 func rewriteValueARM64_OpSelect1(v *Value) bool {
26644 v_0 := v.Args[0]
26645 b := v.Block
26646 typ := &b.Func.Config.Types
26647
26648
26649 for {
26650 if v_0.Op != OpAdd64carry {
26651 break
26652 }
26653 c := v_0.Args[2]
26654 x := v_0.Args[0]
26655 y := v_0.Args[1]
26656 v.reset(OpARM64ADCzerocarry)
26657 v.Type = typ.UInt64
26658 v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
26659 v1 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26660 v2 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
26661 v3 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26662 v3.AuxInt = int64ToAuxInt(-1)
26663 v3.AddArg(c)
26664 v2.AddArg(v3)
26665 v1.AddArg3(x, y, v2)
26666 v0.AddArg(v1)
26667 v.AddArg(v0)
26668 return true
26669 }
26670
26671
26672 for {
26673 if v_0.Op != OpSub64borrow {
26674 break
26675 }
26676 bo := v_0.Args[2]
26677 x := v_0.Args[0]
26678 y := v_0.Args[1]
26679 v.reset(OpARM64NEG)
26680 v.Type = typ.UInt64
26681 v0 := b.NewValue0(v.Pos, OpARM64NGCzerocarry, typ.UInt64)
26682 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
26683 v2 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26684 v3 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
26685 v4 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
26686 v4.AddArg(bo)
26687 v3.AddArg(v4)
26688 v2.AddArg3(x, y, v3)
26689 v1.AddArg(v2)
26690 v0.AddArg(v1)
26691 v.AddArg(v0)
26692 return true
26693 }
26694 return false
26695 }
26696 func rewriteValueARM64_OpSelectN(v *Value) bool {
26697 v_0 := v.Args[0]
26698 b := v.Block
26699 config := b.Func.Config
26700
26701
26702
26703 for {
26704 if auxIntToInt64(v.AuxInt) != 0 {
26705 break
26706 }
26707 call := v_0
26708 if call.Op != OpARM64CALLstatic || len(call.Args) != 1 {
26709 break
26710 }
26711 sym := auxToCall(call.Aux)
26712 s1 := call.Args[0]
26713 if s1.Op != OpARM64MOVDstore {
26714 break
26715 }
26716 _ = s1.Args[2]
26717 s1_1 := s1.Args[1]
26718 if s1_1.Op != OpARM64MOVDconst {
26719 break
26720 }
26721 sz := auxIntToInt64(s1_1.AuxInt)
26722 s2 := s1.Args[2]
26723 if s2.Op != OpARM64MOVDstore {
26724 break
26725 }
26726 _ = s2.Args[2]
26727 src := s2.Args[1]
26728 s3 := s2.Args[2]
26729 if s3.Op != OpARM64MOVDstore {
26730 break
26731 }
26732 mem := s3.Args[2]
26733 dst := s3.Args[1]
26734 if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)) {
26735 break
26736 }
26737 v.reset(OpMove)
26738 v.AuxInt = int64ToAuxInt(sz)
26739 v.AddArg3(dst, src, mem)
26740 return true
26741 }
26742
26743
26744
26745 for {
26746 if auxIntToInt64(v.AuxInt) != 0 {
26747 break
26748 }
26749 call := v_0
26750 if call.Op != OpARM64CALLstatic || len(call.Args) != 4 {
26751 break
26752 }
26753 sym := auxToCall(call.Aux)
26754 mem := call.Args[3]
26755 dst := call.Args[0]
26756 src := call.Args[1]
26757 call_2 := call.Args[2]
26758 if call_2.Op != OpARM64MOVDconst {
26759 break
26760 }
26761 sz := auxIntToInt64(call_2.AuxInt)
26762 if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
26763 break
26764 }
26765 v.reset(OpMove)
26766 v.AuxInt = int64ToAuxInt(sz)
26767 v.AddArg3(dst, src, mem)
26768 return true
26769 }
26770 return false
26771 }
26772 func rewriteValueARM64_OpSlicemask(v *Value) bool {
26773 v_0 := v.Args[0]
26774 b := v.Block
26775
26776
26777 for {
26778 t := v.Type
26779 x := v_0
26780 v.reset(OpARM64SRAconst)
26781 v.AuxInt = int64ToAuxInt(63)
26782 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
26783 v0.AddArg(x)
26784 v.AddArg(v0)
26785 return true
26786 }
26787 }
26788 func rewriteValueARM64_OpStore(v *Value) bool {
26789 v_2 := v.Args[2]
26790 v_1 := v.Args[1]
26791 v_0 := v.Args[0]
26792
26793
26794
26795 for {
26796 t := auxToType(v.Aux)
26797 ptr := v_0
26798 val := v_1
26799 mem := v_2
26800 if !(t.Size() == 1) {
26801 break
26802 }
26803 v.reset(OpARM64MOVBstore)
26804 v.AddArg3(ptr, val, mem)
26805 return true
26806 }
26807
26808
26809
26810 for {
26811 t := auxToType(v.Aux)
26812 ptr := v_0
26813 val := v_1
26814 mem := v_2
26815 if !(t.Size() == 2) {
26816 break
26817 }
26818 v.reset(OpARM64MOVHstore)
26819 v.AddArg3(ptr, val, mem)
26820 return true
26821 }
26822
26823
26824
26825 for {
26826 t := auxToType(v.Aux)
26827 ptr := v_0
26828 val := v_1
26829 mem := v_2
26830 if !(t.Size() == 4 && !is32BitFloat(val.Type)) {
26831 break
26832 }
26833 v.reset(OpARM64MOVWstore)
26834 v.AddArg3(ptr, val, mem)
26835 return true
26836 }
26837
26838
26839
26840 for {
26841 t := auxToType(v.Aux)
26842 ptr := v_0
26843 val := v_1
26844 mem := v_2
26845 if !(t.Size() == 8 && !is64BitFloat(val.Type)) {
26846 break
26847 }
26848 v.reset(OpARM64MOVDstore)
26849 v.AddArg3(ptr, val, mem)
26850 return true
26851 }
26852
26853
26854
26855 for {
26856 t := auxToType(v.Aux)
26857 ptr := v_0
26858 val := v_1
26859 mem := v_2
26860 if !(t.Size() == 4 && is32BitFloat(val.Type)) {
26861 break
26862 }
26863 v.reset(OpARM64FMOVSstore)
26864 v.AddArg3(ptr, val, mem)
26865 return true
26866 }
26867
26868
26869
26870 for {
26871 t := auxToType(v.Aux)
26872 ptr := v_0
26873 val := v_1
26874 mem := v_2
26875 if !(t.Size() == 8 && is64BitFloat(val.Type)) {
26876 break
26877 }
26878 v.reset(OpARM64FMOVDstore)
26879 v.AddArg3(ptr, val, mem)
26880 return true
26881 }
26882 return false
26883 }
26884 func rewriteValueARM64_OpZero(v *Value) bool {
26885 v_1 := v.Args[1]
26886 v_0 := v.Args[0]
26887 b := v.Block
26888 config := b.Func.Config
26889 typ := &b.Func.Config.Types
26890
26891
26892 for {
26893 if auxIntToInt64(v.AuxInt) != 0 {
26894 break
26895 }
26896 mem := v_1
26897 v.copyOf(mem)
26898 return true
26899 }
26900
26901
26902 for {
26903 if auxIntToInt64(v.AuxInt) != 1 {
26904 break
26905 }
26906 ptr := v_0
26907 mem := v_1
26908 v.reset(OpARM64MOVBstore)
26909 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
26910 v0.AuxInt = int64ToAuxInt(0)
26911 v.AddArg3(ptr, v0, mem)
26912 return true
26913 }
26914
26915
26916 for {
26917 if auxIntToInt64(v.AuxInt) != 2 {
26918 break
26919 }
26920 ptr := v_0
26921 mem := v_1
26922 v.reset(OpARM64MOVHstore)
26923 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
26924 v0.AuxInt = int64ToAuxInt(0)
26925 v.AddArg3(ptr, v0, mem)
26926 return true
26927 }
26928
26929
26930 for {
26931 if auxIntToInt64(v.AuxInt) != 4 {
26932 break
26933 }
26934 ptr := v_0
26935 mem := v_1
26936 v.reset(OpARM64MOVWstore)
26937 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
26938 v0.AuxInt = int64ToAuxInt(0)
26939 v.AddArg3(ptr, v0, mem)
26940 return true
26941 }
26942
26943
26944 for {
26945 if auxIntToInt64(v.AuxInt) != 8 {
26946 break
26947 }
26948 ptr := v_0
26949 mem := v_1
26950 v.reset(OpARM64MOVDstore)
26951 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
26952 v0.AuxInt = int64ToAuxInt(0)
26953 v.AddArg3(ptr, v0, mem)
26954 return true
26955 }
26956
26957
26958 for {
26959 if auxIntToInt64(v.AuxInt) != 3 {
26960 break
26961 }
26962 ptr := v_0
26963 mem := v_1
26964 v.reset(OpARM64MOVBstore)
26965 v.AuxInt = int32ToAuxInt(2)
26966 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
26967 v0.AuxInt = int64ToAuxInt(0)
26968 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
26969 v1.AddArg3(ptr, v0, mem)
26970 v.AddArg3(ptr, v0, v1)
26971 return true
26972 }
26973
26974
26975 for {
26976 if auxIntToInt64(v.AuxInt) != 5 {
26977 break
26978 }
26979 ptr := v_0
26980 mem := v_1
26981 v.reset(OpARM64MOVBstore)
26982 v.AuxInt = int32ToAuxInt(4)
26983 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
26984 v0.AuxInt = int64ToAuxInt(0)
26985 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
26986 v1.AddArg3(ptr, v0, mem)
26987 v.AddArg3(ptr, v0, v1)
26988 return true
26989 }
26990
26991
26992 for {
26993 if auxIntToInt64(v.AuxInt) != 6 {
26994 break
26995 }
26996 ptr := v_0
26997 mem := v_1
26998 v.reset(OpARM64MOVHstore)
26999 v.AuxInt = int32ToAuxInt(4)
27000 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27001 v0.AuxInt = int64ToAuxInt(0)
27002 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
27003 v1.AddArg3(ptr, v0, mem)
27004 v.AddArg3(ptr, v0, v1)
27005 return true
27006 }
27007
27008
27009 for {
27010 if auxIntToInt64(v.AuxInt) != 7 {
27011 break
27012 }
27013 ptr := v_0
27014 mem := v_1
27015 v.reset(OpARM64MOVBstore)
27016 v.AuxInt = int32ToAuxInt(6)
27017 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27018 v0.AuxInt = int64ToAuxInt(0)
27019 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
27020 v1.AuxInt = int32ToAuxInt(4)
27021 v2 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
27022 v2.AddArg3(ptr, v0, mem)
27023 v1.AddArg3(ptr, v0, v2)
27024 v.AddArg3(ptr, v0, v1)
27025 return true
27026 }
27027
27028
27029 for {
27030 if auxIntToInt64(v.AuxInt) != 9 {
27031 break
27032 }
27033 ptr := v_0
27034 mem := v_1
27035 v.reset(OpARM64MOVBstore)
27036 v.AuxInt = int32ToAuxInt(8)
27037 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27038 v0.AuxInt = int64ToAuxInt(0)
27039 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27040 v1.AddArg3(ptr, v0, mem)
27041 v.AddArg3(ptr, v0, v1)
27042 return true
27043 }
27044
27045
27046 for {
27047 if auxIntToInt64(v.AuxInt) != 10 {
27048 break
27049 }
27050 ptr := v_0
27051 mem := v_1
27052 v.reset(OpARM64MOVHstore)
27053 v.AuxInt = int32ToAuxInt(8)
27054 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27055 v0.AuxInt = int64ToAuxInt(0)
27056 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27057 v1.AddArg3(ptr, v0, mem)
27058 v.AddArg3(ptr, v0, v1)
27059 return true
27060 }
27061
27062
27063 for {
27064 if auxIntToInt64(v.AuxInt) != 11 {
27065 break
27066 }
27067 ptr := v_0
27068 mem := v_1
27069 v.reset(OpARM64MOVBstore)
27070 v.AuxInt = int32ToAuxInt(10)
27071 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27072 v0.AuxInt = int64ToAuxInt(0)
27073 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
27074 v1.AuxInt = int32ToAuxInt(8)
27075 v2 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27076 v2.AddArg3(ptr, v0, mem)
27077 v1.AddArg3(ptr, v0, v2)
27078 v.AddArg3(ptr, v0, v1)
27079 return true
27080 }
27081
27082
27083 for {
27084 if auxIntToInt64(v.AuxInt) != 12 {
27085 break
27086 }
27087 ptr := v_0
27088 mem := v_1
27089 v.reset(OpARM64MOVWstore)
27090 v.AuxInt = int32ToAuxInt(8)
27091 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27092 v0.AuxInt = int64ToAuxInt(0)
27093 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27094 v1.AddArg3(ptr, v0, mem)
27095 v.AddArg3(ptr, v0, v1)
27096 return true
27097 }
27098
27099
27100 for {
27101 if auxIntToInt64(v.AuxInt) != 13 {
27102 break
27103 }
27104 ptr := v_0
27105 mem := v_1
27106 v.reset(OpARM64MOVBstore)
27107 v.AuxInt = int32ToAuxInt(12)
27108 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27109 v0.AuxInt = int64ToAuxInt(0)
27110 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
27111 v1.AuxInt = int32ToAuxInt(8)
27112 v2 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27113 v2.AddArg3(ptr, v0, mem)
27114 v1.AddArg3(ptr, v0, v2)
27115 v.AddArg3(ptr, v0, v1)
27116 return true
27117 }
27118
27119
27120 for {
27121 if auxIntToInt64(v.AuxInt) != 14 {
27122 break
27123 }
27124 ptr := v_0
27125 mem := v_1
27126 v.reset(OpARM64MOVHstore)
27127 v.AuxInt = int32ToAuxInt(12)
27128 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27129 v0.AuxInt = int64ToAuxInt(0)
27130 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
27131 v1.AuxInt = int32ToAuxInt(8)
27132 v2 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27133 v2.AddArg3(ptr, v0, mem)
27134 v1.AddArg3(ptr, v0, v2)
27135 v.AddArg3(ptr, v0, v1)
27136 return true
27137 }
27138
27139
27140 for {
27141 if auxIntToInt64(v.AuxInt) != 15 {
27142 break
27143 }
27144 ptr := v_0
27145 mem := v_1
27146 v.reset(OpARM64MOVBstore)
27147 v.AuxInt = int32ToAuxInt(14)
27148 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27149 v0.AuxInt = int64ToAuxInt(0)
27150 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
27151 v1.AuxInt = int32ToAuxInt(12)
27152 v2 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
27153 v2.AuxInt = int32ToAuxInt(8)
27154 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
27155 v3.AddArg3(ptr, v0, mem)
27156 v2.AddArg3(ptr, v0, v3)
27157 v1.AddArg3(ptr, v0, v2)
27158 v.AddArg3(ptr, v0, v1)
27159 return true
27160 }
27161
27162
27163 for {
27164 if auxIntToInt64(v.AuxInt) != 16 {
27165 break
27166 }
27167 ptr := v_0
27168 mem := v_1
27169 v.reset(OpARM64STP)
27170 v.AuxInt = int32ToAuxInt(0)
27171 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27172 v0.AuxInt = int64ToAuxInt(0)
27173 v.AddArg4(ptr, v0, v0, mem)
27174 return true
27175 }
27176
27177
27178 for {
27179 if auxIntToInt64(v.AuxInt) != 32 {
27180 break
27181 }
27182 ptr := v_0
27183 mem := v_1
27184 v.reset(OpARM64STP)
27185 v.AuxInt = int32ToAuxInt(16)
27186 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27187 v0.AuxInt = int64ToAuxInt(0)
27188 v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
27189 v1.AuxInt = int32ToAuxInt(0)
27190 v1.AddArg4(ptr, v0, v0, mem)
27191 v.AddArg4(ptr, v0, v0, v1)
27192 return true
27193 }
27194
27195
27196 for {
27197 if auxIntToInt64(v.AuxInt) != 48 {
27198 break
27199 }
27200 ptr := v_0
27201 mem := v_1
27202 v.reset(OpARM64STP)
27203 v.AuxInt = int32ToAuxInt(32)
27204 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27205 v0.AuxInt = int64ToAuxInt(0)
27206 v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
27207 v1.AuxInt = int32ToAuxInt(16)
27208 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
27209 v2.AuxInt = int32ToAuxInt(0)
27210 v2.AddArg4(ptr, v0, v0, mem)
27211 v1.AddArg4(ptr, v0, v0, v2)
27212 v.AddArg4(ptr, v0, v0, v1)
27213 return true
27214 }
27215
27216
27217 for {
27218 if auxIntToInt64(v.AuxInt) != 64 {
27219 break
27220 }
27221 ptr := v_0
27222 mem := v_1
27223 v.reset(OpARM64STP)
27224 v.AuxInt = int32ToAuxInt(48)
27225 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
27226 v0.AuxInt = int64ToAuxInt(0)
27227 v1 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
27228 v1.AuxInt = int32ToAuxInt(32)
27229 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
27230 v2.AuxInt = int32ToAuxInt(16)
27231 v3 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
27232 v3.AuxInt = int32ToAuxInt(0)
27233 v3.AddArg4(ptr, v0, v0, mem)
27234 v2.AddArg4(ptr, v0, v0, v3)
27235 v1.AddArg4(ptr, v0, v0, v2)
27236 v.AddArg4(ptr, v0, v0, v1)
27237 return true
27238 }
27239
27240
27241
27242 for {
27243 s := auxIntToInt64(v.AuxInt)
27244 ptr := v_0
27245 mem := v_1
27246 if !(s%16 != 0 && s%16 <= 8 && s > 16) {
27247 break
27248 }
27249 v.reset(OpZero)
27250 v.AuxInt = int64ToAuxInt(8)
27251 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
27252 v0.AuxInt = int64ToAuxInt(s - 8)
27253 v0.AddArg(ptr)
27254 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
27255 v1.AuxInt = int64ToAuxInt(s - s%16)
27256 v1.AddArg2(ptr, mem)
27257 v.AddArg2(v0, v1)
27258 return true
27259 }
27260
27261
27262
27263 for {
27264 s := auxIntToInt64(v.AuxInt)
27265 ptr := v_0
27266 mem := v_1
27267 if !(s%16 != 0 && s%16 > 8 && s > 16) {
27268 break
27269 }
27270 v.reset(OpZero)
27271 v.AuxInt = int64ToAuxInt(16)
27272 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
27273 v0.AuxInt = int64ToAuxInt(s - 16)
27274 v0.AddArg(ptr)
27275 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
27276 v1.AuxInt = int64ToAuxInt(s - s%16)
27277 v1.AddArg2(ptr, mem)
27278 v.AddArg2(v0, v1)
27279 return true
27280 }
27281
27282
27283
27284 for {
27285 s := auxIntToInt64(v.AuxInt)
27286 ptr := v_0
27287 mem := v_1
27288 if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
27289 break
27290 }
27291 v.reset(OpARM64DUFFZERO)
27292 v.AuxInt = int64ToAuxInt(4 * (64 - s/16))
27293 v.AddArg2(ptr, mem)
27294 return true
27295 }
27296
27297
27298
27299 for {
27300 s := auxIntToInt64(v.AuxInt)
27301 ptr := v_0
27302 mem := v_1
27303 if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
27304 break
27305 }
27306 v.reset(OpARM64LoweredZero)
27307 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
27308 v0.AuxInt = int64ToAuxInt(s - 16)
27309 v0.AddArg(ptr)
27310 v.AddArg3(ptr, v0, mem)
27311 return true
27312 }
27313 return false
27314 }
27315 func rewriteBlockARM64(b *Block) bool {
27316 switch b.Kind {
27317 case BlockARM64EQ:
27318
27319
27320
27321 for b.Controls[0].Op == OpARM64CMPWconst {
27322 v_0 := b.Controls[0]
27323 if auxIntToInt32(v_0.AuxInt) != 0 {
27324 break
27325 }
27326 x := v_0.Args[0]
27327 if x.Op != OpARM64ANDconst {
27328 break
27329 }
27330 c := auxIntToInt64(x.AuxInt)
27331 y := x.Args[0]
27332 if !(x.Uses == 1) {
27333 break
27334 }
27335 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
27336 v0.AuxInt = int32ToAuxInt(int32(c))
27337 v0.AddArg(y)
27338 b.resetWithControl(BlockARM64EQ, v0)
27339 return true
27340 }
27341
27342
27343
27344 for b.Controls[0].Op == OpARM64CMPconst {
27345 v_0 := b.Controls[0]
27346 if auxIntToInt64(v_0.AuxInt) != 0 {
27347 break
27348 }
27349 z := v_0.Args[0]
27350 if z.Op != OpARM64AND {
27351 break
27352 }
27353 _ = z.Args[1]
27354 z_0 := z.Args[0]
27355 z_1 := z.Args[1]
27356 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27357 x := z_0
27358 y := z_1
27359 if !(z.Uses == 1) {
27360 continue
27361 }
27362 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
27363 v0.AddArg2(x, y)
27364 b.resetWithControl(BlockARM64EQ, v0)
27365 return true
27366 }
27367 break
27368 }
27369
27370
27371
27372 for b.Controls[0].Op == OpARM64CMPWconst {
27373 v_0 := b.Controls[0]
27374 if auxIntToInt32(v_0.AuxInt) != 0 {
27375 break
27376 }
27377 z := v_0.Args[0]
27378 if z.Op != OpARM64AND {
27379 break
27380 }
27381 _ = z.Args[1]
27382 z_0 := z.Args[0]
27383 z_1 := z.Args[1]
27384 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27385 x := z_0
27386 y := z_1
27387 if !(z.Uses == 1) {
27388 continue
27389 }
27390 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
27391 v0.AddArg2(x, y)
27392 b.resetWithControl(BlockARM64EQ, v0)
27393 return true
27394 }
27395 break
27396 }
27397
27398
27399
27400 for b.Controls[0].Op == OpARM64CMPconst {
27401 v_0 := b.Controls[0]
27402 if auxIntToInt64(v_0.AuxInt) != 0 {
27403 break
27404 }
27405 x := v_0.Args[0]
27406 if x.Op != OpARM64ANDconst {
27407 break
27408 }
27409 c := auxIntToInt64(x.AuxInt)
27410 y := x.Args[0]
27411 if !(x.Uses == 1) {
27412 break
27413 }
27414 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
27415 v0.AuxInt = int64ToAuxInt(c)
27416 v0.AddArg(y)
27417 b.resetWithControl(BlockARM64EQ, v0)
27418 return true
27419 }
27420
27421
27422
27423 for b.Controls[0].Op == OpARM64CMPconst {
27424 v_0 := b.Controls[0]
27425 if auxIntToInt64(v_0.AuxInt) != 0 {
27426 break
27427 }
27428 x := v_0.Args[0]
27429 if x.Op != OpARM64ADDconst {
27430 break
27431 }
27432 c := auxIntToInt64(x.AuxInt)
27433 y := x.Args[0]
27434 if !(x.Uses == 1) {
27435 break
27436 }
27437 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
27438 v0.AuxInt = int64ToAuxInt(c)
27439 v0.AddArg(y)
27440 b.resetWithControl(BlockARM64EQ, v0)
27441 return true
27442 }
27443
27444
27445
27446 for b.Controls[0].Op == OpARM64CMPWconst {
27447 v_0 := b.Controls[0]
27448 if auxIntToInt32(v_0.AuxInt) != 0 {
27449 break
27450 }
27451 x := v_0.Args[0]
27452 if x.Op != OpARM64ADDconst {
27453 break
27454 }
27455 c := auxIntToInt64(x.AuxInt)
27456 y := x.Args[0]
27457 if !(x.Uses == 1) {
27458 break
27459 }
27460 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
27461 v0.AuxInt = int32ToAuxInt(int32(c))
27462 v0.AddArg(y)
27463 b.resetWithControl(BlockARM64EQ, v0)
27464 return true
27465 }
27466
27467
27468
27469 for b.Controls[0].Op == OpARM64CMPconst {
27470 v_0 := b.Controls[0]
27471 if auxIntToInt64(v_0.AuxInt) != 0 {
27472 break
27473 }
27474 z := v_0.Args[0]
27475 if z.Op != OpARM64ADD {
27476 break
27477 }
27478 _ = z.Args[1]
27479 z_0 := z.Args[0]
27480 z_1 := z.Args[1]
27481 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27482 x := z_0
27483 y := z_1
27484 if !(z.Uses == 1) {
27485 continue
27486 }
27487 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
27488 v0.AddArg2(x, y)
27489 b.resetWithControl(BlockARM64EQ, v0)
27490 return true
27491 }
27492 break
27493 }
27494
27495
27496
27497 for b.Controls[0].Op == OpARM64CMPWconst {
27498 v_0 := b.Controls[0]
27499 if auxIntToInt32(v_0.AuxInt) != 0 {
27500 break
27501 }
27502 z := v_0.Args[0]
27503 if z.Op != OpARM64ADD {
27504 break
27505 }
27506 _ = z.Args[1]
27507 z_0 := z.Args[0]
27508 z_1 := z.Args[1]
27509 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27510 x := z_0
27511 y := z_1
27512 if !(z.Uses == 1) {
27513 continue
27514 }
27515 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
27516 v0.AddArg2(x, y)
27517 b.resetWithControl(BlockARM64EQ, v0)
27518 return true
27519 }
27520 break
27521 }
27522
27523
27524
27525 for b.Controls[0].Op == OpARM64CMP {
27526 v_0 := b.Controls[0]
27527 _ = v_0.Args[1]
27528 x := v_0.Args[0]
27529 z := v_0.Args[1]
27530 if z.Op != OpARM64NEG {
27531 break
27532 }
27533 y := z.Args[0]
27534 if !(z.Uses == 1) {
27535 break
27536 }
27537 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
27538 v0.AddArg2(x, y)
27539 b.resetWithControl(BlockARM64EQ, v0)
27540 return true
27541 }
27542
27543
27544
27545 for b.Controls[0].Op == OpARM64CMPW {
27546 v_0 := b.Controls[0]
27547 _ = v_0.Args[1]
27548 x := v_0.Args[0]
27549 z := v_0.Args[1]
27550 if z.Op != OpARM64NEG {
27551 break
27552 }
27553 y := z.Args[0]
27554 if !(z.Uses == 1) {
27555 break
27556 }
27557 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
27558 v0.AddArg2(x, y)
27559 b.resetWithControl(BlockARM64EQ, v0)
27560 return true
27561 }
27562
27563
27564 for b.Controls[0].Op == OpARM64CMPconst {
27565 v_0 := b.Controls[0]
27566 if auxIntToInt64(v_0.AuxInt) != 0 {
27567 break
27568 }
27569 x := v_0.Args[0]
27570 b.resetWithControl(BlockARM64Z, x)
27571 return true
27572 }
27573
27574
27575 for b.Controls[0].Op == OpARM64CMPWconst {
27576 v_0 := b.Controls[0]
27577 if auxIntToInt32(v_0.AuxInt) != 0 {
27578 break
27579 }
27580 x := v_0.Args[0]
27581 b.resetWithControl(BlockARM64ZW, x)
27582 return true
27583 }
27584
27585
27586
27587 for b.Controls[0].Op == OpARM64CMPconst {
27588 v_0 := b.Controls[0]
27589 if auxIntToInt64(v_0.AuxInt) != 0 {
27590 break
27591 }
27592 z := v_0.Args[0]
27593 if z.Op != OpARM64MADD {
27594 break
27595 }
27596 y := z.Args[2]
27597 a := z.Args[0]
27598 x := z.Args[1]
27599 if !(z.Uses == 1) {
27600 break
27601 }
27602 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
27603 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
27604 v1.AddArg2(x, y)
27605 v0.AddArg2(a, v1)
27606 b.resetWithControl(BlockARM64EQ, v0)
27607 return true
27608 }
27609
27610
27611
27612 for b.Controls[0].Op == OpARM64CMPconst {
27613 v_0 := b.Controls[0]
27614 if auxIntToInt64(v_0.AuxInt) != 0 {
27615 break
27616 }
27617 z := v_0.Args[0]
27618 if z.Op != OpARM64MSUB {
27619 break
27620 }
27621 y := z.Args[2]
27622 a := z.Args[0]
27623 x := z.Args[1]
27624 if !(z.Uses == 1) {
27625 break
27626 }
27627 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
27628 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
27629 v1.AddArg2(x, y)
27630 v0.AddArg2(a, v1)
27631 b.resetWithControl(BlockARM64EQ, v0)
27632 return true
27633 }
27634
27635
27636
27637 for b.Controls[0].Op == OpARM64CMPWconst {
27638 v_0 := b.Controls[0]
27639 if auxIntToInt32(v_0.AuxInt) != 0 {
27640 break
27641 }
27642 z := v_0.Args[0]
27643 if z.Op != OpARM64MADDW {
27644 break
27645 }
27646 y := z.Args[2]
27647 a := z.Args[0]
27648 x := z.Args[1]
27649 if !(z.Uses == 1) {
27650 break
27651 }
27652 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
27653 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
27654 v1.AddArg2(x, y)
27655 v0.AddArg2(a, v1)
27656 b.resetWithControl(BlockARM64EQ, v0)
27657 return true
27658 }
27659
27660
27661
27662 for b.Controls[0].Op == OpARM64CMPWconst {
27663 v_0 := b.Controls[0]
27664 if auxIntToInt32(v_0.AuxInt) != 0 {
27665 break
27666 }
27667 z := v_0.Args[0]
27668 if z.Op != OpARM64MSUBW {
27669 break
27670 }
27671 y := z.Args[2]
27672 a := z.Args[0]
27673 x := z.Args[1]
27674 if !(z.Uses == 1) {
27675 break
27676 }
27677 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
27678 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
27679 v1.AddArg2(x, y)
27680 v0.AddArg2(a, v1)
27681 b.resetWithControl(BlockARM64EQ, v0)
27682 return true
27683 }
27684
27685
27686
27687 for b.Controls[0].Op == OpARM64TSTconst {
27688 v_0 := b.Controls[0]
27689 c := auxIntToInt64(v_0.AuxInt)
27690 x := v_0.Args[0]
27691 if !(oneBit(c)) {
27692 break
27693 }
27694 b.resetWithControl(BlockARM64TBZ, x)
27695 b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
27696 return true
27697 }
27698
27699
27700
27701 for b.Controls[0].Op == OpARM64TSTWconst {
27702 v_0 := b.Controls[0]
27703 c := auxIntToInt32(v_0.AuxInt)
27704 x := v_0.Args[0]
27705 if !(oneBit(int64(uint32(c)))) {
27706 break
27707 }
27708 b.resetWithControl(BlockARM64TBZ, x)
27709 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
27710 return true
27711 }
27712
27713
27714
27715 for b.Controls[0].Op == OpARM64FlagConstant {
27716 v_0 := b.Controls[0]
27717 fc := auxIntToFlagConstant(v_0.AuxInt)
27718 if !(fc.eq()) {
27719 break
27720 }
27721 b.Reset(BlockFirst)
27722 return true
27723 }
27724
27725
27726
27727 for b.Controls[0].Op == OpARM64FlagConstant {
27728 v_0 := b.Controls[0]
27729 fc := auxIntToFlagConstant(v_0.AuxInt)
27730 if !(!fc.eq()) {
27731 break
27732 }
27733 b.Reset(BlockFirst)
27734 b.swapSuccessors()
27735 return true
27736 }
27737
27738
27739 for b.Controls[0].Op == OpARM64InvertFlags {
27740 v_0 := b.Controls[0]
27741 cmp := v_0.Args[0]
27742 b.resetWithControl(BlockARM64EQ, cmp)
27743 return true
27744 }
27745 case BlockARM64FGE:
27746
27747
27748 for b.Controls[0].Op == OpARM64InvertFlags {
27749 v_0 := b.Controls[0]
27750 cmp := v_0.Args[0]
27751 b.resetWithControl(BlockARM64FLE, cmp)
27752 return true
27753 }
27754 case BlockARM64FGT:
27755
27756
27757 for b.Controls[0].Op == OpARM64InvertFlags {
27758 v_0 := b.Controls[0]
27759 cmp := v_0.Args[0]
27760 b.resetWithControl(BlockARM64FLT, cmp)
27761 return true
27762 }
27763 case BlockARM64FLE:
27764
27765
27766 for b.Controls[0].Op == OpARM64InvertFlags {
27767 v_0 := b.Controls[0]
27768 cmp := v_0.Args[0]
27769 b.resetWithControl(BlockARM64FGE, cmp)
27770 return true
27771 }
27772 case BlockARM64FLT:
27773
27774
27775 for b.Controls[0].Op == OpARM64InvertFlags {
27776 v_0 := b.Controls[0]
27777 cmp := v_0.Args[0]
27778 b.resetWithControl(BlockARM64FGT, cmp)
27779 return true
27780 }
27781 case BlockARM64GE:
27782
27783
27784
27785 for b.Controls[0].Op == OpARM64CMPWconst {
27786 v_0 := b.Controls[0]
27787 if auxIntToInt32(v_0.AuxInt) != 0 {
27788 break
27789 }
27790 x := v_0.Args[0]
27791 if x.Op != OpARM64ANDconst {
27792 break
27793 }
27794 c := auxIntToInt64(x.AuxInt)
27795 y := x.Args[0]
27796 if !(x.Uses == 1) {
27797 break
27798 }
27799 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
27800 v0.AuxInt = int32ToAuxInt(int32(c))
27801 v0.AddArg(y)
27802 b.resetWithControl(BlockARM64GE, v0)
27803 return true
27804 }
27805
27806
27807
27808 for b.Controls[0].Op == OpARM64CMPconst {
27809 v_0 := b.Controls[0]
27810 if auxIntToInt64(v_0.AuxInt) != 0 {
27811 break
27812 }
27813 z := v_0.Args[0]
27814 if z.Op != OpARM64AND {
27815 break
27816 }
27817 _ = z.Args[1]
27818 z_0 := z.Args[0]
27819 z_1 := z.Args[1]
27820 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27821 x := z_0
27822 y := z_1
27823 if !(z.Uses == 1) {
27824 continue
27825 }
27826 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
27827 v0.AddArg2(x, y)
27828 b.resetWithControl(BlockARM64GE, v0)
27829 return true
27830 }
27831 break
27832 }
27833
27834
27835
27836 for b.Controls[0].Op == OpARM64CMPWconst {
27837 v_0 := b.Controls[0]
27838 if auxIntToInt32(v_0.AuxInt) != 0 {
27839 break
27840 }
27841 z := v_0.Args[0]
27842 if z.Op != OpARM64AND {
27843 break
27844 }
27845 _ = z.Args[1]
27846 z_0 := z.Args[0]
27847 z_1 := z.Args[1]
27848 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27849 x := z_0
27850 y := z_1
27851 if !(z.Uses == 1) {
27852 continue
27853 }
27854 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
27855 v0.AddArg2(x, y)
27856 b.resetWithControl(BlockARM64GE, v0)
27857 return true
27858 }
27859 break
27860 }
27861
27862
27863
27864 for b.Controls[0].Op == OpARM64CMPconst {
27865 v_0 := b.Controls[0]
27866 if auxIntToInt64(v_0.AuxInt) != 0 {
27867 break
27868 }
27869 x := v_0.Args[0]
27870 if x.Op != OpARM64ANDconst {
27871 break
27872 }
27873 c := auxIntToInt64(x.AuxInt)
27874 y := x.Args[0]
27875 if !(x.Uses == 1) {
27876 break
27877 }
27878 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
27879 v0.AuxInt = int64ToAuxInt(c)
27880 v0.AddArg(y)
27881 b.resetWithControl(BlockARM64GE, v0)
27882 return true
27883 }
27884
27885
27886
27887 for b.Controls[0].Op == OpARM64CMPconst {
27888 v_0 := b.Controls[0]
27889 if auxIntToInt64(v_0.AuxInt) != 0 {
27890 break
27891 }
27892 x := v_0.Args[0]
27893 if x.Op != OpARM64ADDconst {
27894 break
27895 }
27896 c := auxIntToInt64(x.AuxInt)
27897 y := x.Args[0]
27898 if !(x.Uses == 1) {
27899 break
27900 }
27901 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
27902 v0.AuxInt = int64ToAuxInt(c)
27903 v0.AddArg(y)
27904 b.resetWithControl(BlockARM64GEnoov, v0)
27905 return true
27906 }
27907
27908
27909
27910 for b.Controls[0].Op == OpARM64CMPWconst {
27911 v_0 := b.Controls[0]
27912 if auxIntToInt32(v_0.AuxInt) != 0 {
27913 break
27914 }
27915 x := v_0.Args[0]
27916 if x.Op != OpARM64ADDconst {
27917 break
27918 }
27919 c := auxIntToInt64(x.AuxInt)
27920 y := x.Args[0]
27921 if !(x.Uses == 1) {
27922 break
27923 }
27924 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
27925 v0.AuxInt = int32ToAuxInt(int32(c))
27926 v0.AddArg(y)
27927 b.resetWithControl(BlockARM64GEnoov, v0)
27928 return true
27929 }
27930
27931
27932
27933 for b.Controls[0].Op == OpARM64CMPconst {
27934 v_0 := b.Controls[0]
27935 if auxIntToInt64(v_0.AuxInt) != 0 {
27936 break
27937 }
27938 z := v_0.Args[0]
27939 if z.Op != OpARM64ADD {
27940 break
27941 }
27942 _ = z.Args[1]
27943 z_0 := z.Args[0]
27944 z_1 := z.Args[1]
27945 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27946 x := z_0
27947 y := z_1
27948 if !(z.Uses == 1) {
27949 continue
27950 }
27951 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
27952 v0.AddArg2(x, y)
27953 b.resetWithControl(BlockARM64GEnoov, v0)
27954 return true
27955 }
27956 break
27957 }
27958
27959
27960
27961 for b.Controls[0].Op == OpARM64CMPWconst {
27962 v_0 := b.Controls[0]
27963 if auxIntToInt32(v_0.AuxInt) != 0 {
27964 break
27965 }
27966 z := v_0.Args[0]
27967 if z.Op != OpARM64ADD {
27968 break
27969 }
27970 _ = z.Args[1]
27971 z_0 := z.Args[0]
27972 z_1 := z.Args[1]
27973 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
27974 x := z_0
27975 y := z_1
27976 if !(z.Uses == 1) {
27977 continue
27978 }
27979 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
27980 v0.AddArg2(x, y)
27981 b.resetWithControl(BlockARM64GEnoov, v0)
27982 return true
27983 }
27984 break
27985 }
27986
27987
27988
27989 for b.Controls[0].Op == OpARM64CMPconst {
27990 v_0 := b.Controls[0]
27991 if auxIntToInt64(v_0.AuxInt) != 0 {
27992 break
27993 }
27994 z := v_0.Args[0]
27995 if z.Op != OpARM64MADD {
27996 break
27997 }
27998 y := z.Args[2]
27999 a := z.Args[0]
28000 x := z.Args[1]
28001 if !(z.Uses == 1) {
28002 break
28003 }
28004 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
28005 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
28006 v1.AddArg2(x, y)
28007 v0.AddArg2(a, v1)
28008 b.resetWithControl(BlockARM64GEnoov, v0)
28009 return true
28010 }
28011
28012
28013
28014 for b.Controls[0].Op == OpARM64CMPconst {
28015 v_0 := b.Controls[0]
28016 if auxIntToInt64(v_0.AuxInt) != 0 {
28017 break
28018 }
28019 z := v_0.Args[0]
28020 if z.Op != OpARM64MSUB {
28021 break
28022 }
28023 y := z.Args[2]
28024 a := z.Args[0]
28025 x := z.Args[1]
28026 if !(z.Uses == 1) {
28027 break
28028 }
28029 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
28030 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
28031 v1.AddArg2(x, y)
28032 v0.AddArg2(a, v1)
28033 b.resetWithControl(BlockARM64GEnoov, v0)
28034 return true
28035 }
28036
28037
28038
28039 for b.Controls[0].Op == OpARM64CMPWconst {
28040 v_0 := b.Controls[0]
28041 if auxIntToInt32(v_0.AuxInt) != 0 {
28042 break
28043 }
28044 z := v_0.Args[0]
28045 if z.Op != OpARM64MADDW {
28046 break
28047 }
28048 y := z.Args[2]
28049 a := z.Args[0]
28050 x := z.Args[1]
28051 if !(z.Uses == 1) {
28052 break
28053 }
28054 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
28055 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
28056 v1.AddArg2(x, y)
28057 v0.AddArg2(a, v1)
28058 b.resetWithControl(BlockARM64GEnoov, v0)
28059 return true
28060 }
28061
28062
28063
28064 for b.Controls[0].Op == OpARM64CMPWconst {
28065 v_0 := b.Controls[0]
28066 if auxIntToInt32(v_0.AuxInt) != 0 {
28067 break
28068 }
28069 z := v_0.Args[0]
28070 if z.Op != OpARM64MSUBW {
28071 break
28072 }
28073 y := z.Args[2]
28074 a := z.Args[0]
28075 x := z.Args[1]
28076 if !(z.Uses == 1) {
28077 break
28078 }
28079 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
28080 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
28081 v1.AddArg2(x, y)
28082 v0.AddArg2(a, v1)
28083 b.resetWithControl(BlockARM64GEnoov, v0)
28084 return true
28085 }
28086
28087
28088 for b.Controls[0].Op == OpARM64CMPWconst {
28089 v_0 := b.Controls[0]
28090 if auxIntToInt32(v_0.AuxInt) != 0 {
28091 break
28092 }
28093 x := v_0.Args[0]
28094 b.resetWithControl(BlockARM64TBZ, x)
28095 b.AuxInt = int64ToAuxInt(31)
28096 return true
28097 }
28098
28099
28100 for b.Controls[0].Op == OpARM64CMPconst {
28101 v_0 := b.Controls[0]
28102 if auxIntToInt64(v_0.AuxInt) != 0 {
28103 break
28104 }
28105 x := v_0.Args[0]
28106 b.resetWithControl(BlockARM64TBZ, x)
28107 b.AuxInt = int64ToAuxInt(63)
28108 return true
28109 }
28110
28111
28112
28113 for b.Controls[0].Op == OpARM64FlagConstant {
28114 v_0 := b.Controls[0]
28115 fc := auxIntToFlagConstant(v_0.AuxInt)
28116 if !(fc.ge()) {
28117 break
28118 }
28119 b.Reset(BlockFirst)
28120 return true
28121 }
28122
28123
28124
28125 for b.Controls[0].Op == OpARM64FlagConstant {
28126 v_0 := b.Controls[0]
28127 fc := auxIntToFlagConstant(v_0.AuxInt)
28128 if !(!fc.ge()) {
28129 break
28130 }
28131 b.Reset(BlockFirst)
28132 b.swapSuccessors()
28133 return true
28134 }
28135
28136
28137 for b.Controls[0].Op == OpARM64InvertFlags {
28138 v_0 := b.Controls[0]
28139 cmp := v_0.Args[0]
28140 b.resetWithControl(BlockARM64LE, cmp)
28141 return true
28142 }
28143 case BlockARM64GEnoov:
28144
28145
28146
28147 for b.Controls[0].Op == OpARM64FlagConstant {
28148 v_0 := b.Controls[0]
28149 fc := auxIntToFlagConstant(v_0.AuxInt)
28150 if !(fc.geNoov()) {
28151 break
28152 }
28153 b.Reset(BlockFirst)
28154 return true
28155 }
28156
28157
28158
28159 for b.Controls[0].Op == OpARM64FlagConstant {
28160 v_0 := b.Controls[0]
28161 fc := auxIntToFlagConstant(v_0.AuxInt)
28162 if !(!fc.geNoov()) {
28163 break
28164 }
28165 b.Reset(BlockFirst)
28166 b.swapSuccessors()
28167 return true
28168 }
28169
28170
28171 for b.Controls[0].Op == OpARM64InvertFlags {
28172 v_0 := b.Controls[0]
28173 cmp := v_0.Args[0]
28174 b.resetWithControl(BlockARM64LEnoov, cmp)
28175 return true
28176 }
28177 case BlockARM64GT:
28178
28179
28180
28181 for b.Controls[0].Op == OpARM64CMPWconst {
28182 v_0 := b.Controls[0]
28183 if auxIntToInt32(v_0.AuxInt) != 0 {
28184 break
28185 }
28186 x := v_0.Args[0]
28187 if x.Op != OpARM64ANDconst {
28188 break
28189 }
28190 c := auxIntToInt64(x.AuxInt)
28191 y := x.Args[0]
28192 if !(x.Uses == 1) {
28193 break
28194 }
28195 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
28196 v0.AuxInt = int32ToAuxInt(int32(c))
28197 v0.AddArg(y)
28198 b.resetWithControl(BlockARM64GT, v0)
28199 return true
28200 }
28201
28202
28203
28204 for b.Controls[0].Op == OpARM64CMPconst {
28205 v_0 := b.Controls[0]
28206 if auxIntToInt64(v_0.AuxInt) != 0 {
28207 break
28208 }
28209 z := v_0.Args[0]
28210 if z.Op != OpARM64AND {
28211 break
28212 }
28213 _ = z.Args[1]
28214 z_0 := z.Args[0]
28215 z_1 := z.Args[1]
28216 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28217 x := z_0
28218 y := z_1
28219 if !(z.Uses == 1) {
28220 continue
28221 }
28222 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
28223 v0.AddArg2(x, y)
28224 b.resetWithControl(BlockARM64GT, v0)
28225 return true
28226 }
28227 break
28228 }
28229
28230
28231
28232 for b.Controls[0].Op == OpARM64CMPWconst {
28233 v_0 := b.Controls[0]
28234 if auxIntToInt32(v_0.AuxInt) != 0 {
28235 break
28236 }
28237 z := v_0.Args[0]
28238 if z.Op != OpARM64AND {
28239 break
28240 }
28241 _ = z.Args[1]
28242 z_0 := z.Args[0]
28243 z_1 := z.Args[1]
28244 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28245 x := z_0
28246 y := z_1
28247 if !(z.Uses == 1) {
28248 continue
28249 }
28250 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
28251 v0.AddArg2(x, y)
28252 b.resetWithControl(BlockARM64GT, v0)
28253 return true
28254 }
28255 break
28256 }
28257
28258
28259
28260 for b.Controls[0].Op == OpARM64CMPconst {
28261 v_0 := b.Controls[0]
28262 if auxIntToInt64(v_0.AuxInt) != 0 {
28263 break
28264 }
28265 x := v_0.Args[0]
28266 if x.Op != OpARM64ANDconst {
28267 break
28268 }
28269 c := auxIntToInt64(x.AuxInt)
28270 y := x.Args[0]
28271 if !(x.Uses == 1) {
28272 break
28273 }
28274 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
28275 v0.AuxInt = int64ToAuxInt(c)
28276 v0.AddArg(y)
28277 b.resetWithControl(BlockARM64GT, v0)
28278 return true
28279 }
28280
28281
28282
28283 for b.Controls[0].Op == OpARM64CMPconst {
28284 v_0 := b.Controls[0]
28285 if auxIntToInt64(v_0.AuxInt) != 0 {
28286 break
28287 }
28288 x := v_0.Args[0]
28289 if x.Op != OpARM64ADDconst {
28290 break
28291 }
28292 c := auxIntToInt64(x.AuxInt)
28293 y := x.Args[0]
28294 if !(x.Uses == 1) {
28295 break
28296 }
28297 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
28298 v0.AuxInt = int64ToAuxInt(c)
28299 v0.AddArg(y)
28300 b.resetWithControl(BlockARM64GTnoov, v0)
28301 return true
28302 }
28303
28304
28305
28306 for b.Controls[0].Op == OpARM64CMPWconst {
28307 v_0 := b.Controls[0]
28308 if auxIntToInt32(v_0.AuxInt) != 0 {
28309 break
28310 }
28311 x := v_0.Args[0]
28312 if x.Op != OpARM64ADDconst {
28313 break
28314 }
28315 c := auxIntToInt64(x.AuxInt)
28316 y := x.Args[0]
28317 if !(x.Uses == 1) {
28318 break
28319 }
28320 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
28321 v0.AuxInt = int32ToAuxInt(int32(c))
28322 v0.AddArg(y)
28323 b.resetWithControl(BlockARM64GTnoov, v0)
28324 return true
28325 }
28326
28327
28328
28329 for b.Controls[0].Op == OpARM64CMPconst {
28330 v_0 := b.Controls[0]
28331 if auxIntToInt64(v_0.AuxInt) != 0 {
28332 break
28333 }
28334 z := v_0.Args[0]
28335 if z.Op != OpARM64ADD {
28336 break
28337 }
28338 _ = z.Args[1]
28339 z_0 := z.Args[0]
28340 z_1 := z.Args[1]
28341 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28342 x := z_0
28343 y := z_1
28344 if !(z.Uses == 1) {
28345 continue
28346 }
28347 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
28348 v0.AddArg2(x, y)
28349 b.resetWithControl(BlockARM64GTnoov, v0)
28350 return true
28351 }
28352 break
28353 }
28354
28355
28356
28357 for b.Controls[0].Op == OpARM64CMPWconst {
28358 v_0 := b.Controls[0]
28359 if auxIntToInt32(v_0.AuxInt) != 0 {
28360 break
28361 }
28362 z := v_0.Args[0]
28363 if z.Op != OpARM64ADD {
28364 break
28365 }
28366 _ = z.Args[1]
28367 z_0 := z.Args[0]
28368 z_1 := z.Args[1]
28369 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28370 x := z_0
28371 y := z_1
28372 if !(z.Uses == 1) {
28373 continue
28374 }
28375 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
28376 v0.AddArg2(x, y)
28377 b.resetWithControl(BlockARM64GTnoov, v0)
28378 return true
28379 }
28380 break
28381 }
28382
28383
28384
28385 for b.Controls[0].Op == OpARM64CMPconst {
28386 v_0 := b.Controls[0]
28387 if auxIntToInt64(v_0.AuxInt) != 0 {
28388 break
28389 }
28390 z := v_0.Args[0]
28391 if z.Op != OpARM64MADD {
28392 break
28393 }
28394 y := z.Args[2]
28395 a := z.Args[0]
28396 x := z.Args[1]
28397 if !(z.Uses == 1) {
28398 break
28399 }
28400 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
28401 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
28402 v1.AddArg2(x, y)
28403 v0.AddArg2(a, v1)
28404 b.resetWithControl(BlockARM64GTnoov, v0)
28405 return true
28406 }
28407
28408
28409
28410 for b.Controls[0].Op == OpARM64CMPconst {
28411 v_0 := b.Controls[0]
28412 if auxIntToInt64(v_0.AuxInt) != 0 {
28413 break
28414 }
28415 z := v_0.Args[0]
28416 if z.Op != OpARM64MSUB {
28417 break
28418 }
28419 y := z.Args[2]
28420 a := z.Args[0]
28421 x := z.Args[1]
28422 if !(z.Uses == 1) {
28423 break
28424 }
28425 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
28426 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
28427 v1.AddArg2(x, y)
28428 v0.AddArg2(a, v1)
28429 b.resetWithControl(BlockARM64GTnoov, v0)
28430 return true
28431 }
28432
28433
28434
28435 for b.Controls[0].Op == OpARM64CMPWconst {
28436 v_0 := b.Controls[0]
28437 if auxIntToInt32(v_0.AuxInt) != 0 {
28438 break
28439 }
28440 z := v_0.Args[0]
28441 if z.Op != OpARM64MADDW {
28442 break
28443 }
28444 y := z.Args[2]
28445 a := z.Args[0]
28446 x := z.Args[1]
28447 if !(z.Uses == 1) {
28448 break
28449 }
28450 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
28451 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
28452 v1.AddArg2(x, y)
28453 v0.AddArg2(a, v1)
28454 b.resetWithControl(BlockARM64GTnoov, v0)
28455 return true
28456 }
28457
28458
28459
28460 for b.Controls[0].Op == OpARM64CMPWconst {
28461 v_0 := b.Controls[0]
28462 if auxIntToInt32(v_0.AuxInt) != 0 {
28463 break
28464 }
28465 z := v_0.Args[0]
28466 if z.Op != OpARM64MSUBW {
28467 break
28468 }
28469 y := z.Args[2]
28470 a := z.Args[0]
28471 x := z.Args[1]
28472 if !(z.Uses == 1) {
28473 break
28474 }
28475 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
28476 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
28477 v1.AddArg2(x, y)
28478 v0.AddArg2(a, v1)
28479 b.resetWithControl(BlockARM64GTnoov, v0)
28480 return true
28481 }
28482
28483
28484
28485 for b.Controls[0].Op == OpARM64FlagConstant {
28486 v_0 := b.Controls[0]
28487 fc := auxIntToFlagConstant(v_0.AuxInt)
28488 if !(fc.gt()) {
28489 break
28490 }
28491 b.Reset(BlockFirst)
28492 return true
28493 }
28494
28495
28496
28497 for b.Controls[0].Op == OpARM64FlagConstant {
28498 v_0 := b.Controls[0]
28499 fc := auxIntToFlagConstant(v_0.AuxInt)
28500 if !(!fc.gt()) {
28501 break
28502 }
28503 b.Reset(BlockFirst)
28504 b.swapSuccessors()
28505 return true
28506 }
28507
28508
28509 for b.Controls[0].Op == OpARM64InvertFlags {
28510 v_0 := b.Controls[0]
28511 cmp := v_0.Args[0]
28512 b.resetWithControl(BlockARM64LT, cmp)
28513 return true
28514 }
28515 case BlockARM64GTnoov:
28516
28517
28518
28519 for b.Controls[0].Op == OpARM64FlagConstant {
28520 v_0 := b.Controls[0]
28521 fc := auxIntToFlagConstant(v_0.AuxInt)
28522 if !(fc.gtNoov()) {
28523 break
28524 }
28525 b.Reset(BlockFirst)
28526 return true
28527 }
28528
28529
28530
28531 for b.Controls[0].Op == OpARM64FlagConstant {
28532 v_0 := b.Controls[0]
28533 fc := auxIntToFlagConstant(v_0.AuxInt)
28534 if !(!fc.gtNoov()) {
28535 break
28536 }
28537 b.Reset(BlockFirst)
28538 b.swapSuccessors()
28539 return true
28540 }
28541
28542
28543 for b.Controls[0].Op == OpARM64InvertFlags {
28544 v_0 := b.Controls[0]
28545 cmp := v_0.Args[0]
28546 b.resetWithControl(BlockARM64LTnoov, cmp)
28547 return true
28548 }
28549 case BlockIf:
28550
28551
28552 for b.Controls[0].Op == OpARM64Equal {
28553 v_0 := b.Controls[0]
28554 cc := v_0.Args[0]
28555 b.resetWithControl(BlockARM64EQ, cc)
28556 return true
28557 }
28558
28559
28560 for b.Controls[0].Op == OpARM64NotEqual {
28561 v_0 := b.Controls[0]
28562 cc := v_0.Args[0]
28563 b.resetWithControl(BlockARM64NE, cc)
28564 return true
28565 }
28566
28567
28568 for b.Controls[0].Op == OpARM64LessThan {
28569 v_0 := b.Controls[0]
28570 cc := v_0.Args[0]
28571 b.resetWithControl(BlockARM64LT, cc)
28572 return true
28573 }
28574
28575
28576 for b.Controls[0].Op == OpARM64LessThanU {
28577 v_0 := b.Controls[0]
28578 cc := v_0.Args[0]
28579 b.resetWithControl(BlockARM64ULT, cc)
28580 return true
28581 }
28582
28583
28584 for b.Controls[0].Op == OpARM64LessEqual {
28585 v_0 := b.Controls[0]
28586 cc := v_0.Args[0]
28587 b.resetWithControl(BlockARM64LE, cc)
28588 return true
28589 }
28590
28591
28592 for b.Controls[0].Op == OpARM64LessEqualU {
28593 v_0 := b.Controls[0]
28594 cc := v_0.Args[0]
28595 b.resetWithControl(BlockARM64ULE, cc)
28596 return true
28597 }
28598
28599
28600 for b.Controls[0].Op == OpARM64GreaterThan {
28601 v_0 := b.Controls[0]
28602 cc := v_0.Args[0]
28603 b.resetWithControl(BlockARM64GT, cc)
28604 return true
28605 }
28606
28607
28608 for b.Controls[0].Op == OpARM64GreaterThanU {
28609 v_0 := b.Controls[0]
28610 cc := v_0.Args[0]
28611 b.resetWithControl(BlockARM64UGT, cc)
28612 return true
28613 }
28614
28615
28616 for b.Controls[0].Op == OpARM64GreaterEqual {
28617 v_0 := b.Controls[0]
28618 cc := v_0.Args[0]
28619 b.resetWithControl(BlockARM64GE, cc)
28620 return true
28621 }
28622
28623
28624 for b.Controls[0].Op == OpARM64GreaterEqualU {
28625 v_0 := b.Controls[0]
28626 cc := v_0.Args[0]
28627 b.resetWithControl(BlockARM64UGE, cc)
28628 return true
28629 }
28630
28631
28632 for b.Controls[0].Op == OpARM64LessThanF {
28633 v_0 := b.Controls[0]
28634 cc := v_0.Args[0]
28635 b.resetWithControl(BlockARM64FLT, cc)
28636 return true
28637 }
28638
28639
28640 for b.Controls[0].Op == OpARM64LessEqualF {
28641 v_0 := b.Controls[0]
28642 cc := v_0.Args[0]
28643 b.resetWithControl(BlockARM64FLE, cc)
28644 return true
28645 }
28646
28647
28648 for b.Controls[0].Op == OpARM64GreaterThanF {
28649 v_0 := b.Controls[0]
28650 cc := v_0.Args[0]
28651 b.resetWithControl(BlockARM64FGT, cc)
28652 return true
28653 }
28654
28655
28656 for b.Controls[0].Op == OpARM64GreaterEqualF {
28657 v_0 := b.Controls[0]
28658 cc := v_0.Args[0]
28659 b.resetWithControl(BlockARM64FGE, cc)
28660 return true
28661 }
28662
28663
28664 for {
28665 cond := b.Controls[0]
28666 b.resetWithControl(BlockARM64NZ, cond)
28667 return true
28668 }
28669 case BlockARM64LE:
28670
28671
28672
28673 for b.Controls[0].Op == OpARM64CMPWconst {
28674 v_0 := b.Controls[0]
28675 if auxIntToInt32(v_0.AuxInt) != 0 {
28676 break
28677 }
28678 x := v_0.Args[0]
28679 if x.Op != OpARM64ANDconst {
28680 break
28681 }
28682 c := auxIntToInt64(x.AuxInt)
28683 y := x.Args[0]
28684 if !(x.Uses == 1) {
28685 break
28686 }
28687 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
28688 v0.AuxInt = int32ToAuxInt(int32(c))
28689 v0.AddArg(y)
28690 b.resetWithControl(BlockARM64LE, v0)
28691 return true
28692 }
28693
28694
28695
28696 for b.Controls[0].Op == OpARM64CMPconst {
28697 v_0 := b.Controls[0]
28698 if auxIntToInt64(v_0.AuxInt) != 0 {
28699 break
28700 }
28701 z := v_0.Args[0]
28702 if z.Op != OpARM64AND {
28703 break
28704 }
28705 _ = z.Args[1]
28706 z_0 := z.Args[0]
28707 z_1 := z.Args[1]
28708 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28709 x := z_0
28710 y := z_1
28711 if !(z.Uses == 1) {
28712 continue
28713 }
28714 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
28715 v0.AddArg2(x, y)
28716 b.resetWithControl(BlockARM64LE, v0)
28717 return true
28718 }
28719 break
28720 }
28721
28722
28723
28724 for b.Controls[0].Op == OpARM64CMPWconst {
28725 v_0 := b.Controls[0]
28726 if auxIntToInt32(v_0.AuxInt) != 0 {
28727 break
28728 }
28729 z := v_0.Args[0]
28730 if z.Op != OpARM64AND {
28731 break
28732 }
28733 _ = z.Args[1]
28734 z_0 := z.Args[0]
28735 z_1 := z.Args[1]
28736 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28737 x := z_0
28738 y := z_1
28739 if !(z.Uses == 1) {
28740 continue
28741 }
28742 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
28743 v0.AddArg2(x, y)
28744 b.resetWithControl(BlockARM64LE, v0)
28745 return true
28746 }
28747 break
28748 }
28749
28750
28751
28752 for b.Controls[0].Op == OpARM64CMPconst {
28753 v_0 := b.Controls[0]
28754 if auxIntToInt64(v_0.AuxInt) != 0 {
28755 break
28756 }
28757 x := v_0.Args[0]
28758 if x.Op != OpARM64ANDconst {
28759 break
28760 }
28761 c := auxIntToInt64(x.AuxInt)
28762 y := x.Args[0]
28763 if !(x.Uses == 1) {
28764 break
28765 }
28766 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
28767 v0.AuxInt = int64ToAuxInt(c)
28768 v0.AddArg(y)
28769 b.resetWithControl(BlockARM64LE, v0)
28770 return true
28771 }
28772
28773
28774
28775 for b.Controls[0].Op == OpARM64CMPconst {
28776 v_0 := b.Controls[0]
28777 if auxIntToInt64(v_0.AuxInt) != 0 {
28778 break
28779 }
28780 x := v_0.Args[0]
28781 if x.Op != OpARM64ADDconst {
28782 break
28783 }
28784 c := auxIntToInt64(x.AuxInt)
28785 y := x.Args[0]
28786 if !(x.Uses == 1) {
28787 break
28788 }
28789 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
28790 v0.AuxInt = int64ToAuxInt(c)
28791 v0.AddArg(y)
28792 b.resetWithControl(BlockARM64LEnoov, v0)
28793 return true
28794 }
28795
28796
28797
28798 for b.Controls[0].Op == OpARM64CMPWconst {
28799 v_0 := b.Controls[0]
28800 if auxIntToInt32(v_0.AuxInt) != 0 {
28801 break
28802 }
28803 x := v_0.Args[0]
28804 if x.Op != OpARM64ADDconst {
28805 break
28806 }
28807 c := auxIntToInt64(x.AuxInt)
28808 y := x.Args[0]
28809 if !(x.Uses == 1) {
28810 break
28811 }
28812 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
28813 v0.AuxInt = int32ToAuxInt(int32(c))
28814 v0.AddArg(y)
28815 b.resetWithControl(BlockARM64LEnoov, v0)
28816 return true
28817 }
28818
28819
28820
28821 for b.Controls[0].Op == OpARM64CMPconst {
28822 v_0 := b.Controls[0]
28823 if auxIntToInt64(v_0.AuxInt) != 0 {
28824 break
28825 }
28826 z := v_0.Args[0]
28827 if z.Op != OpARM64ADD {
28828 break
28829 }
28830 _ = z.Args[1]
28831 z_0 := z.Args[0]
28832 z_1 := z.Args[1]
28833 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28834 x := z_0
28835 y := z_1
28836 if !(z.Uses == 1) {
28837 continue
28838 }
28839 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
28840 v0.AddArg2(x, y)
28841 b.resetWithControl(BlockARM64LEnoov, v0)
28842 return true
28843 }
28844 break
28845 }
28846
28847
28848
28849 for b.Controls[0].Op == OpARM64CMPWconst {
28850 v_0 := b.Controls[0]
28851 if auxIntToInt32(v_0.AuxInt) != 0 {
28852 break
28853 }
28854 z := v_0.Args[0]
28855 if z.Op != OpARM64ADD {
28856 break
28857 }
28858 _ = z.Args[1]
28859 z_0 := z.Args[0]
28860 z_1 := z.Args[1]
28861 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
28862 x := z_0
28863 y := z_1
28864 if !(z.Uses == 1) {
28865 continue
28866 }
28867 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
28868 v0.AddArg2(x, y)
28869 b.resetWithControl(BlockARM64LEnoov, v0)
28870 return true
28871 }
28872 break
28873 }
28874
28875
28876
28877 for b.Controls[0].Op == OpARM64CMPconst {
28878 v_0 := b.Controls[0]
28879 if auxIntToInt64(v_0.AuxInt) != 0 {
28880 break
28881 }
28882 z := v_0.Args[0]
28883 if z.Op != OpARM64MADD {
28884 break
28885 }
28886 y := z.Args[2]
28887 a := z.Args[0]
28888 x := z.Args[1]
28889 if !(z.Uses == 1) {
28890 break
28891 }
28892 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
28893 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
28894 v1.AddArg2(x, y)
28895 v0.AddArg2(a, v1)
28896 b.resetWithControl(BlockARM64LEnoov, v0)
28897 return true
28898 }
28899
28900
28901
28902 for b.Controls[0].Op == OpARM64CMPconst {
28903 v_0 := b.Controls[0]
28904 if auxIntToInt64(v_0.AuxInt) != 0 {
28905 break
28906 }
28907 z := v_0.Args[0]
28908 if z.Op != OpARM64MSUB {
28909 break
28910 }
28911 y := z.Args[2]
28912 a := z.Args[0]
28913 x := z.Args[1]
28914 if !(z.Uses == 1) {
28915 break
28916 }
28917 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
28918 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
28919 v1.AddArg2(x, y)
28920 v0.AddArg2(a, v1)
28921 b.resetWithControl(BlockARM64LEnoov, v0)
28922 return true
28923 }
28924
28925
28926
28927 for b.Controls[0].Op == OpARM64CMPWconst {
28928 v_0 := b.Controls[0]
28929 if auxIntToInt32(v_0.AuxInt) != 0 {
28930 break
28931 }
28932 z := v_0.Args[0]
28933 if z.Op != OpARM64MADDW {
28934 break
28935 }
28936 y := z.Args[2]
28937 a := z.Args[0]
28938 x := z.Args[1]
28939 if !(z.Uses == 1) {
28940 break
28941 }
28942 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
28943 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
28944 v1.AddArg2(x, y)
28945 v0.AddArg2(a, v1)
28946 b.resetWithControl(BlockARM64LEnoov, v0)
28947 return true
28948 }
28949
28950
28951
28952 for b.Controls[0].Op == OpARM64CMPWconst {
28953 v_0 := b.Controls[0]
28954 if auxIntToInt32(v_0.AuxInt) != 0 {
28955 break
28956 }
28957 z := v_0.Args[0]
28958 if z.Op != OpARM64MSUBW {
28959 break
28960 }
28961 y := z.Args[2]
28962 a := z.Args[0]
28963 x := z.Args[1]
28964 if !(z.Uses == 1) {
28965 break
28966 }
28967 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
28968 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
28969 v1.AddArg2(x, y)
28970 v0.AddArg2(a, v1)
28971 b.resetWithControl(BlockARM64LEnoov, v0)
28972 return true
28973 }
28974
28975
28976
28977 for b.Controls[0].Op == OpARM64FlagConstant {
28978 v_0 := b.Controls[0]
28979 fc := auxIntToFlagConstant(v_0.AuxInt)
28980 if !(fc.le()) {
28981 break
28982 }
28983 b.Reset(BlockFirst)
28984 return true
28985 }
28986
28987
28988
28989 for b.Controls[0].Op == OpARM64FlagConstant {
28990 v_0 := b.Controls[0]
28991 fc := auxIntToFlagConstant(v_0.AuxInt)
28992 if !(!fc.le()) {
28993 break
28994 }
28995 b.Reset(BlockFirst)
28996 b.swapSuccessors()
28997 return true
28998 }
28999
29000
29001 for b.Controls[0].Op == OpARM64InvertFlags {
29002 v_0 := b.Controls[0]
29003 cmp := v_0.Args[0]
29004 b.resetWithControl(BlockARM64GE, cmp)
29005 return true
29006 }
29007 case BlockARM64LEnoov:
29008
29009
29010
29011 for b.Controls[0].Op == OpARM64FlagConstant {
29012 v_0 := b.Controls[0]
29013 fc := auxIntToFlagConstant(v_0.AuxInt)
29014 if !(fc.leNoov()) {
29015 break
29016 }
29017 b.Reset(BlockFirst)
29018 return true
29019 }
29020
29021
29022
29023 for b.Controls[0].Op == OpARM64FlagConstant {
29024 v_0 := b.Controls[0]
29025 fc := auxIntToFlagConstant(v_0.AuxInt)
29026 if !(!fc.leNoov()) {
29027 break
29028 }
29029 b.Reset(BlockFirst)
29030 b.swapSuccessors()
29031 return true
29032 }
29033
29034
29035 for b.Controls[0].Op == OpARM64InvertFlags {
29036 v_0 := b.Controls[0]
29037 cmp := v_0.Args[0]
29038 b.resetWithControl(BlockARM64GEnoov, cmp)
29039 return true
29040 }
29041 case BlockARM64LT:
29042
29043
29044
29045 for b.Controls[0].Op == OpARM64CMPWconst {
29046 v_0 := b.Controls[0]
29047 if auxIntToInt32(v_0.AuxInt) != 0 {
29048 break
29049 }
29050 x := v_0.Args[0]
29051 if x.Op != OpARM64ANDconst {
29052 break
29053 }
29054 c := auxIntToInt64(x.AuxInt)
29055 y := x.Args[0]
29056 if !(x.Uses == 1) {
29057 break
29058 }
29059 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
29060 v0.AuxInt = int32ToAuxInt(int32(c))
29061 v0.AddArg(y)
29062 b.resetWithControl(BlockARM64LT, v0)
29063 return true
29064 }
29065
29066
29067
29068 for b.Controls[0].Op == OpARM64CMPconst {
29069 v_0 := b.Controls[0]
29070 if auxIntToInt64(v_0.AuxInt) != 0 {
29071 break
29072 }
29073 z := v_0.Args[0]
29074 if z.Op != OpARM64AND {
29075 break
29076 }
29077 _ = z.Args[1]
29078 z_0 := z.Args[0]
29079 z_1 := z.Args[1]
29080 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29081 x := z_0
29082 y := z_1
29083 if !(z.Uses == 1) {
29084 continue
29085 }
29086 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
29087 v0.AddArg2(x, y)
29088 b.resetWithControl(BlockARM64LT, v0)
29089 return true
29090 }
29091 break
29092 }
29093
29094
29095
29096 for b.Controls[0].Op == OpARM64CMPWconst {
29097 v_0 := b.Controls[0]
29098 if auxIntToInt32(v_0.AuxInt) != 0 {
29099 break
29100 }
29101 z := v_0.Args[0]
29102 if z.Op != OpARM64AND {
29103 break
29104 }
29105 _ = z.Args[1]
29106 z_0 := z.Args[0]
29107 z_1 := z.Args[1]
29108 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29109 x := z_0
29110 y := z_1
29111 if !(z.Uses == 1) {
29112 continue
29113 }
29114 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
29115 v0.AddArg2(x, y)
29116 b.resetWithControl(BlockARM64LT, v0)
29117 return true
29118 }
29119 break
29120 }
29121
29122
29123
29124 for b.Controls[0].Op == OpARM64CMPconst {
29125 v_0 := b.Controls[0]
29126 if auxIntToInt64(v_0.AuxInt) != 0 {
29127 break
29128 }
29129 x := v_0.Args[0]
29130 if x.Op != OpARM64ANDconst {
29131 break
29132 }
29133 c := auxIntToInt64(x.AuxInt)
29134 y := x.Args[0]
29135 if !(x.Uses == 1) {
29136 break
29137 }
29138 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
29139 v0.AuxInt = int64ToAuxInt(c)
29140 v0.AddArg(y)
29141 b.resetWithControl(BlockARM64LT, v0)
29142 return true
29143 }
29144
29145
29146
29147 for b.Controls[0].Op == OpARM64CMPconst {
29148 v_0 := b.Controls[0]
29149 if auxIntToInt64(v_0.AuxInt) != 0 {
29150 break
29151 }
29152 x := v_0.Args[0]
29153 if x.Op != OpARM64ADDconst {
29154 break
29155 }
29156 c := auxIntToInt64(x.AuxInt)
29157 y := x.Args[0]
29158 if !(x.Uses == 1) {
29159 break
29160 }
29161 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
29162 v0.AuxInt = int64ToAuxInt(c)
29163 v0.AddArg(y)
29164 b.resetWithControl(BlockARM64LTnoov, v0)
29165 return true
29166 }
29167
29168
29169
29170 for b.Controls[0].Op == OpARM64CMPWconst {
29171 v_0 := b.Controls[0]
29172 if auxIntToInt32(v_0.AuxInt) != 0 {
29173 break
29174 }
29175 x := v_0.Args[0]
29176 if x.Op != OpARM64ADDconst {
29177 break
29178 }
29179 c := auxIntToInt64(x.AuxInt)
29180 y := x.Args[0]
29181 if !(x.Uses == 1) {
29182 break
29183 }
29184 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
29185 v0.AuxInt = int32ToAuxInt(int32(c))
29186 v0.AddArg(y)
29187 b.resetWithControl(BlockARM64LTnoov, v0)
29188 return true
29189 }
29190
29191
29192
29193 for b.Controls[0].Op == OpARM64CMPconst {
29194 v_0 := b.Controls[0]
29195 if auxIntToInt64(v_0.AuxInt) != 0 {
29196 break
29197 }
29198 z := v_0.Args[0]
29199 if z.Op != OpARM64ADD {
29200 break
29201 }
29202 _ = z.Args[1]
29203 z_0 := z.Args[0]
29204 z_1 := z.Args[1]
29205 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29206 x := z_0
29207 y := z_1
29208 if !(z.Uses == 1) {
29209 continue
29210 }
29211 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
29212 v0.AddArg2(x, y)
29213 b.resetWithControl(BlockARM64LTnoov, v0)
29214 return true
29215 }
29216 break
29217 }
29218
29219
29220
29221 for b.Controls[0].Op == OpARM64CMPWconst {
29222 v_0 := b.Controls[0]
29223 if auxIntToInt32(v_0.AuxInt) != 0 {
29224 break
29225 }
29226 z := v_0.Args[0]
29227 if z.Op != OpARM64ADD {
29228 break
29229 }
29230 _ = z.Args[1]
29231 z_0 := z.Args[0]
29232 z_1 := z.Args[1]
29233 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29234 x := z_0
29235 y := z_1
29236 if !(z.Uses == 1) {
29237 continue
29238 }
29239 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
29240 v0.AddArg2(x, y)
29241 b.resetWithControl(BlockARM64LTnoov, v0)
29242 return true
29243 }
29244 break
29245 }
29246
29247
29248
29249 for b.Controls[0].Op == OpARM64CMPconst {
29250 v_0 := b.Controls[0]
29251 if auxIntToInt64(v_0.AuxInt) != 0 {
29252 break
29253 }
29254 z := v_0.Args[0]
29255 if z.Op != OpARM64MADD {
29256 break
29257 }
29258 y := z.Args[2]
29259 a := z.Args[0]
29260 x := z.Args[1]
29261 if !(z.Uses == 1) {
29262 break
29263 }
29264 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
29265 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
29266 v1.AddArg2(x, y)
29267 v0.AddArg2(a, v1)
29268 b.resetWithControl(BlockARM64LTnoov, v0)
29269 return true
29270 }
29271
29272
29273
29274 for b.Controls[0].Op == OpARM64CMPconst {
29275 v_0 := b.Controls[0]
29276 if auxIntToInt64(v_0.AuxInt) != 0 {
29277 break
29278 }
29279 z := v_0.Args[0]
29280 if z.Op != OpARM64MSUB {
29281 break
29282 }
29283 y := z.Args[2]
29284 a := z.Args[0]
29285 x := z.Args[1]
29286 if !(z.Uses == 1) {
29287 break
29288 }
29289 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
29290 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
29291 v1.AddArg2(x, y)
29292 v0.AddArg2(a, v1)
29293 b.resetWithControl(BlockARM64LTnoov, v0)
29294 return true
29295 }
29296
29297
29298
29299 for b.Controls[0].Op == OpARM64CMPWconst {
29300 v_0 := b.Controls[0]
29301 if auxIntToInt32(v_0.AuxInt) != 0 {
29302 break
29303 }
29304 z := v_0.Args[0]
29305 if z.Op != OpARM64MADDW {
29306 break
29307 }
29308 y := z.Args[2]
29309 a := z.Args[0]
29310 x := z.Args[1]
29311 if !(z.Uses == 1) {
29312 break
29313 }
29314 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
29315 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
29316 v1.AddArg2(x, y)
29317 v0.AddArg2(a, v1)
29318 b.resetWithControl(BlockARM64LTnoov, v0)
29319 return true
29320 }
29321
29322
29323
29324 for b.Controls[0].Op == OpARM64CMPWconst {
29325 v_0 := b.Controls[0]
29326 if auxIntToInt32(v_0.AuxInt) != 0 {
29327 break
29328 }
29329 z := v_0.Args[0]
29330 if z.Op != OpARM64MSUBW {
29331 break
29332 }
29333 y := z.Args[2]
29334 a := z.Args[0]
29335 x := z.Args[1]
29336 if !(z.Uses == 1) {
29337 break
29338 }
29339 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
29340 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
29341 v1.AddArg2(x, y)
29342 v0.AddArg2(a, v1)
29343 b.resetWithControl(BlockARM64LTnoov, v0)
29344 return true
29345 }
29346
29347
29348 for b.Controls[0].Op == OpARM64CMPWconst {
29349 v_0 := b.Controls[0]
29350 if auxIntToInt32(v_0.AuxInt) != 0 {
29351 break
29352 }
29353 x := v_0.Args[0]
29354 b.resetWithControl(BlockARM64TBNZ, x)
29355 b.AuxInt = int64ToAuxInt(31)
29356 return true
29357 }
29358
29359
29360 for b.Controls[0].Op == OpARM64CMPconst {
29361 v_0 := b.Controls[0]
29362 if auxIntToInt64(v_0.AuxInt) != 0 {
29363 break
29364 }
29365 x := v_0.Args[0]
29366 b.resetWithControl(BlockARM64TBNZ, x)
29367 b.AuxInt = int64ToAuxInt(63)
29368 return true
29369 }
29370
29371
29372
29373 for b.Controls[0].Op == OpARM64FlagConstant {
29374 v_0 := b.Controls[0]
29375 fc := auxIntToFlagConstant(v_0.AuxInt)
29376 if !(fc.lt()) {
29377 break
29378 }
29379 b.Reset(BlockFirst)
29380 return true
29381 }
29382
29383
29384
29385 for b.Controls[0].Op == OpARM64FlagConstant {
29386 v_0 := b.Controls[0]
29387 fc := auxIntToFlagConstant(v_0.AuxInt)
29388 if !(!fc.lt()) {
29389 break
29390 }
29391 b.Reset(BlockFirst)
29392 b.swapSuccessors()
29393 return true
29394 }
29395
29396
29397 for b.Controls[0].Op == OpARM64InvertFlags {
29398 v_0 := b.Controls[0]
29399 cmp := v_0.Args[0]
29400 b.resetWithControl(BlockARM64GT, cmp)
29401 return true
29402 }
29403 case BlockARM64LTnoov:
29404
29405
29406
29407 for b.Controls[0].Op == OpARM64FlagConstant {
29408 v_0 := b.Controls[0]
29409 fc := auxIntToFlagConstant(v_0.AuxInt)
29410 if !(fc.ltNoov()) {
29411 break
29412 }
29413 b.Reset(BlockFirst)
29414 return true
29415 }
29416
29417
29418
29419 for b.Controls[0].Op == OpARM64FlagConstant {
29420 v_0 := b.Controls[0]
29421 fc := auxIntToFlagConstant(v_0.AuxInt)
29422 if !(!fc.ltNoov()) {
29423 break
29424 }
29425 b.Reset(BlockFirst)
29426 b.swapSuccessors()
29427 return true
29428 }
29429
29430
29431 for b.Controls[0].Op == OpARM64InvertFlags {
29432 v_0 := b.Controls[0]
29433 cmp := v_0.Args[0]
29434 b.resetWithControl(BlockARM64GTnoov, cmp)
29435 return true
29436 }
29437 case BlockARM64NE:
29438
29439
29440
29441 for b.Controls[0].Op == OpARM64CMPWconst {
29442 v_0 := b.Controls[0]
29443 if auxIntToInt32(v_0.AuxInt) != 0 {
29444 break
29445 }
29446 x := v_0.Args[0]
29447 if x.Op != OpARM64ANDconst {
29448 break
29449 }
29450 c := auxIntToInt64(x.AuxInt)
29451 y := x.Args[0]
29452 if !(x.Uses == 1) {
29453 break
29454 }
29455 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags)
29456 v0.AuxInt = int32ToAuxInt(int32(c))
29457 v0.AddArg(y)
29458 b.resetWithControl(BlockARM64NE, v0)
29459 return true
29460 }
29461
29462
29463
29464 for b.Controls[0].Op == OpARM64CMPconst {
29465 v_0 := b.Controls[0]
29466 if auxIntToInt64(v_0.AuxInt) != 0 {
29467 break
29468 }
29469 z := v_0.Args[0]
29470 if z.Op != OpARM64AND {
29471 break
29472 }
29473 _ = z.Args[1]
29474 z_0 := z.Args[0]
29475 z_1 := z.Args[1]
29476 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29477 x := z_0
29478 y := z_1
29479 if !(z.Uses == 1) {
29480 continue
29481 }
29482 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags)
29483 v0.AddArg2(x, y)
29484 b.resetWithControl(BlockARM64NE, v0)
29485 return true
29486 }
29487 break
29488 }
29489
29490
29491
29492 for b.Controls[0].Op == OpARM64CMPWconst {
29493 v_0 := b.Controls[0]
29494 if auxIntToInt32(v_0.AuxInt) != 0 {
29495 break
29496 }
29497 z := v_0.Args[0]
29498 if z.Op != OpARM64AND {
29499 break
29500 }
29501 _ = z.Args[1]
29502 z_0 := z.Args[0]
29503 z_1 := z.Args[1]
29504 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29505 x := z_0
29506 y := z_1
29507 if !(z.Uses == 1) {
29508 continue
29509 }
29510 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags)
29511 v0.AddArg2(x, y)
29512 b.resetWithControl(BlockARM64NE, v0)
29513 return true
29514 }
29515 break
29516 }
29517
29518
29519
29520 for b.Controls[0].Op == OpARM64CMPconst {
29521 v_0 := b.Controls[0]
29522 if auxIntToInt64(v_0.AuxInt) != 0 {
29523 break
29524 }
29525 x := v_0.Args[0]
29526 if x.Op != OpARM64ANDconst {
29527 break
29528 }
29529 c := auxIntToInt64(x.AuxInt)
29530 y := x.Args[0]
29531 if !(x.Uses == 1) {
29532 break
29533 }
29534 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags)
29535 v0.AuxInt = int64ToAuxInt(c)
29536 v0.AddArg(y)
29537 b.resetWithControl(BlockARM64NE, v0)
29538 return true
29539 }
29540
29541
29542
29543 for b.Controls[0].Op == OpARM64CMPconst {
29544 v_0 := b.Controls[0]
29545 if auxIntToInt64(v_0.AuxInt) != 0 {
29546 break
29547 }
29548 x := v_0.Args[0]
29549 if x.Op != OpARM64ADDconst {
29550 break
29551 }
29552 c := auxIntToInt64(x.AuxInt)
29553 y := x.Args[0]
29554 if !(x.Uses == 1) {
29555 break
29556 }
29557 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags)
29558 v0.AuxInt = int64ToAuxInt(c)
29559 v0.AddArg(y)
29560 b.resetWithControl(BlockARM64NE, v0)
29561 return true
29562 }
29563
29564
29565
29566 for b.Controls[0].Op == OpARM64CMPWconst {
29567 v_0 := b.Controls[0]
29568 if auxIntToInt32(v_0.AuxInt) != 0 {
29569 break
29570 }
29571 x := v_0.Args[0]
29572 if x.Op != OpARM64ADDconst {
29573 break
29574 }
29575 c := auxIntToInt64(x.AuxInt)
29576 y := x.Args[0]
29577 if !(x.Uses == 1) {
29578 break
29579 }
29580 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags)
29581 v0.AuxInt = int32ToAuxInt(int32(c))
29582 v0.AddArg(y)
29583 b.resetWithControl(BlockARM64NE, v0)
29584 return true
29585 }
29586
29587
29588
29589 for b.Controls[0].Op == OpARM64CMPconst {
29590 v_0 := b.Controls[0]
29591 if auxIntToInt64(v_0.AuxInt) != 0 {
29592 break
29593 }
29594 z := v_0.Args[0]
29595 if z.Op != OpARM64ADD {
29596 break
29597 }
29598 _ = z.Args[1]
29599 z_0 := z.Args[0]
29600 z_1 := z.Args[1]
29601 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29602 x := z_0
29603 y := z_1
29604 if !(z.Uses == 1) {
29605 continue
29606 }
29607 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
29608 v0.AddArg2(x, y)
29609 b.resetWithControl(BlockARM64NE, v0)
29610 return true
29611 }
29612 break
29613 }
29614
29615
29616
29617 for b.Controls[0].Op == OpARM64CMPWconst {
29618 v_0 := b.Controls[0]
29619 if auxIntToInt32(v_0.AuxInt) != 0 {
29620 break
29621 }
29622 z := v_0.Args[0]
29623 if z.Op != OpARM64ADD {
29624 break
29625 }
29626 _ = z.Args[1]
29627 z_0 := z.Args[0]
29628 z_1 := z.Args[1]
29629 for _i0 := 0; _i0 <= 1; _i0, z_0, z_1 = _i0+1, z_1, z_0 {
29630 x := z_0
29631 y := z_1
29632 if !(z.Uses == 1) {
29633 continue
29634 }
29635 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
29636 v0.AddArg2(x, y)
29637 b.resetWithControl(BlockARM64NE, v0)
29638 return true
29639 }
29640 break
29641 }
29642
29643
29644
29645 for b.Controls[0].Op == OpARM64CMP {
29646 v_0 := b.Controls[0]
29647 _ = v_0.Args[1]
29648 x := v_0.Args[0]
29649 z := v_0.Args[1]
29650 if z.Op != OpARM64NEG {
29651 break
29652 }
29653 y := z.Args[0]
29654 if !(z.Uses == 1) {
29655 break
29656 }
29657 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
29658 v0.AddArg2(x, y)
29659 b.resetWithControl(BlockARM64NE, v0)
29660 return true
29661 }
29662
29663
29664
29665 for b.Controls[0].Op == OpARM64CMPW {
29666 v_0 := b.Controls[0]
29667 _ = v_0.Args[1]
29668 x := v_0.Args[0]
29669 z := v_0.Args[1]
29670 if z.Op != OpARM64NEG {
29671 break
29672 }
29673 y := z.Args[0]
29674 if !(z.Uses == 1) {
29675 break
29676 }
29677 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
29678 v0.AddArg2(x, y)
29679 b.resetWithControl(BlockARM64NE, v0)
29680 return true
29681 }
29682
29683
29684 for b.Controls[0].Op == OpARM64CMPconst {
29685 v_0 := b.Controls[0]
29686 if auxIntToInt64(v_0.AuxInt) != 0 {
29687 break
29688 }
29689 x := v_0.Args[0]
29690 b.resetWithControl(BlockARM64NZ, x)
29691 return true
29692 }
29693
29694
29695 for b.Controls[0].Op == OpARM64CMPWconst {
29696 v_0 := b.Controls[0]
29697 if auxIntToInt32(v_0.AuxInt) != 0 {
29698 break
29699 }
29700 x := v_0.Args[0]
29701 b.resetWithControl(BlockARM64NZW, x)
29702 return true
29703 }
29704
29705
29706
29707 for b.Controls[0].Op == OpARM64CMPconst {
29708 v_0 := b.Controls[0]
29709 if auxIntToInt64(v_0.AuxInt) != 0 {
29710 break
29711 }
29712 z := v_0.Args[0]
29713 if z.Op != OpARM64MADD {
29714 break
29715 }
29716 y := z.Args[2]
29717 a := z.Args[0]
29718 x := z.Args[1]
29719 if !(z.Uses == 1) {
29720 break
29721 }
29722 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags)
29723 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
29724 v1.AddArg2(x, y)
29725 v0.AddArg2(a, v1)
29726 b.resetWithControl(BlockARM64NE, v0)
29727 return true
29728 }
29729
29730
29731
29732 for b.Controls[0].Op == OpARM64CMPconst {
29733 v_0 := b.Controls[0]
29734 if auxIntToInt64(v_0.AuxInt) != 0 {
29735 break
29736 }
29737 z := v_0.Args[0]
29738 if z.Op != OpARM64MSUB {
29739 break
29740 }
29741 y := z.Args[2]
29742 a := z.Args[0]
29743 x := z.Args[1]
29744 if !(z.Uses == 1) {
29745 break
29746 }
29747 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags)
29748 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type)
29749 v1.AddArg2(x, y)
29750 v0.AddArg2(a, v1)
29751 b.resetWithControl(BlockARM64NE, v0)
29752 return true
29753 }
29754
29755
29756
29757 for b.Controls[0].Op == OpARM64CMPWconst {
29758 v_0 := b.Controls[0]
29759 if auxIntToInt32(v_0.AuxInt) != 0 {
29760 break
29761 }
29762 z := v_0.Args[0]
29763 if z.Op != OpARM64MADDW {
29764 break
29765 }
29766 y := z.Args[2]
29767 a := z.Args[0]
29768 x := z.Args[1]
29769 if !(z.Uses == 1) {
29770 break
29771 }
29772 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags)
29773 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
29774 v1.AddArg2(x, y)
29775 v0.AddArg2(a, v1)
29776 b.resetWithControl(BlockARM64NE, v0)
29777 return true
29778 }
29779
29780
29781
29782 for b.Controls[0].Op == OpARM64CMPWconst {
29783 v_0 := b.Controls[0]
29784 if auxIntToInt32(v_0.AuxInt) != 0 {
29785 break
29786 }
29787 z := v_0.Args[0]
29788 if z.Op != OpARM64MSUBW {
29789 break
29790 }
29791 y := z.Args[2]
29792 a := z.Args[0]
29793 x := z.Args[1]
29794 if !(z.Uses == 1) {
29795 break
29796 }
29797 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags)
29798 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type)
29799 v1.AddArg2(x, y)
29800 v0.AddArg2(a, v1)
29801 b.resetWithControl(BlockARM64NE, v0)
29802 return true
29803 }
29804
29805
29806
29807 for b.Controls[0].Op == OpARM64TSTconst {
29808 v_0 := b.Controls[0]
29809 c := auxIntToInt64(v_0.AuxInt)
29810 x := v_0.Args[0]
29811 if !(oneBit(c)) {
29812 break
29813 }
29814 b.resetWithControl(BlockARM64TBNZ, x)
29815 b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
29816 return true
29817 }
29818
29819
29820
29821 for b.Controls[0].Op == OpARM64TSTWconst {
29822 v_0 := b.Controls[0]
29823 c := auxIntToInt32(v_0.AuxInt)
29824 x := v_0.Args[0]
29825 if !(oneBit(int64(uint32(c)))) {
29826 break
29827 }
29828 b.resetWithControl(BlockARM64TBNZ, x)
29829 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
29830 return true
29831 }
29832
29833
29834
29835 for b.Controls[0].Op == OpARM64FlagConstant {
29836 v_0 := b.Controls[0]
29837 fc := auxIntToFlagConstant(v_0.AuxInt)
29838 if !(fc.ne()) {
29839 break
29840 }
29841 b.Reset(BlockFirst)
29842 return true
29843 }
29844
29845
29846
29847 for b.Controls[0].Op == OpARM64FlagConstant {
29848 v_0 := b.Controls[0]
29849 fc := auxIntToFlagConstant(v_0.AuxInt)
29850 if !(!fc.ne()) {
29851 break
29852 }
29853 b.Reset(BlockFirst)
29854 b.swapSuccessors()
29855 return true
29856 }
29857
29858
29859 for b.Controls[0].Op == OpARM64InvertFlags {
29860 v_0 := b.Controls[0]
29861 cmp := v_0.Args[0]
29862 b.resetWithControl(BlockARM64NE, cmp)
29863 return true
29864 }
29865 case BlockARM64NZ:
29866
29867
29868 for b.Controls[0].Op == OpARM64Equal {
29869 v_0 := b.Controls[0]
29870 cc := v_0.Args[0]
29871 b.resetWithControl(BlockARM64EQ, cc)
29872 return true
29873 }
29874
29875
29876 for b.Controls[0].Op == OpARM64NotEqual {
29877 v_0 := b.Controls[0]
29878 cc := v_0.Args[0]
29879 b.resetWithControl(BlockARM64NE, cc)
29880 return true
29881 }
29882
29883
29884 for b.Controls[0].Op == OpARM64LessThan {
29885 v_0 := b.Controls[0]
29886 cc := v_0.Args[0]
29887 b.resetWithControl(BlockARM64LT, cc)
29888 return true
29889 }
29890
29891
29892 for b.Controls[0].Op == OpARM64LessThanU {
29893 v_0 := b.Controls[0]
29894 cc := v_0.Args[0]
29895 b.resetWithControl(BlockARM64ULT, cc)
29896 return true
29897 }
29898
29899
29900 for b.Controls[0].Op == OpARM64LessEqual {
29901 v_0 := b.Controls[0]
29902 cc := v_0.Args[0]
29903 b.resetWithControl(BlockARM64LE, cc)
29904 return true
29905 }
29906
29907
29908 for b.Controls[0].Op == OpARM64LessEqualU {
29909 v_0 := b.Controls[0]
29910 cc := v_0.Args[0]
29911 b.resetWithControl(BlockARM64ULE, cc)
29912 return true
29913 }
29914
29915
29916 for b.Controls[0].Op == OpARM64GreaterThan {
29917 v_0 := b.Controls[0]
29918 cc := v_0.Args[0]
29919 b.resetWithControl(BlockARM64GT, cc)
29920 return true
29921 }
29922
29923
29924 for b.Controls[0].Op == OpARM64GreaterThanU {
29925 v_0 := b.Controls[0]
29926 cc := v_0.Args[0]
29927 b.resetWithControl(BlockARM64UGT, cc)
29928 return true
29929 }
29930
29931
29932 for b.Controls[0].Op == OpARM64GreaterEqual {
29933 v_0 := b.Controls[0]
29934 cc := v_0.Args[0]
29935 b.resetWithControl(BlockARM64GE, cc)
29936 return true
29937 }
29938
29939
29940 for b.Controls[0].Op == OpARM64GreaterEqualU {
29941 v_0 := b.Controls[0]
29942 cc := v_0.Args[0]
29943 b.resetWithControl(BlockARM64UGE, cc)
29944 return true
29945 }
29946
29947
29948 for b.Controls[0].Op == OpARM64LessThanF {
29949 v_0 := b.Controls[0]
29950 cc := v_0.Args[0]
29951 b.resetWithControl(BlockARM64FLT, cc)
29952 return true
29953 }
29954
29955
29956 for b.Controls[0].Op == OpARM64LessEqualF {
29957 v_0 := b.Controls[0]
29958 cc := v_0.Args[0]
29959 b.resetWithControl(BlockARM64FLE, cc)
29960 return true
29961 }
29962
29963
29964 for b.Controls[0].Op == OpARM64GreaterThanF {
29965 v_0 := b.Controls[0]
29966 cc := v_0.Args[0]
29967 b.resetWithControl(BlockARM64FGT, cc)
29968 return true
29969 }
29970
29971
29972 for b.Controls[0].Op == OpARM64GreaterEqualF {
29973 v_0 := b.Controls[0]
29974 cc := v_0.Args[0]
29975 b.resetWithControl(BlockARM64FGE, cc)
29976 return true
29977 }
29978
29979
29980
29981 for b.Controls[0].Op == OpARM64ANDconst {
29982 v_0 := b.Controls[0]
29983 c := auxIntToInt64(v_0.AuxInt)
29984 x := v_0.Args[0]
29985 if !(oneBit(c)) {
29986 break
29987 }
29988 b.resetWithControl(BlockARM64TBNZ, x)
29989 b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
29990 return true
29991 }
29992
29993
29994 for b.Controls[0].Op == OpARM64MOVDconst {
29995 v_0 := b.Controls[0]
29996 if auxIntToInt64(v_0.AuxInt) != 0 {
29997 break
29998 }
29999 b.Reset(BlockFirst)
30000 b.swapSuccessors()
30001 return true
30002 }
30003
30004
30005
30006 for b.Controls[0].Op == OpARM64MOVDconst {
30007 v_0 := b.Controls[0]
30008 c := auxIntToInt64(v_0.AuxInt)
30009 if !(c != 0) {
30010 break
30011 }
30012 b.Reset(BlockFirst)
30013 return true
30014 }
30015 case BlockARM64NZW:
30016
30017
30018
30019 for b.Controls[0].Op == OpARM64ANDconst {
30020 v_0 := b.Controls[0]
30021 c := auxIntToInt64(v_0.AuxInt)
30022 x := v_0.Args[0]
30023 if !(oneBit(int64(uint32(c)))) {
30024 break
30025 }
30026 b.resetWithControl(BlockARM64TBNZ, x)
30027 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
30028 return true
30029 }
30030
30031
30032
30033 for b.Controls[0].Op == OpARM64MOVDconst {
30034 v_0 := b.Controls[0]
30035 c := auxIntToInt64(v_0.AuxInt)
30036 if !(int32(c) == 0) {
30037 break
30038 }
30039 b.Reset(BlockFirst)
30040 b.swapSuccessors()
30041 return true
30042 }
30043
30044
30045
30046 for b.Controls[0].Op == OpARM64MOVDconst {
30047 v_0 := b.Controls[0]
30048 c := auxIntToInt64(v_0.AuxInt)
30049 if !(int32(c) != 0) {
30050 break
30051 }
30052 b.Reset(BlockFirst)
30053 return true
30054 }
30055 case BlockARM64UGE:
30056
30057
30058
30059 for b.Controls[0].Op == OpARM64FlagConstant {
30060 v_0 := b.Controls[0]
30061 fc := auxIntToFlagConstant(v_0.AuxInt)
30062 if !(fc.uge()) {
30063 break
30064 }
30065 b.Reset(BlockFirst)
30066 return true
30067 }
30068
30069
30070
30071 for b.Controls[0].Op == OpARM64FlagConstant {
30072 v_0 := b.Controls[0]
30073 fc := auxIntToFlagConstant(v_0.AuxInt)
30074 if !(!fc.uge()) {
30075 break
30076 }
30077 b.Reset(BlockFirst)
30078 b.swapSuccessors()
30079 return true
30080 }
30081
30082
30083 for b.Controls[0].Op == OpARM64InvertFlags {
30084 v_0 := b.Controls[0]
30085 cmp := v_0.Args[0]
30086 b.resetWithControl(BlockARM64ULE, cmp)
30087 return true
30088 }
30089 case BlockARM64UGT:
30090
30091
30092
30093 for b.Controls[0].Op == OpARM64FlagConstant {
30094 v_0 := b.Controls[0]
30095 fc := auxIntToFlagConstant(v_0.AuxInt)
30096 if !(fc.ugt()) {
30097 break
30098 }
30099 b.Reset(BlockFirst)
30100 return true
30101 }
30102
30103
30104
30105 for b.Controls[0].Op == OpARM64FlagConstant {
30106 v_0 := b.Controls[0]
30107 fc := auxIntToFlagConstant(v_0.AuxInt)
30108 if !(!fc.ugt()) {
30109 break
30110 }
30111 b.Reset(BlockFirst)
30112 b.swapSuccessors()
30113 return true
30114 }
30115
30116
30117 for b.Controls[0].Op == OpARM64InvertFlags {
30118 v_0 := b.Controls[0]
30119 cmp := v_0.Args[0]
30120 b.resetWithControl(BlockARM64ULT, cmp)
30121 return true
30122 }
30123 case BlockARM64ULE:
30124
30125
30126
30127 for b.Controls[0].Op == OpARM64FlagConstant {
30128 v_0 := b.Controls[0]
30129 fc := auxIntToFlagConstant(v_0.AuxInt)
30130 if !(fc.ule()) {
30131 break
30132 }
30133 b.Reset(BlockFirst)
30134 return true
30135 }
30136
30137
30138
30139 for b.Controls[0].Op == OpARM64FlagConstant {
30140 v_0 := b.Controls[0]
30141 fc := auxIntToFlagConstant(v_0.AuxInt)
30142 if !(!fc.ule()) {
30143 break
30144 }
30145 b.Reset(BlockFirst)
30146 b.swapSuccessors()
30147 return true
30148 }
30149
30150
30151 for b.Controls[0].Op == OpARM64InvertFlags {
30152 v_0 := b.Controls[0]
30153 cmp := v_0.Args[0]
30154 b.resetWithControl(BlockARM64UGE, cmp)
30155 return true
30156 }
30157 case BlockARM64ULT:
30158
30159
30160
30161 for b.Controls[0].Op == OpARM64FlagConstant {
30162 v_0 := b.Controls[0]
30163 fc := auxIntToFlagConstant(v_0.AuxInt)
30164 if !(fc.ult()) {
30165 break
30166 }
30167 b.Reset(BlockFirst)
30168 return true
30169 }
30170
30171
30172
30173 for b.Controls[0].Op == OpARM64FlagConstant {
30174 v_0 := b.Controls[0]
30175 fc := auxIntToFlagConstant(v_0.AuxInt)
30176 if !(!fc.ult()) {
30177 break
30178 }
30179 b.Reset(BlockFirst)
30180 b.swapSuccessors()
30181 return true
30182 }
30183
30184
30185 for b.Controls[0].Op == OpARM64InvertFlags {
30186 v_0 := b.Controls[0]
30187 cmp := v_0.Args[0]
30188 b.resetWithControl(BlockARM64UGT, cmp)
30189 return true
30190 }
30191 case BlockARM64Z:
30192
30193
30194
30195 for b.Controls[0].Op == OpARM64ANDconst {
30196 v_0 := b.Controls[0]
30197 c := auxIntToInt64(v_0.AuxInt)
30198 x := v_0.Args[0]
30199 if !(oneBit(c)) {
30200 break
30201 }
30202 b.resetWithControl(BlockARM64TBZ, x)
30203 b.AuxInt = int64ToAuxInt(int64(ntz64(c)))
30204 return true
30205 }
30206
30207
30208 for b.Controls[0].Op == OpARM64MOVDconst {
30209 v_0 := b.Controls[0]
30210 if auxIntToInt64(v_0.AuxInt) != 0 {
30211 break
30212 }
30213 b.Reset(BlockFirst)
30214 return true
30215 }
30216
30217
30218
30219 for b.Controls[0].Op == OpARM64MOVDconst {
30220 v_0 := b.Controls[0]
30221 c := auxIntToInt64(v_0.AuxInt)
30222 if !(c != 0) {
30223 break
30224 }
30225 b.Reset(BlockFirst)
30226 b.swapSuccessors()
30227 return true
30228 }
30229 case BlockARM64ZW:
30230
30231
30232
30233 for b.Controls[0].Op == OpARM64ANDconst {
30234 v_0 := b.Controls[0]
30235 c := auxIntToInt64(v_0.AuxInt)
30236 x := v_0.Args[0]
30237 if !(oneBit(int64(uint32(c)))) {
30238 break
30239 }
30240 b.resetWithControl(BlockARM64TBZ, x)
30241 b.AuxInt = int64ToAuxInt(int64(ntz64(int64(uint32(c)))))
30242 return true
30243 }
30244
30245
30246
30247 for b.Controls[0].Op == OpARM64MOVDconst {
30248 v_0 := b.Controls[0]
30249 c := auxIntToInt64(v_0.AuxInt)
30250 if !(int32(c) == 0) {
30251 break
30252 }
30253 b.Reset(BlockFirst)
30254 return true
30255 }
30256
30257
30258
30259 for b.Controls[0].Op == OpARM64MOVDconst {
30260 v_0 := b.Controls[0]
30261 c := auxIntToInt64(v_0.AuxInt)
30262 if !(int32(c) != 0) {
30263 break
30264 }
30265 b.Reset(BlockFirst)
30266 b.swapSuccessors()
30267 return true
30268 }
30269 }
30270 return false
30271 }
30272
View as plain text