// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Lowering arithmetic
// Narrow (8/16/32-bit) integer ops are carried out on the full i64;
// the unused high bits are ignored (truncations lower to plain copies).
(Add(64|32|16|8|Ptr) ...) => (I64Add ...)
(Add(64|32)F ...) => (F(64|32)Add ...)

(Sub(64|32|16|8|Ptr) ...) => (I64Sub ...)
(Sub(64|32)F ...) => (F(64|32)Sub ...)

(Mul(64|32|16|8) ...) => (I64Mul ...)
(Mul(64|32)F ...) => (F(64|32)Mul ...)

// Signed div/rem must see correctly sign-extended 64-bit operands.
(Div64 [false] x y) => (I64DivS x y)
(Div32 [false] x y) => (I64DivS (SignExt32to64 x) (SignExt32to64 y))
(Div16 [false] x y) => (I64DivS (SignExt16to64 x) (SignExt16to64 y))
(Div8 x y) => (I64DivS (SignExt8to64 x) (SignExt8to64 y))
(Div64u ...) => (I64DivU ...)
(Div32u x y) => (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Div16u x y) => (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Div8u x y) => (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Div(64|32)F ...) => (F(64|32)Div ...)

(Mod64 [false] x y) => (I64RemS x y)
(Mod32 [false] x y) => (I64RemS (SignExt32to64 x) (SignExt32to64 y))
(Mod16 [false] x y) => (I64RemS (SignExt16to64 x) (SignExt16to64 y))
(Mod8 x y) => (I64RemS (SignExt8to64 x) (SignExt8to64 y))
(Mod64u ...) => (I64RemU ...)
(Mod32u x y) => (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Mod16u x y) => (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Mod8u x y) => (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y))

(And(64|32|16|8|B) ...) => (I64And ...)

(Or(64|32|16|8|B) ...) => (I64Or ...)

(Xor(64|32|16|8) ...) => (I64Xor ...)

(Neg(64|32|16|8) x) => (I64Sub (I64Const [0]) x)
(Neg(64|32)F ...) => (F(64|32)Neg ...)

(Com(64|32|16|8) x) => (I64Xor x (I64Const [-1]))

(Not ...) => (I64Eqz ...)

// Lowering pointer arithmetic
(OffPtr ...) => (I64AddConst ...)
49
// Lowering extension
// It is unnecessary to extend loads: the sized load ops already produce
// a properly extended 64-bit value.
(SignExt32to64 x:(I64Load32S _ _)) => x
(SignExt16to(64|32) x:(I64Load16S _ _)) => x
(SignExt8to(64|32|16) x:(I64Load8S _ _)) => x
(ZeroExt32to64 x:(I64Load32U _ _)) => x
(ZeroExt16to(64|32) x:(I64Load16U _ _)) => x
(ZeroExt8to(64|32|16) x:(I64Load8U _ _)) => x
// Use the native sign-extension instructions when the sign-ext
// feature is enabled; otherwise fall back to shift pairs.
(SignExt32to64 x) && buildcfg.GOWASM.SignExt => (I64Extend32S x)
(SignExt8to(64|32|16) x) && buildcfg.GOWASM.SignExt => (I64Extend8S x)
(SignExt16to(64|32) x) && buildcfg.GOWASM.SignExt => (I64Extend16S x)
(SignExt32to64 x) => (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
(SignExt16to(64|32) x) => (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
(SignExt8to(64|32|16) x) => (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
(ZeroExt32to64 x) => (I64And x (I64Const [0xffffffff]))
(ZeroExt16to(64|32) x) => (I64And x (I64Const [0xffff]))
(ZeroExt8to(64|32|16) x) => (I64And x (I64Const [0xff]))

(Slicemask x) => (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))

// Lowering truncation
// Because we ignore the high parts, truncates are just copies.
(Trunc64to(32|16|8) ...) => (Copy ...)
(Trunc32to(16|8) ...) => (Copy ...)
(Trunc16to8 ...) => (Copy ...)
75
// Lowering float <=> int
(Cvt32to(64|32)F x) => (F(64|32)ConvertI64S (SignExt32to64 x))
(Cvt64to(64|32)F ...) => (F(64|32)ConvertI64S ...)
(Cvt32Uto(64|32)F x) => (F(64|32)ConvertI64U (ZeroExt32to64 x))
(Cvt64Uto(64|32)F ...) => (F(64|32)ConvertI64U ...)

// Saturating truncation: out-of-range inputs clamp instead of trapping.
(Cvt32Fto32 ...) => (I64TruncSatF32S ...)
(Cvt32Fto64 ...) => (I64TruncSatF32S ...)
(Cvt64Fto32 ...) => (I64TruncSatF64S ...)
(Cvt64Fto64 ...) => (I64TruncSatF64S ...)
(Cvt32Fto32U ...) => (I64TruncSatF32U ...)
(Cvt32Fto64U ...) => (I64TruncSatF32U ...)
(Cvt64Fto32U ...) => (I64TruncSatF64U ...)
(Cvt64Fto64U ...) => (I64TruncSatF64U ...)

(Cvt32Fto64F ...) => (F64PromoteF32 ...)
(Cvt64Fto32F ...) => (F32DemoteF64 ...)

(CvtBoolToUint8 ...) => (Copy ...)

(Round32F ...) => (Copy ...)
(Round64F ...) => (Copy ...)
98
// Lowering shifts
// Unsigned shifts need to return 0 if shift amount is >= width of shifted value.

(Lsh64x64 x y) && shiftIsBounded(v) => (I64Shl x y)
(Lsh64x64 x (I64Const [c])) && uint64(c) < 64 => (I64Shl x (I64Const [c]))
(Lsh64x64 x (I64Const [c])) && uint64(c) >= 64 => (I64Const [0])
(Lsh64x64 x y) => (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Lsh64x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Lsh32x64 ...) => (Lsh64x64 ...)
(Lsh32x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Lsh16x64 ...) => (Lsh64x64 ...)
(Lsh16x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Lsh8x64 ...) => (Lsh64x64 ...)
(Lsh8x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Rsh64Ux64 x y) && shiftIsBounded(v) => (I64ShrU x y)
(Rsh64Ux64 x (I64Const [c])) && uint64(c) < 64 => (I64ShrU x (I64Const [c]))
(Rsh64Ux64 x (I64Const [c])) && uint64(c) >= 64 => (I64Const [0])
(Rsh64Ux64 x y) => (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Rsh64Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] x (ZeroExt(32|16|8)to64 y))

(Rsh32Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt32to64 x) y)
(Rsh32Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt32to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh16Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt16to64 x) y)
(Rsh16Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt16to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh8Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt8to64 x) y)
(Rsh8Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt8to64 x) (ZeroExt(32|16|8)to64 y))

// Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
// We implement this by setting the shift value to (width - 1) if the shift value is >= width.

(Rsh64x64 x y) && shiftIsBounded(v) => (I64ShrS x y)
(Rsh64x64 x (I64Const [c])) && uint64(c) < 64 => (I64ShrS x (I64Const [c]))
(Rsh64x64 x (I64Const [c])) && uint64(c) >= 64 => (I64ShrS x (I64Const [63]))
(Rsh64x64 x y) => (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
(Rsh64x(32|16|8) [c] x y) => (Rsh64x64 [c] x (ZeroExt(32|16|8)to64 y))

(Rsh32x64 [c] x y) => (Rsh64x64 [c] (SignExt32to64 x) y)
(Rsh32x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt32to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh16x64 [c] x y) => (Rsh64x64 [c] (SignExt16to64 x) y)
(Rsh16x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt16to64 x) (ZeroExt(32|16|8)to64 y))

(Rsh8x64 [c] x y) => (Rsh64x64 [c] (SignExt8to64 x) y)
(Rsh8x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt8to64 x) (ZeroExt(32|16|8)to64 y))

// Lowering rotates
(RotateLeft8 <t> x (I64Const [c])) => (Or8 (Lsh8x64 <t> x (I64Const [c&7])) (Rsh8Ux64 <t> x (I64Const [-c&7])))
(RotateLeft16 <t> x (I64Const [c])) => (Or16 (Lsh16x64 <t> x (I64Const [c&15])) (Rsh16Ux64 <t> x (I64Const [-c&15])))
(RotateLeft32 ...) => (I32Rotl ...)
(RotateLeft64 ...) => (I64Rotl ...)
155
// Lowering comparisons
// Signed compares sign-extend their operands, unsigned compares and
// equality tests zero-extend them.
(Less64 ...) => (I64LtS ...)
(Less32 x y) => (I64LtS (SignExt32to64 x) (SignExt32to64 y))
(Less16 x y) => (I64LtS (SignExt16to64 x) (SignExt16to64 y))
(Less8 x y) => (I64LtS (SignExt8to64 x) (SignExt8to64 y))
(Less64U ...) => (I64LtU ...)
(Less32U x y) => (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Less16U x y) => (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Less8U x y) => (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Less(64|32)F ...) => (F(64|32)Lt ...)

(Leq64 ...) => (I64LeS ...)
(Leq32 x y) => (I64LeS (SignExt32to64 x) (SignExt32to64 y))
(Leq16 x y) => (I64LeS (SignExt16to64 x) (SignExt16to64 y))
(Leq8 x y) => (I64LeS (SignExt8to64 x) (SignExt8to64 y))
(Leq64U ...) => (I64LeU ...)
(Leq32U x y) => (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Leq16U x y) => (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Leq8U x y) => (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Leq(64|32)F ...) => (F(64|32)Le ...)

(Eq64 ...) => (I64Eq ...)
(Eq32 x y) => (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
(Eq16 x y) => (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
(Eq8 x y) => (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y))
(EqB ...) => (I64Eq ...)
(EqPtr ...) => (I64Eq ...)
(Eq(64|32)F ...) => (F(64|32)Eq ...)

(Neq64 ...) => (I64Ne ...)
(Neq32 x y) => (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
(Neq16 x y) => (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
(Neq8 x y) => (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y))
(NeqB ...) => (I64Ne ...)
(NeqPtr ...) => (I64Ne ...)
(Neq(64|32)F ...) => (F(64|32)Ne ...)
192
// Lowering loads
// Select the load op by the type's size and (for integers) signedness.
(Load <t> ptr mem) && is32BitFloat(t) => (F32Load ptr mem)
(Load <t> ptr mem) && is64BitFloat(t) => (F64Load ptr mem)
(Load <t> ptr mem) && t.Size() == 8 => (I64Load ptr mem)
(Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() => (I64Load32U ptr mem)
(Load <t> ptr mem) && t.Size() == 4 && t.IsSigned() => (I64Load32S ptr mem)
(Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() => (I64Load16U ptr mem)
(Load <t> ptr mem) && t.Size() == 2 && t.IsSigned() => (I64Load16S ptr mem)
(Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() => (I64Load8U ptr mem)
(Load <t> ptr mem) && t.Size() == 1 && t.IsSigned() => (I64Load8S ptr mem)

// Lowering stores
(Store {t} ptr val mem) && is64BitFloat(t) => (F64Store ptr val mem)
(Store {t} ptr val mem) && is32BitFloat(t) => (F32Store ptr val mem)
(Store {t} ptr val mem) && t.Size() == 8 => (I64Store ptr val mem)
(Store {t} ptr val mem) && t.Size() == 4 => (I64Store32 ptr val mem)
(Store {t} ptr val mem) && t.Size() == 2 => (I64Store16 ptr val mem)
(Store {t} ptr val mem) && t.Size() == 1 => (I64Store8 ptr val mem)
211
// Lowering moves
// Small fixed sizes expand to load/store pairs; odd sizes use one
// overlapping wider access for the tail.
(Move [0] _ _ mem) => mem
(Move [1] dst src mem) => (I64Store8 dst (I64Load8U src mem) mem)
(Move [2] dst src mem) => (I64Store16 dst (I64Load16U src mem) mem)
(Move [4] dst src mem) => (I64Store32 dst (I64Load32U src mem) mem)
(Move [8] dst src mem) => (I64Store dst (I64Load src mem) mem)
(Move [16] dst src mem) =>
	(I64Store [8] dst (I64Load [8] src mem)
		(I64Store dst (I64Load src mem) mem))
(Move [3] dst src mem) =>
	(I64Store8 [2] dst (I64Load8U [2] src mem)
		(I64Store16 dst (I64Load16U src mem) mem))
(Move [5] dst src mem) =>
	(I64Store8 [4] dst (I64Load8U [4] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [6] dst src mem) =>
	(I64Store16 [4] dst (I64Load16U [4] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [7] dst src mem) =>
	(I64Store32 [3] dst (I64Load32U [3] src mem)
		(I64Store32 dst (I64Load32U src mem) mem))
(Move [s] dst src mem) && s > 8 && s < 16 =>
	(I64Store [s-8] dst (I64Load [s-8] src mem)
		(I64Store dst (I64Load src mem) mem))

// Adjust moves to be a multiple of 16 bytes.
(Move [s] dst src mem)
	&& s > 16 && s%16 != 0 && s%16 <= 8 =>
	(Move [s-s%16]
		(OffPtr <dst.Type> dst [s%16])
		(OffPtr <src.Type> src [s%16])
		(I64Store dst (I64Load src mem) mem))
(Move [s] dst src mem)
	&& s > 16 && s%16 != 0 && s%16 > 8 =>
	(Move [s-s%16]
		(OffPtr <dst.Type> dst [s%16])
		(OffPtr <src.Type> src [s%16])
		(I64Store [8] dst (I64Load [8] src mem)
			(I64Store dst (I64Load src mem) mem)))

// Large copying uses helper.
(Move [s] dst src mem) && s%8 == 0 && logLargeCopy(v, s) =>
	(LoweredMove [s/8] dst src mem)
255
// Lowering Zero instructions
(Zero [0] _ mem) => mem
(Zero [1] destptr mem) => (I64Store8 destptr (I64Const [0]) mem)
(Zero [2] destptr mem) => (I64Store16 destptr (I64Const [0]) mem)
(Zero [4] destptr mem) => (I64Store32 destptr (I64Const [0]) mem)
(Zero [8] destptr mem) => (I64Store destptr (I64Const [0]) mem)

// Odd sizes zero the tail with one overlapping wider store.
(Zero [3] destptr mem) =>
	(I64Store8 [2] destptr (I64Const [0])
		(I64Store16 destptr (I64Const [0]) mem))
(Zero [5] destptr mem) =>
	(I64Store8 [4] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))
(Zero [6] destptr mem) =>
	(I64Store16 [4] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))
(Zero [7] destptr mem) =>
	(I64Store32 [3] destptr (I64Const [0])
		(I64Store32 destptr (I64Const [0]) mem))

// Strip off any fractional word zeroing.
(Zero [s] destptr mem) && s%8 != 0 && s > 8 =>
	(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
		(I64Store destptr (I64Const [0]) mem))

// Zero small numbers of words directly.
(Zero [16] destptr mem) =>
	(I64Store [8] destptr (I64Const [0])
		(I64Store destptr (I64Const [0]) mem))
(Zero [24] destptr mem) =>
	(I64Store [16] destptr (I64Const [0])
		(I64Store [8] destptr (I64Const [0])
			(I64Store destptr (I64Const [0]) mem)))
(Zero [32] destptr mem) =>
	(I64Store [24] destptr (I64Const [0])
		(I64Store [16] destptr (I64Const [0])
			(I64Store [8] destptr (I64Const [0])
				(I64Store destptr (I64Const [0]) mem))))

// Large zeroing uses helper.
(Zero [s] destptr mem) && s%8 == 0 && s > 32 =>
	(LoweredZero [s/8] destptr mem)
298
// Lowering constants
(Const64 ...) => (I64Const ...)
(Const(32|16|8) [c]) => (I64Const [int64(c)])
(Const(64|32)F ...) => (F(64|32)Const ...)
(ConstNil) => (I64Const [0])
(ConstBool [c]) => (I64Const [b2i(c)])

// Lowering calls
(StaticCall ...) => (LoweredStaticCall ...)
(ClosureCall ...) => (LoweredClosureCall ...)
(InterCall ...) => (LoweredInterCall ...)
(TailCall ...) => (LoweredTailCall ...)

// Miscellaneous
(Convert ...) => (LoweredConvert ...)
(IsNonNil p) => (I64Eqz (I64Eqz p))
(IsInBounds ...) => (I64LtU ...)
(IsSliceInBounds ...) => (I64LeU ...)
(NilCheck ...) => (LoweredNilCheck ...)
(GetClosurePtr ...) => (LoweredGetClosurePtr ...)
(GetCallerPC ...) => (LoweredGetCallerPC ...)
(GetCallerSP ...) => (LoweredGetCallerSP ...)
(Addr {sym} base) => (LoweredAddr {sym} [0] base)
(LocalAddr {sym} base _) => (LoweredAddr {sym} base)

// Write barrier.
(WB ...) => (LoweredWB ...)
326
// --- Intrinsics ---
(Sqrt ...) => (F64Sqrt ...)
(Trunc ...) => (F64Trunc ...)
(Ceil ...) => (F64Ceil ...)
(Floor ...) => (F64Floor ...)
(RoundToEven ...) => (F64Nearest ...)
(Abs ...) => (F64Abs ...)
(Copysign ...) => (F64Copysign ...)

(Sqrt32 ...) => (F32Sqrt ...)

// Narrow Ctz: OR in a guard bit just above the operand's width so the
// count never exceeds the operand size even when the input is zero.
(Ctz64 ...) => (I64Ctz ...)
(Ctz32 x) => (I64Ctz (I64Or x (I64Const [0x100000000])))
(Ctz16 x) => (I64Ctz (I64Or x (I64Const [0x10000])))
(Ctz8 x) => (I64Ctz (I64Or x (I64Const [0x100])))

(Ctz(64|32|16|8)NonZero ...) => (I64Ctz ...)

(BitLen64 x) => (I64Sub (I64Const [64]) (I64Clz x))

(PopCount64 ...) => (I64Popcnt ...)
(PopCount32 x) => (I64Popcnt (ZeroExt32to64 x))
(PopCount16 x) => (I64Popcnt (ZeroExt16to64 x))
(PopCount8 x) => (I64Popcnt (ZeroExt8to64 x))

(CondSelect ...) => (Select ...)
353
// --- Optimizations ---
(I64Add (I64Const [x]) (I64Const [y])) => (I64Const [x + y])
(I64Mul (I64Const [x]) (I64Const [y])) => (I64Const [x * y])
(I64And (I64Const [x]) (I64Const [y])) => (I64Const [x & y])
(I64Or (I64Const [x]) (I64Const [y])) => (I64Const [x | y])
(I64Xor (I64Const [x]) (I64Const [y])) => (I64Const [x ^ y])
(F64Add (F64Const [x]) (F64Const [y])) => (F64Const [x + y])
(F64Mul (F64Const [x]) (F64Const [y])) && !math.IsNaN(x * y) => (F64Const [x * y])
(I64Eq (I64Const [x]) (I64Const [y])) && x == y => (I64Const [1])
(I64Eq (I64Const [x]) (I64Const [y])) && x != y => (I64Const [0])
(I64Ne (I64Const [x]) (I64Const [y])) && x == y => (I64Const [0])
(I64Ne (I64Const [x]) (I64Const [y])) && x != y => (I64Const [1])

(I64Shl (I64Const [x]) (I64Const [y])) => (I64Const [x << uint64(y)])
(I64ShrU (I64Const [x]) (I64Const [y])) => (I64Const [int64(uint64(x) >> uint64(y))])
(I64ShrS (I64Const [x]) (I64Const [y])) => (I64Const [x >> uint64(y)])

// TODO: declare these operations as commutative and get rid of these rules?
(I64Add (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Add y (I64Const [x]))
(I64Mul (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Mul y (I64Const [x]))
(I64And (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64And y (I64Const [x]))
(I64Or (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Or y (I64Const [x]))
(I64Xor (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Xor y (I64Const [x]))
(F64Add (F64Const [x]) y) && y.Op != OpWasmF64Const => (F64Add y (F64Const [x]))
(F64Mul (F64Const [x]) y) && y.Op != OpWasmF64Const => (F64Mul y (F64Const [x]))
(I64Eq (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Eq y (I64Const [x]))
(I64Ne (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Ne y (I64Const [x]))

(I64Eq x (I64Const [0])) => (I64Eqz x)
(I64LtU (I64Const [0]) x) => (I64Eqz (I64Eqz x))
(I64LeU x (I64Const [0])) => (I64Eqz x)
(I64LtU x (I64Const [1])) => (I64Eqz x)
(I64LeU (I64Const [1]) x) => (I64Eqz (I64Eqz x))
(I64Ne x (I64Const [0])) => (I64Eqz (I64Eqz x))

(I64Add x (I64Const [y])) => (I64AddConst [y] x)
(I64AddConst [0] x) => x
(I64Eqz (I64Eqz (I64Eqz x))) => (I64Eqz x)

// folding offset into load/store
((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem)
	&& isU32Bit(off+off2) =>
	((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem)

((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem)
	&& isU32Bit(off+off2) =>
	((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem)

// folding offset into address
(I64AddConst [off] (LoweredAddr {sym} [off2] base)) && isU32Bit(off+int64(off2)) =>
	(LoweredAddr {sym} [int32(off)+off2] base)
(I64AddConst [off] x:(SP)) && isU32Bit(off) => (LoweredAddr [int32(off)] x) // so it is rematerializeable

// transforming readonly globals into constants
(I64Load [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read64(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
(I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
(I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
(I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read8(sym, off+int64(off2)))])