1 package compiler
2
3 import (
4 "encoding/binary"
5 "math"
6 "testing"
7
8 "github.com/tetratelabs/wazero/internal/asm"
9 "github.com/tetratelabs/wazero/internal/asm/amd64"
10 "github.com/tetratelabs/wazero/internal/testing/require"
11 "github.com/tetratelabs/wazero/internal/wasm"
12 "github.com/tetratelabs/wazero/internal/wazeroir"
13 )
14
15
16
17 func TestAmd64Compiler_V128Shuffle_ConstTable_MiddleOfFunction(t *testing.T) {
18 env := newCompilerEnvironment()
19 compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler,
20 &wazeroir.CompilationResult{HasMemory: true})
21
22 err := compiler.compilePreamble()
23 require.NoError(t, err)
24
25 lanes := []uint64{1, 1, 1, 1, 0, 0, 0, 0, 10, 10, 10, 10, 0, 0, 0, 0}
26 v := [16]byte{0: 0xa, 1: 0xb, 10: 0xc}
27 w := [16]byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff}
28 exp := [16]byte{
29 0xb, 0xb, 0xb, 0xb,
30 0xa, 0xa, 0xa, 0xa,
31 0xc, 0xc, 0xc, 0xc,
32 0xa, 0xa, 0xa, 0xa,
33 }
34
35 err = compiler.compileV128Const(operationPtr(wazeroir.NewOperationV128Const(binary.LittleEndian.Uint64(v[:8]), binary.LittleEndian.Uint64(v[8:]))))
36 require.NoError(t, err)
37
38 err = compiler.compileV128Const(operationPtr(wazeroir.NewOperationV128Const(binary.LittleEndian.Uint64(w[:8]), binary.LittleEndian.Uint64(w[8:]))))
39 require.NoError(t, err)
40
41 err = compiler.compileV128Shuffle(operationPtr(wazeroir.NewOperationV128Shuffle(lanes)))
42 require.NoError(t, err)
43
44 assembler := compiler.(*amd64Compiler).assembler.(*amd64.AssemblerImpl)
45 assembler.MaxDisplacementForConstantPool = 0
46
47 err = compiler.compileReturnFunction()
48 require.NoError(t, err)
49
50 code := asm.CodeSegment{}
51 defer func() { require.NoError(t, code.Unmap()) }()
52
53
54 _, err = compiler.compile(code.NextCodeSection())
55 require.NoError(t, err)
56 env.exec(code.Bytes())
57
58 lo, hi := env.stackTopAsV128()
59 var actual [16]byte
60 binary.LittleEndian.PutUint64(actual[:8], lo)
61 binary.LittleEndian.PutUint64(actual[8:], hi)
62 require.Equal(t, exp, actual)
63 }
64
65 func TestAmd64Compiler_compileV128ShrI64x2SignedImpl(t *testing.T) {
66 x := [16]byte{
67 0, 0, 0, 0x80, 0, 0, 0, 0x80,
68 0, 0, 0, 0x80, 0, 0, 0, 0x80,
69 }
70 exp := [16]byte{
71 0, 0, 0, 0x40, 0, 0, 0, 0x80 | 0x80>>1,
72 0, 0, 0, 0x40, 0, 0, 0, 0x80 | 0x80>>1,
73 }
74 shiftAmount := uint32(1)
75
76 tests := []struct {
77 name string
78 shiftAmountSetupFn func(t *testing.T, c *amd64Compiler)
79 verifyFn func(t *testing.T, env *compilerEnv)
80 }{
81 {
82 name: "RegR10/CX not in use",
83 shiftAmountSetupFn: func(t *testing.T, c *amd64Compiler) {
84
85 loc := c.locationStack.peek()
86 oldReg, newReg := loc.register, amd64.RegR10
87 c.assembler.CompileRegisterToRegister(amd64.MOVQ, oldReg, newReg)
88 loc.setRegister(newReg)
89 c.locationStack.markRegisterUnused(oldReg)
90 c.locationStack.markRegisterUsed(newReg)
91 },
92 verifyFn: func(t *testing.T, env *compilerEnv) {},
93 },
94 {
95 name: "RegR10/CX not in use and CX is next free register",
96 shiftAmountSetupFn: func(t *testing.T, c *amd64Compiler) {
97
98 loc := c.locationStack.peek()
99 oldReg, newReg := loc.register, amd64.RegR10
100 c.assembler.CompileRegisterToRegister(amd64.MOVQ, oldReg, newReg)
101 loc.setRegister(newReg)
102 c.locationStack.markRegisterUnused(oldReg)
103 c.locationStack.markRegisterUsed(newReg)
104
105
106 newUnreservedRegs := make([]asm.Register, len(c.locationStack.unreservedVectorRegisters))
107 copy(newUnreservedRegs, c.locationStack.unreservedGeneralPurposeRegisters)
108 for i, r := range newUnreservedRegs {
109
110
111 if r == amd64.RegCX {
112 newUnreservedRegs[0], newUnreservedRegs[i] = newUnreservedRegs[i], newUnreservedRegs[0]
113 }
114 }
115 c.locationStack.unreservedGeneralPurposeRegisters = newUnreservedRegs
116 },
117 verifyFn: func(t *testing.T, env *compilerEnv) {},
118 },
119 {
120 name: "RegR10/CX in use",
121 shiftAmountSetupFn: func(t *testing.T, c *amd64Compiler) {
122
123 shiftAmountLocation := c.locationStack.pop()
124 vecReg := c.locationStack.popV128().register
125
126
127 oldReg, newReg := shiftAmountLocation.register, amd64.RegR10
128 c.assembler.CompileRegisterToRegister(amd64.MOVQ, oldReg, newReg)
129 c.locationStack.markRegisterUnused(oldReg)
130 c.locationStack.markRegisterUsed(newReg)
131
132
133 c.pushRuntimeValueLocationOnRegister(amd64.RegCX, runtimeValueTypeI32)
134 c.assembler.CompileConstToRegister(amd64.MOVQ, 100, amd64.RegCX)
135
136
137 c.pushVectorRuntimeValueLocationOnRegister(vecReg)
138 c.pushRuntimeValueLocationOnRegister(newReg, runtimeValueTypeI32)
139 },
140 verifyFn: func(t *testing.T, env *compilerEnv) {
141
142 actual := env.stack()[callFrameDataSizeInUint64]
143 require.Equal(t, uint64(100), actual)
144 },
145 },
146 {
147 name: "Stack/CX not in use",
148 shiftAmountSetupFn: func(t *testing.T, c *amd64Compiler) {
149
150 loc := c.locationStack.peek()
151 c.compileReleaseRegisterToStack(loc)
152 },
153 verifyFn: func(t *testing.T, env *compilerEnv) {},
154 },
155 {
156 name: "Stack/CX in use",
157 shiftAmountSetupFn: func(t *testing.T, c *amd64Compiler) {
158
159 shiftAmountReg := c.locationStack.pop().register
160 require.NotEqual(t, amd64.RegCX, shiftAmountReg)
161 vecReg := c.locationStack.popV128().register
162
163
164 c.pushRuntimeValueLocationOnRegister(amd64.RegCX, runtimeValueTypeI32)
165 c.assembler.CompileConstToRegister(amd64.MOVQ, 100, amd64.RegCX)
166
167
168 c.pushVectorRuntimeValueLocationOnRegister(vecReg)
169
170 loc := c.pushRuntimeValueLocationOnRegister(shiftAmountReg, runtimeValueTypeI32)
171 c.compileReleaseRegisterToStack(loc)
172 },
173 verifyFn: func(t *testing.T, env *compilerEnv) {
174
175 actual := env.stack()[callFrameDataSizeInUint64]
176 require.Equal(t, uint64(100), actual)
177 },
178 },
179 {
180 name: "CondReg/CX not in use",
181 shiftAmountSetupFn: func(t *testing.T, c *amd64Compiler) {
182
183 loc := c.locationStack.pop()
184 c.locationStack.markRegisterUnused(loc.register)
185
186
187 err := c.compileConstI32(operationPtr(wazeroir.NewOperationConstI32(0)))
188 require.NoError(t, err)
189 err = c.compileConstI32(operationPtr(wazeroir.NewOperationConstI32(0)))
190 require.NoError(t, err)
191 err = c.compileEq(operationPtr(wazeroir.NewOperationEq(wazeroir.UnsignedTypeI32)))
192 require.NoError(t, err)
193 },
194 verifyFn: func(t *testing.T, env *compilerEnv) {},
195 },
196 }
197
198 for _, tc := range tests {
199 tc := tc
200 t.Run(tc.name, func(t *testing.T) {
201 env := newCompilerEnvironment()
202 compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler,
203 &wazeroir.CompilationResult{HasMemory: true})
204
205 err := compiler.compilePreamble()
206 require.NoError(t, err)
207
208 err = compiler.compileV128Const(operationPtr(wazeroir.NewOperationV128Const(binary.LittleEndian.Uint64(x[:8]), binary.LittleEndian.Uint64(x[8:]))))
209 require.NoError(t, err)
210
211 err = compiler.compileConstI32(operationPtr(wazeroir.NewOperationConstI32(shiftAmount)))
212 require.NoError(t, err)
213
214 amdCompiler := compiler.(*amd64Compiler)
215 tc.shiftAmountSetupFn(t, amdCompiler)
216
217 err = amdCompiler.compileV128ShrI64x2SignedImpl()
218 require.NoError(t, err)
219
220 require.Equal(t, 1, len(compiler.runtimeValueLocationStack().usedRegisters.list()))
221
222 err = compiler.compileReturnFunction()
223 require.NoError(t, err)
224
225 code := asm.CodeSegment{}
226 defer func() { require.NoError(t, code.Unmap()) }()
227
228
229 _, err = compiler.compile(code.NextCodeSection())
230 require.NoError(t, err)
231 env.exec(code.Bytes())
232
233 lo, hi := env.stackTopAsV128()
234 var actual [16]byte
235 binary.LittleEndian.PutUint64(actual[:8], lo)
236 binary.LittleEndian.PutUint64(actual[8:], hi)
237 require.Equal(t, exp, actual)
238
239 tc.verifyFn(t, env)
240 })
241 }
242 }
243
244
245
246 func TestAmd64Compiler_compileV128Neg_NaNOnTemporary(t *testing.T) {
247 tests := []struct {
248 name string
249 shape wazeroir.Shape
250 v, exp [16]byte
251 }{
252 {
253 name: "f32x4",
254 shape: wazeroir.ShapeF32x4,
255 v: f32x4(51234.12341, -123, float32(math.Inf(1)), 0.1),
256 exp: f32x4(-51234.12341, 123, float32(math.Inf(-1)), -0.1),
257 },
258 {
259 name: "f32x4",
260 shape: wazeroir.ShapeF32x4,
261 v: f32x4(51234.12341, 0, float32(math.Inf(1)), 0.1),
262 exp: f32x4(-51234.12341, float32(math.Copysign(0, -1)), float32(math.Inf(-1)), -0.1),
263 },
264 {
265 name: "f64x2",
266 shape: wazeroir.ShapeF64x2,
267 v: f64x2(1.123, math.Inf(-1)),
268 exp: f64x2(-1.123, math.Inf(1)),
269 },
270 {
271 name: "f64x2",
272 shape: wazeroir.ShapeF64x2,
273 v: f64x2(0, math.Inf(-1)),
274 exp: f64x2(math.Copysign(0, -1), math.Inf(1)),
275 },
276 }
277
278 for _, tc := range tests {
279 tc := tc
280 t.Run(tc.name, func(t *testing.T) {
281 env := newCompilerEnvironment()
282 compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler,
283 &wazeroir.CompilationResult{HasMemory: true})
284
285 err := compiler.compilePreamble()
286 require.NoError(t, err)
287
288 err = compiler.compileV128Const(operationPtr(wazeroir.NewOperationV128Const(binary.LittleEndian.Uint64(tc.v[:8]), binary.LittleEndian.Uint64(tc.v[8:]))))
289 require.NoError(t, err)
290
291
292
293 err = compiler.compileV128Const(operationPtr(wazeroir.NewOperationV128Const(math.Float64bits(math.NaN()), math.Float64bits(math.NaN()))))
294 require.NoError(t, err)
295
296
297 loc := compiler.runtimeValueLocationStack().popV128()
298 compiler.runtimeValueLocationStack().markRegisterUnused(loc.register)
299
300
301 err = compiler.compileV128Neg(operationPtr(wazeroir.NewOperationV128Neg(tc.shape)))
302 require.NoError(t, err)
303
304 err = compiler.compileReturnFunction()
305 require.NoError(t, err)
306
307 code := asm.CodeSegment{}
308 defer func() { require.NoError(t, code.Unmap()) }()
309
310
311 _, err = compiler.compile(code.NextCodeSection())
312 require.NoError(t, err)
313 env.exec(code.Bytes())
314
315 require.Equal(t, nativeCallStatusCodeReturned, env.callEngine().statusCode)
316
317 lo, hi := env.stackTopAsV128()
318 var actual [16]byte
319 binary.LittleEndian.PutUint64(actual[:8], lo)
320 binary.LittleEndian.PutUint64(actual[8:], hi)
321 require.Equal(t, tc.exp, actual)
322 })
323 }
324 }
325