package compiler

import (
	"encoding/hex"
	"testing"
	"unsafe"

	"github.com/tetratelabs/wazero/internal/asm"
	"github.com/tetratelabs/wazero/internal/asm/amd64"
	"github.com/tetratelabs/wazero/internal/platform"
	"github.com/tetratelabs/wazero/internal/testing/require"
	"github.com/tetratelabs/wazero/internal/wasm"
	"github.com/tetratelabs/wazero/internal/wazeroir"
)

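// TestAmd64Compiler_indirectCallWithTargetOnCallingConvReg ensures that call_indirect
// compiles and executes correctly even when the table offset value lives on
// amd64CallingConventionDestinationFunctionModuleInstanceAddressRegister, which the
// calling convention itself also needs.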
func TestAmd64Compiler_indirectCallWithTargetOnCallingConvReg(t *testing.T) {
	code := asm.CodeSegment{}
	defer func() { require.NoError(t, code.Unmap()) }()

	env := newCompilerEnvironment()
	table := make([]wasm.Reference, 1)
	env.addTable(&wasm.TableInstance{References: table})

	// Set up the call_indirect operation and ensure the module has a matching type ID.
	operation := operationPtr(wazeroir.NewOperationCallIndirect(0, 0))
	env.module().TypeIDs = []wasm.FunctionTypeID{0}
	env.module().Engine = &moduleEngine{functions: []function{}}

	me := env.moduleEngine()
	{ // Compile the call target function and register it in the table.
		compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, nil)
		err := compiler.compilePreamble()
		require.NoError(t, err)
		err = compiler.compileReturnFunction()
		require.NoError(t, err)

		_, err = compiler.compile(code.NextCodeSection())
		require.NoError(t, err)

		executable := code.Bytes()
		makeExecutable(executable)

		f := function{
			parent:             &compiledFunction{parent: &compiledCode{executable: code}},
			codeInitialAddress: code.Addr(),
			moduleInstance:     env.moduleInstance,
			typeID:             0,
		}
		me.functions = append(me.functions, f)
		table[0] = uintptr(unsafe.Pointer(&f))
	}

	compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, &wazeroir.CompilationResult{
		Types:    []wasm.FunctionType{{}},
		HasTable: true,
	}).(*amd64Compiler)
	err := compiler.compilePreamble()
	require.NoError(t, err)

	// Place the table offset (0) on the register that the calling convention also uses
	// for the destination function's module instance address.
	offsetLoc := compiler.pushRuntimeValueLocationOnRegister(amd64CallingConventionDestinationFunctionModuleInstanceAddressRegister,
		runtimeValueTypeI32)
	compiler.assembler.CompileConstToRegister(amd64.MOVQ, 0, offsetLoc.register)

	require.NoError(t, compiler.compileCallIndirect(operation))

	err = compiler.compileReturnFunction()
	require.NoError(t, err)

	// Generate the code under test and run it.
	_, err = compiler.compile(code.NextCodeSection())
	require.NoError(t, err)
	env.exec(code.Bytes())
}

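// TestAmd64Compiler_compile_Mul_Div_Rem tests mul/div/rem with the operands placed on
// various combinations of registers (including AX and DX, which the x86 mul/div
// instructions use implicitly) and the stack.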
func TestAmd64Compiler_compile_Mul_Div_Rem(t *testing.T) {
	for _, kind := range []wazeroir.OperationKind{
		wazeroir.OperationKindMul,
		wazeroir.OperationKindDiv,
		wazeroir.OperationKindRem,
	} {
		kind := kind
		t.Run(kind.String(), func(t *testing.T) {
			t.Run("int32", func(t *testing.T) {
				tests := []struct {
					name         string
					x1Reg, x2Reg asm.Register
				}{
					{
						name:  "x1:ax,x2:random_reg",
						x1Reg: amd64.RegAX,
						x2Reg: amd64.RegR10,
					},
					{
						name:  "x1:ax,x2:stack",
						x1Reg: amd64.RegAX,
						x2Reg: asm.NilRegister,
					},
					{
						name:  "x1:random_reg,x2:ax",
						x1Reg: amd64.RegR10,
						x2Reg: amd64.RegAX,
					},
					{
						name:  "x1:stack,x2:ax",
						x1Reg: asm.NilRegister,
						x2Reg: amd64.RegAX,
					},
					{
						name:  "x1:random_reg,x2:random_reg",
						x1Reg: amd64.RegR10,
						x2Reg: amd64.RegR9,
					},
					{
						name:  "x1:stack,x2:random_reg",
						x1Reg: asm.NilRegister,
						x2Reg: amd64.RegR9,
					},
					{
						name:  "x1:random_reg,x2:stack",
						x1Reg: amd64.RegR9,
						x2Reg: asm.NilRegister,
					},
					{
						name:  "x1:stack,x2:stack",
						x1Reg: asm.NilRegister,
						x2Reg: asm.NilRegister,
					},
				}

				for _, tt := range tests {
					tc := tt
					t.Run(tc.name, func(t *testing.T) {
						env := newCompilerEnvironment()

						const x1Value uint32 = 1 << 11
						const x2Value uint32 = 51
						const dxValue uint64 = 111111

						compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newAmd64Compiler, nil).(*amd64Compiler)

						// Enlarge the location stack so that the values set up below can be
						// placed at arbitrary stack pointers.
						compiler.runtimeValueLocationStack().stack = make([]runtimeValueLocation, 100)

						err := compiler.compilePreamble()
						require.NoError(t, err)

						// Occupy the DX register with a known value so that the operation
						// must save it before using DX implicitly for div/mul.
						compiler.assembler.CompileConstToRegister(amd64.MOVQ, int64(dxValue), amd64.RegDX)
						prevOnDX := compiler.pushRuntimeValueLocationOnRegister(amd64.RegDX, runtimeValueTypeI32)

						// Place x1 and x2 either on the requested registers or on the stack.
						if tc.x1Reg != asm.NilRegister {
							compiler.assembler.CompileConstToRegister(amd64.MOVQ, int64(x1Value), tc.x1Reg)
							compiler.pushRuntimeValueLocationOnRegister(tc.x1Reg, runtimeValueTypeI32)
						} else {
							loc := compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
							loc.valueType = runtimeValueTypeI32
							env.stack()[loc.stackPointer] = uint64(x1Value)
						}
						if tc.x2Reg != asm.NilRegister {
							compiler.assembler.CompileConstToRegister(amd64.MOVQ, int64(x2Value), tc.x2Reg)
							compiler.pushRuntimeValueLocationOnRegister(tc.x2Reg, runtimeValueTypeI32)
						} else {
							loc := compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
							loc.valueType = runtimeValueTypeI32
							env.stack()[loc.stackPointer] = uint64(x2Value)
						}

						switch kind {
						case wazeroir.OperationKindDiv:
							err = compiler.compileDiv(operationPtr(wazeroir.NewOperationDiv(wazeroir.SignedTypeUint32)))
						case wazeroir.OperationKindMul:
							err = compiler.compileMul(operationPtr(wazeroir.NewOperationMul(wazeroir.UnsignedTypeI32)))
						case wazeroir.OperationKindRem:
							err = compiler.compileRem(operationPtr(wazeroir.NewOperationRem(wazeroir.SignedUint32)))
						}
						require.NoError(t, err)

						require.Equal(t, registerTypeGeneralPurpose, compiler.runtimeValueLocationStack().peek().getRegisterType())
						requireRuntimeLocationStackPointerEqual(t, uint64(2), compiler)
						require.Equal(t, 1, len(compiler.runtimeValueLocationStack().usedRegisters.list()))
						// The value previously on DX must have been moved onto the stack.
						require.True(t, prevOnDX.onStack())

						// Add the saved DX value to the result so that the final stack top
						// proves the saved value survived the operation.
						err = compiler.compileAdd(operationPtr(wazeroir.NewOperationAdd(wazeroir.UnsignedTypeI32)))
						require.NoError(t, err)
						require.NoError(t, compiler.compileReturnFunction())

						code := asm.CodeSegment{}
						defer func() { require.NoError(t, code.Unmap()) }()

						// Generate the code under test and run it.
						_, err = compiler.compile(code.NextCodeSection())
						require.NoError(t, err)

						env.exec(code.Bytes())

						// Verify the result: the operation result plus the value previously on DX.
						require.Equal(t, uint64(1), env.stackPointer())
						switch kind {
						case wazeroir.OperationKindDiv:
							require.Equal(t, x1Value/x2Value+uint32(dxValue), env.stackTopAsUint32())
						case wazeroir.OperationKindMul:
							require.Equal(t, x1Value*x2Value+uint32(dxValue), env.stackTopAsUint32())
						case wazeroir.OperationKindRem:
							require.Equal(t, x1Value%x2Value+uint32(dxValue), env.stackTopAsUint32())
						}
					})
				}
			})
			t.Run("int64", func(t *testing.T) {
				tests := []struct {
					name         string
					x1Reg, x2Reg asm.Register
				}{
					{
						name:  "x1:ax,x2:random_reg",
						x1Reg: amd64.RegAX,
						x2Reg: amd64.RegR10,
					},
					{
						name:  "x1:ax,x2:stack",
						x1Reg: amd64.RegAX,
						x2Reg: asm.NilRegister,
					},
					{
						name:  "x1:random_reg,x2:ax",
						x1Reg: amd64.RegR10,
						x2Reg: amd64.RegAX,
					},
					{
						name:  "x1:stack,x2:ax",
						x1Reg: asm.NilRegister,
						x2Reg: amd64.RegAX,
					},
					{
						name:  "x1:random_reg,x2:random_reg",
						x1Reg: amd64.RegR10,
						x2Reg: amd64.RegR9,
					},
					{
						name:  "x1:stack,x2:random_reg",
						x1Reg: asm.NilRegister,
						x2Reg: amd64.RegR9,
					},
					{
						name:  "x1:random_reg,x2:stack",
						x1Reg: amd64.RegR9,
						x2Reg: asm.NilRegister,
					},
					{
						name:  "x1:stack,x2:stack",
						x1Reg: asm.NilRegister,
						x2Reg: asm.NilRegister,
					},
				}

				for _, tt := range tests {
					tc := tt
					t.Run(tc.name, func(t *testing.T) {
						const x1Value uint64 = 1 << 35
						const x2Value uint64 = 51
						const dxValue uint64 = 111111

						env := newCompilerEnvironment()
						compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newAmd64Compiler, nil).(*amd64Compiler)

						// Enlarge the location stack so that the values set up below can be
						// placed at arbitrary stack pointers.
						compiler.runtimeValueLocationStack().stack = make([]runtimeValueLocation, 100)

						err := compiler.compilePreamble()
						require.NoError(t, err)

						// Occupy the DX register with a known value so that the operation
						// must save it before using DX implicitly for div/mul.
						compiler.assembler.CompileConstToRegister(amd64.MOVQ, int64(dxValue), amd64.RegDX)
						prevOnDX := compiler.pushRuntimeValueLocationOnRegister(amd64.RegDX, runtimeValueTypeI64)

						// Place x1 and x2 either on the requested registers or on the stack.
						if tc.x1Reg != asm.NilRegister {
							compiler.assembler.CompileConstToRegister(amd64.MOVQ, int64(x1Value), tc.x1Reg)
							compiler.pushRuntimeValueLocationOnRegister(tc.x1Reg, runtimeValueTypeI64)
						} else {
							loc := compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
							loc.valueType = runtimeValueTypeI64
							env.stack()[loc.stackPointer] = uint64(x1Value)
						}
						if tc.x2Reg != asm.NilRegister {
							compiler.assembler.CompileConstToRegister(amd64.MOVQ, int64(x2Value), tc.x2Reg)
							compiler.pushRuntimeValueLocationOnRegister(tc.x2Reg, runtimeValueTypeI64)
						} else {
							loc := compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
							loc.valueType = runtimeValueTypeI64
							env.stack()[loc.stackPointer] = uint64(x2Value)
						}

						switch kind {
						case wazeroir.OperationKindDiv:
							err = compiler.compileDiv(operationPtr(wazeroir.NewOperationDiv(wazeroir.SignedTypeInt64)))
						case wazeroir.OperationKindMul:
							err = compiler.compileMul(operationPtr(wazeroir.NewOperationMul(wazeroir.UnsignedTypeI64)))
						case wazeroir.OperationKindRem:
							err = compiler.compileRem(operationPtr(wazeroir.NewOperationRem(wazeroir.SignedUint64)))
						}
						require.NoError(t, err)

						require.Equal(t, registerTypeGeneralPurpose, compiler.runtimeValueLocationStack().peek().getRegisterType())
						requireRuntimeLocationStackPointerEqual(t, uint64(2), compiler)
						require.Equal(t, 1, len(compiler.runtimeValueLocationStack().usedRegisters.list()))
						// The value previously on DX must have been moved onto the stack.
						require.True(t, prevOnDX.onStack())

						// Add the saved DX value to the result so that the final stack top
						// proves the saved value survived the operation.
						err = compiler.compileAdd(operationPtr(wazeroir.NewOperationAdd(wazeroir.UnsignedTypeI64)))
						require.NoError(t, err)
						require.NoError(t, compiler.compileReturnFunction())

						code := asm.CodeSegment{}
						defer func() { require.NoError(t, code.Unmap()) }()

						// Generate the code under test and run it.
						_, err = compiler.compile(code.NextCodeSection())
						require.NoError(t, err)

						env.exec(code.Bytes())

						// Verify the result: the operation result plus the value previously on DX.
						switch kind {
						case wazeroir.OperationKindDiv:
							require.Equal(t, uint64(1), env.stackPointer())
							require.Equal(t, uint64(x1Value/x2Value)+dxValue, env.stackTopAsUint64())
						case wazeroir.OperationKindMul:
							require.Equal(t, uint64(1), env.stackPointer())
							require.Equal(t, uint64(x1Value*x2Value)+dxValue, env.stackTopAsUint64())
						case wazeroir.OperationKindRem:
							require.Equal(t, uint64(1), env.stackPointer())
							require.Equal(t, x1Value%x2Value+dxValue, env.stackTopAsUint64())
						}
					})
				}
			})
		})
	}
}

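// TestAmd64Compiler_readInstructionAddress exercises CompileReadInstructionAddress:
// compilation must fail when the target instruction is never emitted, and otherwise the
// read address must point right after the target instruction.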
func TestAmd64Compiler_readInstructionAddress(t *testing.T) {
	t.Run("invalid", func(t *testing.T) {
		env := newCompilerEnvironment()
		compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newAmd64Compiler, nil).(*amd64Compiler)

		err := compiler.compilePreamble()
		require.NoError(t, err)

		// Request the address of the instruction after the next JMP, but never emit a JMP.
		compiler.assembler.CompileReadInstructionAddress(amd64.RegAX, amd64.JMP)

		code := asm.CodeSegment{}
		defer func() { require.NoError(t, code.Unmap()) }()

		// Compilation must fail since there is no JMP instruction to resolve the address against.
		_, err = compiler.compile(code.NextCodeSection())
		require.Error(t, err)
	})

	t.Run("ok", func(t *testing.T) {
		env := newCompilerEnvironment()
		compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newAmd64Compiler, nil).(*amd64Compiler)

		err := compiler.compilePreamble()
		require.NoError(t, err)

		const destinationRegister = amd64.RegAX

		// Read the address of the instruction right after the next RET into the destination register.
		compiler.assembler.CompileReadInstructionAddress(destinationRegister, amd64.RET)

		// Jump to that address, effectively skipping over the RET emitted below.
		compiler.assembler.CompileJumpToRegister(amd64.JMP, destinationRegister)

		compiler.assembler.CompileStandAlone(amd64.RET)

		// If the read address was correct, execution continues here and the expected
		// value ends up on the stack.
		const expectedReturnValue uint32 = 10000
		err = compiler.compileConstI32(operationPtr(wazeroir.NewOperationConstI32(expectedReturnValue)))
		require.NoError(t, err)

		err = compiler.compileReturnFunction()
		require.NoError(t, err)

		code := asm.CodeSegment{}
		defer func() { require.NoError(t, code.Unmap()) }()

		// Generate the code under test and run it.
		_, err = compiler.compile(code.NextCodeSection())
		require.NoError(t, err)

		env.exec(code.Bytes())

		require.Equal(t, nativeCallStatusCodeReturned, env.compilerStatus())
		require.Equal(t, uint64(1), env.stackPointer())
		require.Equal(t, expectedReturnValue, env.stackTopAsUint32())
	})
}

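// TestAmd64Compiler_preventCrossedTargetRegisters ensures that
// compilePreventCrossedTargetRegisters rearranges the given locations' registers to the
// expected assignment so that they don't conflict ("cross") with the desired target
// registers, and that the returned restore function brings back the initial assignment.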
func TestAmd64Compiler_preventCrossedTargetRegisters(t *testing.T) {
	env := newCompilerEnvironment()
	compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newAmd64Compiler, nil).(*amd64Compiler)

	tests := []struct {
		initial           []*runtimeValueLocation
		desired, expected []asm.Register
	}{
		{
			initial:  []*runtimeValueLocation{{register: amd64.RegAX}, {register: amd64.RegCX}, {register: amd64.RegDX}},
			desired:  []asm.Register{amd64.RegDX, amd64.RegCX, amd64.RegAX},
			expected: []asm.Register{amd64.RegDX, amd64.RegCX, amd64.RegAX},
		},
		{
			initial:  []*runtimeValueLocation{{register: amd64.RegAX}, {register: amd64.RegCX}, {register: amd64.RegDX}},
			desired:  []asm.Register{amd64.RegDX, amd64.RegAX, amd64.RegCX},
			expected: []asm.Register{amd64.RegDX, amd64.RegAX, amd64.RegCX},
		},
		{
			initial:  []*runtimeValueLocation{{register: amd64.RegR8}, {register: amd64.RegR9}, {register: amd64.RegR10}},
			desired:  []asm.Register{amd64.RegR8, amd64.RegR9, amd64.RegR10},
			expected: []asm.Register{amd64.RegR8, amd64.RegR9, amd64.RegR10},
		},
		{
			initial:  []*runtimeValueLocation{{register: amd64.RegBX}, {register: amd64.RegDX}, {register: amd64.RegCX}},
			desired:  []asm.Register{amd64.RegR8, amd64.RegR9, amd64.RegR10},
			expected: []asm.Register{amd64.RegBX, amd64.RegDX, amd64.RegCX},
		},
		{
			initial:  []*runtimeValueLocation{{register: amd64.RegR8}, {register: amd64.RegR9}, {register: amd64.RegR10}},
			desired:  []asm.Register{amd64.RegAX, amd64.RegCX, amd64.RegR9},
			expected: []asm.Register{amd64.RegR8, amd64.RegR10, amd64.RegR9},
		},
	}

	for _, tt := range tests {
		initialRegisters := collectRegistersFromRuntimeValues(tt.initial)
		restoreCrossing := compiler.compilePreventCrossedTargetRegisters(tt.initial, tt.desired)
		// After compilePreventCrossedTargetRegisters, the locations must hold the expected registers.
		require.Equal(t, tt.expected, collectRegistersFromRuntimeValues(tt.initial))
		restoreCrossing()
		// After calling the returned function, the initial assignment must be restored.
		require.Equal(t, initialRegisters, collectRegistersFromRuntimeValues(tt.initial))
	}
}

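// mockCpuFlags implements platform.CpuFeatureFlags for tests, returning fixed feature bits.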
type mockCpuFlags struct {
	flags      uint64
	extraFlags uint64
}

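// Has implements the same method as documented on platform.CpuFeatureFlags.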
func (f *mockCpuFlags) Has(flag uint64) bool {
	return (f.flags & flag) != 0
}

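// HasExtra implements the same method as documented on platform.CpuFeatureFlags.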
func (f *mockCpuFlags) HasExtra(flag uint64) bool {
	return (f.extraFlags & flag) != 0
}

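// TestAmd64Compiler_ensureClz_ABM checks that Clz lowers to LZCNT when the ABM CPU
// feature is available, and to the longer BSR-based fallback otherwise, by comparing
// the generated machine code against the expected hex.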
func TestAmd64Compiler_ensureClz_ABM(t *testing.T) {
	tests := []struct {
		name         string
		cpuFeatures  platform.CpuFeatureFlags
		expectedCode string
	}{
		{
			name:         "with ABM",
			expectedCode: "b80a000000f3480fbdc0",
			cpuFeatures: &mockCpuFlags{
				flags:      0,
				extraFlags: platform.CpuExtraFeatureABM,
			},
		},
		{
			name:         "without ABM",
			expectedCode: "b80a0000004885c07507b840000000eb08480fbdc04883f03f",
			cpuFeatures: &mockCpuFlags{
				flags:      0,
				extraFlags: 0,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			env := newCompilerEnvironment()

			// Wrap the package-level newCompiler so that the detected CPU features are
			// overridden with the ones from the test case.
			newCompiler := func() compiler {
				c := newCompiler().(*amd64Compiler)
				c.cpuFeatures = tt.cpuFeatures
				return c
			}

			compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, nil)

			err := compiler.compileConstI32(operationPtr(wazeroir.NewOperationConstI32(10)))
			require.NoError(t, err)

			err = compiler.compileClz(operationPtr(wazeroir.NewOperationClz(wazeroir.UnsignedInt64)))
			require.NoError(t, err)

			compiler.compileNOP()

			code := asm.CodeSegment{}
			defer func() { require.NoError(t, code.Unmap()) }()

			buf := code.NextCodeSection()
			_, err = compiler.compile(buf)
			require.NoError(t, err)
			require.Equal(t, tt.expectedCode, hex.EncodeToString(buf.Bytes()))
		})
	}
}

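// TestAmd64Compiler_ensureCtz_ABM checks that Ctz lowers to a single TZCNT when the ABM
// CPU feature is available, and to a fallback with an explicit zero check otherwise.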
func TestAmd64Compiler_ensureCtz_ABM(t *testing.T) {
	tests := []struct {
		name         string
		cpuFeatures  platform.CpuFeatureFlags
		expectedCode string
	}{
		{
			name:         "with ABM",
			expectedCode: "b80a000000f3480fbcc0",
			cpuFeatures: &mockCpuFlags{
				flags:      0,
				extraFlags: platform.CpuExtraFeatureABM,
			},
		},
		{
			name:         "without ABM",
			expectedCode: "b80a0000004885c07507b840000000eb05f3480fbcc0",
			cpuFeatures: &mockCpuFlags{
				flags:      0,
				extraFlags: 0,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			env := newCompilerEnvironment()

			// Wrap the package-level newCompiler so that the detected CPU features are
			// overridden with the ones from the test case.
			newCompiler := func() compiler {
				c := newCompiler().(*amd64Compiler)
				c.cpuFeatures = tt.cpuFeatures
				return c
			}

			compiler := env.requireNewCompiler(t, &wasm.FunctionType{}, newCompiler, nil)

			err := compiler.compileConstI32(operationPtr(wazeroir.NewOperationConstI32(10)))
			require.NoError(t, err)

			err = compiler.compileCtz(operationPtr(wazeroir.NewOperationCtz(wazeroir.UnsignedInt64)))
			require.NoError(t, err)

			compiler.compileNOP()

			code := asm.CodeSegment{}
			defer func() { require.NoError(t, code.Unmap()) }()

			buf := code.NextCodeSection()
			_, err = compiler.compile(buf)
			require.NoError(t, err)
			require.Equal(t, tt.expectedCode, hex.EncodeToString(buf.Bytes()))
		})
	}
}

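// collectRegistersFromRuntimeValues returns the registers assigned to the given locations.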
func collectRegistersFromRuntimeValues(locs []*runtimeValueLocation) []asm.Register {
	out := make([]asm.Register, len(locs))
	for i := range locs {
		out[i] = locs[i].register
	}
	return out
}

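// setStackPointerCeil sets the stack pointer ceiling on the compiler; used by tests.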
func (c *amd64Compiler) setStackPointerCeil(v uint64) {
	c.stackPointerCeil = v
}

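// setRuntimeValueLocationStack swaps in the given location stack; used by tests.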
func (c *amd64Compiler) setRuntimeValueLocationStack(s *runtimeValueLocationStack) {
	c.locationStack = s
}

func TestAmd64Compiler_label(t *testing.T) {
	c := &amd64Compiler{}
	c.label(wazeroir.NewLabel(wazeroir.LabelKindContinuation, 100))
	require.Equal(t, 100, c.frameIDMax)
	require.Equal(t, 101, len(c.labels[wazeroir.LabelKindContinuation]))

	// frameIDMax applies across all label kinds, so a smaller frame ID must not change it.
	c.label(wazeroir.NewLabel(wazeroir.LabelKindHeader, 2))
	require.Equal(t, 100, c.frameIDMax)
	require.Equal(t, 3, len(c.labels[wazeroir.LabelKindHeader]))
}

func TestAmd64Compiler_Init(t *testing.T) {
	c := &amd64Compiler{
		locationStackForEntrypoint: newRuntimeValueLocationStack(),
		assembler:                  amd64.NewAssembler(),
	}
	const stackCap = 12345
	c.locationStackForEntrypoint.stack = make([]runtimeValueLocation, stackCap)
	c.locationStackForEntrypoint.sp = 5555

	c.Init(&wasm.FunctionType{}, nil, false)

	// After Init, the location stack must point at the entrypoint location stack.
	require.Equal(t, c.locationStack, &c.locationStackForEntrypoint)
	// The underlying slice must be reused rather than reallocated.
	require.Equal(t, stackCap, cap(c.locationStack.stack))
	require.Equal(t, stackCap, cap(c.locationStackForEntrypoint.stack))
}

func TestAmd64Compiler_resetLabels(t *testing.T) {
	c := newAmd64Compiler().(*amd64Compiler)
	nop := c.compileNOP()

	const (
		frameIDMax = 50
		capacity   = 12345
	)
	c.frameIDMax = frameIDMax
	for i := range c.labels {
		ifs := make([]amd64LabelInfo, frameIDMax*2)
		c.labels[i] = ifs
		for j := 0; j <= frameIDMax; j++ {
			ifs[j].stackInitialized = true
			ifs[j].initialInstruction = nop
			ifs[j].initialStack = newRuntimeValueLocationStack()
			ifs[j].initialStack.sp = 5555
			ifs[j].initialStack.stack = make([]runtimeValueLocation, 0, capacity)
		}
	}
	c.resetLabels()
	for i := range c.labels {
		for j := 0; j < len(c.labels[i]); j++ {
			l := &c.labels[i][j]
			require.False(t, l.stackInitialized)
			require.Nil(t, l.initialInstruction)
			require.Equal(t, 0, len(l.initialStack.stack))
			if j > frameIDMax {
				// Labels above frameIDMax were never initialized and must remain empty.
				require.Equal(t, 0, cap(l.initialStack.stack))
			} else {
				// Labels up to frameIDMax keep their allocated capacity for reuse.
				require.Equal(t, capacity, cap(l.initialStack.stack))
			}
			require.Equal(t, uint64(0), l.initialStack.sp)
		}
	}
}

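// TestAmd64Compiler_getSavedTemporaryLocationStack covers the cases where the temporary
// slice used for br_table (brTableTmp) is smaller than, as large as, or larger than the
// current location stack.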
func TestAmd64Compiler_getSavedTemporaryLocationStack(t *testing.T) {
	t.Run("len(brTableTmp)<len(current)", func(t *testing.T) {
		st := newRuntimeValueLocationStack()
		c := &amd64Compiler{locationStack: &st}

		c.locationStack.sp = 3
		c.locationStack.stack = []runtimeValueLocation{{stackPointer: 150}, {stackPointer: 200}, {stackPointer: 300}}

		actual := c.getSavedTemporaryLocationStack()
		require.Equal(t, uint64(3), actual.sp)
		require.Equal(t, 3, len(actual.stack))
		require.Equal(t, c.locationStack.stack[:3], actual.stack)
	})
	t.Run("len(brTableTmp)==len(current)", func(t *testing.T) {
		st := newRuntimeValueLocationStack()
		c := &amd64Compiler{locationStack: &st, brTableTmp: make([]runtimeValueLocation, 3)}
		initSlicePtr := &c.brTableTmp

		c.locationStack.sp = 3
		c.locationStack.stack = []runtimeValueLocation{{stackPointer: 150}, {stackPointer: 200}, {stackPointer: 300}}

		actual := c.getSavedTemporaryLocationStack()
		require.Equal(t, uint64(3), actual.sp)
		require.Equal(t, 3, len(actual.stack))
		require.Equal(t, c.locationStack.stack[:3], actual.stack)
		// The underlying temporary slice shouldn't have been replaced.
		require.Equal(t, initSlicePtr, &c.brTableTmp)
	})

	t.Run("len(brTableTmp)>len(current)", func(t *testing.T) {
		const temporarySliceSize = 100
		st := newRuntimeValueLocationStack()
		c := &amd64Compiler{locationStack: &st, brTableTmp: make([]runtimeValueLocation, temporarySliceSize)}

		c.locationStack.sp = 3
		c.locationStack.stack = []runtimeValueLocation{
			{stackPointer: 150},
			{stackPointer: 200},
			{stackPointer: 300},
			{},
			{},
			{},
			{},
			{stackPointer: 1231455},
		}

		actual := c.getSavedTemporaryLocationStack()
		require.Equal(t, uint64(3), actual.sp)
		require.Equal(t, temporarySliceSize, len(actual.stack))
		require.Equal(t, c.locationStack.stack[:3], actual.stack[:3])
		for i := int(actual.sp); i < len(actual.stack); i++ {
			// Entries above the current stack pointer must not be copied.
			require.Zero(t, actual.stack[i].stackPointer)
		}
	})
}
745
View as plain text