1
2
3
18
19 package encoder
20
21 import (
22 `fmt`
23 `reflect`
24 `strconv`
25 `unsafe`
26
27 `github.com/bytedance/sonic/internal/cpu`
28 `github.com/bytedance/sonic/internal/jit`
29 `github.com/bytedance/sonic/internal/native/types`
30 `github.com/twitchyliquid64/golang-asm/obj`
31 `github.com/twitchyliquid64/golang-asm/obj/x86`
32
33 `github.com/bytedance/sonic/internal/native`
34 `github.com/bytedance/sonic/internal/rt`
35 )
36
37
55
56
67
// Flag-bit indices within the _SP_f flags register.
const (
    _S_cond = iota // "condition" flag, set by _OP_cond_set / tested by _OP_cond_testc
    _S_init        // "first iteration" flag for slice loops (set by _OP_slice_len)
)

// Stack-frame area sizes (bytes) for the generated function.
const (
    _FP_args   = 32 // reserved for incoming register arguments
    _FP_fargs  = 40 // reserved for outgoing call arguments
    _FP_saves  = 64 // register save area used by xsave/xload
    _FP_locals = 24 // local variable slots (_VAR_sp / _VAR_dn / _VAR_vp)
)

// Derived frame offsets.
const (
    _FP_loffs = _FP_fargs + _FP_saves  // start of the locals area
    _FP_offs  = _FP_loffs + _FP_locals // slot where the caller's BP is saved

    _FP_size = _FP_offs + 8 // total frame size (including saved BP)
    _FP_base = _FP_size + 8 // offset of incoming stack arguments (past return address)
)

// IEEE-754 exponent masks, used to reject NaN / Inf before formatting floats.
const (
    _FM_exp32 = 0x7f800000         // float32 exponent bits
    _FM_exp64 = 0x7ff0000000000000 // float64 exponent bits
)

// Little-endian byte-string immediates stored directly into the output buffer.
const (
    _IM_null   = 0x6c6c756e // "null"
    _IM_true   = 0x65757274 // "true"
    _IM_fals   = 0x736c6166 // "fals" (trailing 'e' written separately)
    _IM_open   = 0x00225c22 // `"\"` + NUL; opening sequence for double-quoted strings
    _IM_array  = 0x5d5b     // "[]"
    _IM_object = 0x7d7b     // "{}"
    _IM_mulv   = -0x5555555555555555 // multiply-high magic used to size base64 output in _asm_OP_bin
)

// Labels for the buffer-growing trampoline.
const (
    _LB_more_space        = "_more_space"         // shared buffer-growth stub
    _LB_more_space_return = "_more_space_return_" // per-call-site return label prefix
)

// Labels for the shared error / panic exits emitted by builtins().
const (
    _LB_error                 = "_error"                 // common error epilogue
    _LB_error_too_deep        = "_error_too_deep"        // recursion depth exceeded
    _LB_error_invalid_number  = "_error_invalid_number"  // json.Number validation failed
    _LB_error_nan_or_infinite = "_error_nan_or_infinite" // float is NaN or ±Inf
    _LB_panic                 = "_panic"                 // unrecoverable caller bug
)
115
// General-purpose scratch registers.
var (
    _AX = jit.Reg("AX")
    _BX = jit.Reg("BX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
)

// Vector registers. X15 must be re-zeroed after C calls (see call_c).
var (
    _X0  = jit.Reg("X0")
    _X15 = jit.Reg("X15")
    _Y0  = jit.Reg("Y0")
)

// Registers pinned across the whole generated function:
var (
    _ST = jit.Reg("R15") // state stack base
    _RP = jit.Reg("DI")  // output buffer: data pointer (must be DI, see rbuf_di)
    _RL = jit.Reg("SI")  // output buffer: current length
    _RC = jit.Reg("DX")  // output buffer: capacity
)

// Call/error conventions.
var (
    _LR = jit.Reg("R9") // link register for the more_space trampoline (must be R9)
    _ET = jit.Reg("AX") // error interface: type word
    _EP = jit.Reg("BX") // error interface: data word
)

// Per-instruction encoder state.
var (
    _SP_p = jit.Reg("R10") // pointer to the value currently being encoded
    _SP_q = jit.Reg("R11") // secondary pointer (map iterator / scratch)
    _SP_x = jit.Reg("R12") // counter (remaining slice elements)
    _SP_f = jit.Reg("R13") // flag bits (_S_cond / _S_init)
)

// Stack slots holding the incoming arguments.
var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)      // *buffer (ptr/len/cap header)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)  // value pointer
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16) // state stack base
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24) // flag bits
)

// Error return registers (aliases of _ET/_EP).
var (
    _RET_et = _ET
    _RET_ep = _EP
)

// Local variable slots in the frame's locals area.
var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)      // string offset already quoted (encode_string)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)  // destination byte count for native quote
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) // spilled value pointer (_asm_OP_recurse)
)

// Register sets to preserve around the various call flavors (see call_* helpers).
var (
    _REG_ffi = []obj.Addr{ _RP, _RL, _RC, _SP_q} // restored after native (C ABI) calls
    _REG_b64 = []obj.Addr{_SP_p, _SP_q}          // preserved around the base64 subroutine

    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC} // around general Go calls
    _REG_ms  = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}          // around the more_space stub
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}          // around recursive encoder calls
)
181
// _Assembler translates a compiled encoder _Program into executable machine
// code, one instruction at a time, via the embedded jit.BaseAssembler.
type _Assembler struct {
    jit.BaseAssembler
    p    _Program // the instruction sequence to compile
    x    int      // counter for generating unique more_space return labels
    name string   // suffix for the generated function's symbol name
}
188
189 func newAssembler(p _Program) *_Assembler {
190 return new(_Assembler).Init(p)
191 }
192
193
194
// Load assembles the program (if not already done) and returns the generated
// machine code as a callable _Encoder function.
func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}
198
// Init stores the program and registers compile as the lazy assembly
// callback on the base assembler. Returns self for chaining.
func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}
204
// compile emits the whole function: frame setup, the program body, the
// epilogue (normal + error return), and the shared builtin stubs.
func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}
211
212
213
// _OpFuncTab maps each opcode to the method that emits its machine code.
// Entries left nil cause instr() to panic on an unknown opcode.
var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_null           : (*_Assembler)._asm_OP_null,
    _OP_empty_arr      : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj      : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool           : (*_Assembler)._asm_OP_bool,
    _OP_i8             : (*_Assembler)._asm_OP_i8,
    _OP_i16            : (*_Assembler)._asm_OP_i16,
    _OP_i32            : (*_Assembler)._asm_OP_i32,
    _OP_i64            : (*_Assembler)._asm_OP_i64,
    _OP_u8             : (*_Assembler)._asm_OP_u8,
    _OP_u16            : (*_Assembler)._asm_OP_u16,
    _OP_u32            : (*_Assembler)._asm_OP_u32,
    _OP_u64            : (*_Assembler)._asm_OP_u64,
    _OP_f32            : (*_Assembler)._asm_OP_f32,
    _OP_f64            : (*_Assembler)._asm_OP_f64,
    _OP_str            : (*_Assembler)._asm_OP_str,
    _OP_bin            : (*_Assembler)._asm_OP_bin,
    _OP_quote          : (*_Assembler)._asm_OP_quote,
    _OP_number         : (*_Assembler)._asm_OP_number,
    _OP_eface          : (*_Assembler)._asm_OP_eface,
    _OP_iface          : (*_Assembler)._asm_OP_iface,
    _OP_byte           : (*_Assembler)._asm_OP_byte,
    _OP_text           : (*_Assembler)._asm_OP_text,
    _OP_deref          : (*_Assembler)._asm_OP_deref,
    _OP_index          : (*_Assembler)._asm_OP_index,
    _OP_load           : (*_Assembler)._asm_OP_load,
    _OP_save           : (*_Assembler)._asm_OP_save,
    _OP_drop           : (*_Assembler)._asm_OP_drop,
    _OP_drop_2         : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse        : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil         : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1      : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1      : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2      : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4      : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8      : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map    : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto           : (*_Assembler)._asm_OP_goto,
    _OP_map_iter       : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop       : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key  : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key  : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len      : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next     : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal        : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p      : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text   : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set       : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc     : (*_Assembler)._asm_OP_cond_testc,
}
266
267 func (self *_Assembler) instr(v *_Instr) {
268 if fn := _OpFuncTab[v.op()]; fn != nil {
269 fn(self, v)
270 } else {
271 panic(fmt.Sprintf("invalid opcode: %d", v.op()))
272 }
273 }
274
275 func (self *_Assembler) instrs() {
276 for i, v := range self.p {
277 self.Mark(i)
278 self.instr(&v)
279 self.debug_instr(i, &v)
280 }
281 }
282
// builtins emits the shared out-of-line stubs that the program body
// jumps to: buffer growth, the three error exits, and the panic path.
func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}
290
// epilogue emits the function exit. The success path clears the error
// pair and falls through into the _error label, which is also the target
// of every error jump; both paths then flush the final length back into
// the buffer header, clear the (pointer-holding) argument slots, restore
// BP, pop the frame, and return.
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))                          // branch target for jumps past the last instruction
    self.Emit("XORL", _ET, _ET)                     // success: nil error type
    self.Emit("XORL", _EP, _EP)                     // success: nil error value
    self.Link(_LB_error)                            // error path joins here with ET/EP already set
    self.Emit("MOVQ", _ARG_rb, _CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))         // write final length into the buffer header
    self.Emit("MOVQ", jit.Imm(0), _ARG_rb)          // zero pointer-bearing arg slots
    self.Emit("MOVQ", jit.Imm(0), _ARG_vp)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sb)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // restore caller's frame pointer
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // pop our frame
    self.Emit("RET")
}
305
// prologue emits frame setup: allocate the frame, save/point BP, spill
// the register arguments (AX=rb, BX=vp, CX=sb, DI=fv) into their stack
// slots, load the buffer header (ptr/len/cap) into the pinned RP/RL/RC
// registers, and zero the per-instruction state registers.
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // allocate the frame
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // save caller's BP
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // establish our frame pointer
    self.Emit("MOVQ", _AX, _ARG_rb)                 // spill args so xsave/calls can't lose them
    self.Emit("MOVQ", _BX, _ARG_vp)
    self.Emit("MOVQ", _CX, _ARG_sb)
    self.Emit("MOVQ", _DI, _ARG_fv)
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)         // buffer data pointer
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)         // buffer length
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)        // buffer capacity
    self.Emit("MOVQ", _BX, _SP_p)                   // current value pointer
    self.Emit("MOVQ", _CX, _ST)                     // state stack base
    self.Emit("XORL", _SP_x, _SP_x)                 // clear counter / flags / secondary pointer
    self.Emit("XORL", _SP_f, _SP_f)
    self.Emit("XORL", _SP_q, _SP_q)
}
323
324
325
326 func (self *_Assembler) xsave(reg ...obj.Addr) {
327 for i, v := range reg {
328 if i > _FP_saves / 8 - 1 {
329 panic("too many registers to save")
330 } else {
331 self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
332 }
333 }
334 }
335
336 func (self *_Assembler) xload(reg ...obj.Addr) {
337 for i, v := range reg {
338 if i > _FP_saves / 8 - 1 {
339 panic("too many registers to load")
340 } else {
341 self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
342 }
343 }
344 }
345
346 func (self *_Assembler) rbuf_di() {
347 if _RP.Reg != x86.REG_DI {
348 panic("register allocation messed up: RP != DI")
349 } else {
350 self.Emit("ADDQ", _RL, _RP)
351 }
352 }
353
// store_int emits code that formats the integer at *_SP_p into the output
// buffer via the native routine fn. nd is the worst-case output length
// reserved up front; ins is the load instruction (width + sign extension)
// that moves the value into SI for the C ABI.
func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()                               // the native call clobbers the FFI register set
    self.rbuf_di()                              // DI = write position
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI)      // SI = the integer value
    self.call_c(fn)
    self.Emit("ADDQ", _AX, _RL)                 // advance length by bytes written (returned in AX)
}
362
// store_str emits code that copies the literal s into the output buffer at
// the current write position, using the widest stores possible: 8-byte
// chunks, then one optional 4-, 2-, and 1-byte tail store. It does NOT
// reserve space or advance _RL — callers do both (see add_text).
func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    // 8-byte chunks.
    for i <= len(m) - 8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)        // 64-bit immediates go via a register
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i)))
        i += 8
    }

    // 4-byte tail.
    if i <= len(m) - 4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))
        i += 4
    }

    // 2-byte tail.
    if i <= len(m) - 2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))
        i += 2
    }

    // final single byte, if any.
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i)))
    }
}
391
// check_size ensures the buffer has room for n more bytes (constant size).
func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}

// check_size_r ensures the buffer has room for (r + d) more bytes, where
// the required size is held in register r at runtime.
func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}
399
// check_size_rl emits the generic capacity check: compute the required
// total length (addressing expression v) into AX, and if it exceeds the
// capacity RC, jump to the shared grow stub, which returns to a label
// unique to this call site (numbered by self.x).
func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    // slice_grow_ax loads the return address into R9 via raw opcode bytes,
    // so _LR must actually be R9.
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    // one unique return label per check site
    self.x++
    self.Emit("LEAQ", v, _AX)        // AX = required total length
    self.Emit("CMPQ", _AX, _RC)
    self.Sjmp("JBE" , key)           // enough room: skip the grow call
    self.slice_grow_ax(key)
    self.Link(key)
}
417
// slice_grow_ax emits a tail-jump to the shared more_space stub with the
// required length already in AX. The raw bytes 4c 8d 0d are
// "LEAQ rip+disp32, R9": they load the address of the ret label into the
// link register so more_space can jump back here when done.
func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ ?(PC), R9
    self.Sref(ret, 4)                   // rip-relative displacement to the return label
    self.Sjmp("JMP" , _LB_more_space)
}
423
424
425
// Size of one saved-state record and the byte limit of the state stack
// (recursion deeper than _MaxStack frames aborts with _LB_error_too_deep).
const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))
    _StackLimit = _MaxStack * _StateSize
)
430
// save_state emits code that pushes the current encoder state
// (_SP_x, _SP_f, _SP_p, _SP_q) onto the state stack at _ST. The first
// word at _ST holds the current stack offset; exceeding _StackLimit
// jumps to the too-deep error exit. Pointer slots go through WritePtr
// so the runtime can track them.
func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // CX = current stack offset
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9)    // R9 = offset after the push
    self.Emit("CMPQ", _R9, jit.Imm(_StackLimit))
    self.Sjmp("JAE" , _LB_error_too_deep)               // depth limit exceeded
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16))
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))   // pointer slots: GC-visible stores
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))
    self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0))             // commit the new offset
}
442
// drop_state emits code that pops decr bytes of saved state off the state
// stack, reloads _SP_x/_SP_f/_SP_p/_SP_q from the record now on top, and
// zeroes the vacated slots (two 16-byte stores) so stale pointers are not
// kept alive for the GC.
func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)                // AX = current stack offset
    self.Emit("SUBQ" , jit.Imm(decr), _AX)                  // pop
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))                // commit the new offset
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)      // restore the saved state
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q)
    self.Emit("PXOR" , _X0, _X0)                            // clear the vacated record
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))
}
455
456
457
// add_char appends one byte at the current write position and bumps the
// length. Callers must have reserved space (check_size) beforehand.
func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(1), _RL)
}

// add_long stores a 4-byte little-endian immediate at the write position
// but advances the length by only n bytes (n <= 4), letting callers emit
// short sequences like `"\"` from a single 32-bit store.
func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(n), _RL)
}

// add_text appends the literal ss (space must already be reserved) and
// advances the length by len(ss).
func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)
}
472
473
// prep_buffer_AX loads the buffer header pointer into AX (first Go-ABI
// argument register) after flushing the live length _RL back into it, so
// a callee sees an up-to-date buffer.
func (self *_Assembler) prep_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))
}

// save_buffer writes all three live buffer registers (ptr/len/cap) back
// into the buffer header.
func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)
    self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16))
}

// load_buffer_AX reloads ptr/len/cap from the buffer header — required
// after any call that may have reallocated or appended to the buffer.
func (self *_Assembler) load_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)
}
493
494
495
// call emits an indirect call through the link register.
func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _LR)
    self.Rjmp("CALL", _LR)
}

// save_c spills the registers a native (C ABI) call would clobber; the
// matching restore happens inside call_c.
func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...)
}

// call_b64 invokes the base64 subroutine, preserving the value pointers
// it does not maintain.
func (self *_Assembler) call_b64(pc obj.Addr) {
    self.xsave(_REG_b64...)
    self.call(pc)
    self.xload(_REG_b64...)
}

// call_c performs a native (C ABI) call. _SP_p (R10) is parked in BX —
// presumably because BX is callee-saved under the C ABI while R10 is not
// — then the FFI register set is restored and X15 is re-zeroed, since the
// Go ABI expects X15 to be zero.
func (self *_Assembler) call_c(pc obj.Addr) {
    self.Emit("XCHGQ", _SP_p, _BX)
    self.call(pc)
    self.xload(_REG_ffi...)
    self.Emit("XCHGQ", _SP_p, _BX)
    self.Emit("XORPS", _X15, _X15)
}

// call_go performs a regular Go call, preserving the full pinned state.
func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...)
    self.call(pc)
    self.xload(_REG_all...)
}

// call_more_space invokes the buffer-grow stub's target, additionally
// preserving the link register it must return through.
func (self *_Assembler) call_more_space(pc obj.Addr) {
    self.xsave(_REG_ms...)
    self.call(pc)
    self.xload(_REG_ms...)
}

// call_encoder invokes a recursive encoder call, preserving everything
// except the buffer registers (reloaded afterwards by load_buffer_AX).
func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...)
    self.call(pc)
    self.xload(_REG_enc...)
}
536
// call_marshaler dispatches a Marshaler call according to the value's
// kind: interfaces need a runtime type assertion (call_marshaler_i);
// pointers and maps hold the pointer directly; other kinds dereference
// only when the type is stored indirectly.
func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
    switch vt.Kind() {
    case reflect.Interface     : self.call_marshaler_i(fn, it)
    case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)
    // inline the vt.Kind() switch to avoid unnecessary dereferencing
    default                    : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
    }
}
545
// call_marshaler_i emits a Marshaler call for an interface-typed value:
// a nil interface (or one that fails the runtime assertI2I check against
// the marshaler interface type it) is encoded as "null"; otherwise the
// asserted itab/value pair is passed to the encode function fn, with the
// shared error exit taken on a non-nil returned error.
func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)      // AX = interface type word
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ"   , "_null_{n}")                 // nil interface -> "null"
    self.Emit("MOVQ" , _AX, _BX)                    // assertI2I args: AX=target type, BX=dyn type, CX=data
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)
    self.Emit("MOVQ" , jit.Gtype(it), _AX)
    self.call_go(_F_assertI2I)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ"   , "_null_{n}")                 // assertion failed -> "null"
    self.Emit("MOVQ", _BX, _CX)                     // CX = value, BX = asserted itab
    self.Emit("MOVQ", _AX, _BX)
    self.prep_buffer_AX()                           // AX = buffer header
    self.Emit("MOVQ", _ARG_fv, _DI)                 // DI = encoder flags
    self.call_go(fn)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()                           // callee may have reallocated the buffer
    self.Sjmp("JMP"  , "_done_{n}")
    self.Link("_null_{n}")
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))   // write "null"
    self.Emit("ADDQ", jit.Imm(4), _RL)
    self.Link("_done_{n}")
}
571
// call_marshaler_v emits a Marshaler call for a concrete (non-interface)
// value: args are buffer (AX), the precomputed itab for (it, vt) (BX),
// the value pointer (CX, dereferenced once when deref is set), and the
// encoder flags (DI). A non-nil returned error takes the shared error
// exit; otherwise the buffer registers are reloaded.
func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer_AX()
    self.Emit("MOVQ", jit.Itab(it, vt), _BX)

    // dereference the pointer if needed
    if !deref {
        self.Emit("MOVQ", _SP_p, _CX)
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX)
    }

    // call the encoder, and perform error checks
    self.Emit("MOVQ", _ARG_fv, _DI)
    self.call_go(fn)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}
590
591
592
// Runtime helpers for growing the output buffer.
var (
    _T_byte      = jit.Type(byteType)   // element type passed to growslice
    _F_growslice = jit.Func(growslice)
)
597
598
// more_space emits the shared buffer-grow stub. On entry AX holds the
// required total length and _LR the caller's return address (set up by
// slice_grow_ax). It shuffles the live buffer registers into growslice's
// argument registers (type, ptr, len, cap, needed), calls it, moves the
// new header back into RP/RL/RC, persists it, and jumps back through _LR.
func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _RP, _BX)             // growslice args: BX=ptr, CX=len, DI=cap, SI=needed
    self.Emit("MOVQ", _RL, _CX)
    self.Emit("MOVQ", _RC, _DI)
    self.Emit("MOVQ", _AX, _SI)
    self.Emit("MOVQ", _T_byte, _AX)         // AX = element type (byte)
    self.call_more_space(_F_growslice)
    self.Emit("MOVQ", _AX, _RP)             // returned slice header -> pinned buffer regs
    self.Emit("MOVQ", _BX, _RL)
    self.Emit("MOVQ", _CX, _RC)
    self.save_buffer()                      // keep the header in memory in sync
    self.Rjmp("JMP" , _LR)                  // return to the check site
}
613
614
615
// Preconstructed error values and the itab for json.UnsupportedValueError,
// loaded as immediates by the error stubs below.
var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)
621
// error_too_deep emits the stub that loads the preallocated "too deep"
// error into the ET/EP pair and jumps to the common error epilogue.
func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

// error_invalid_number emits the stub that builds an invalid-number error
// from the string at *_SP_p (ptr in AX, len in BX) via error_number,
// which returns the error in ET/EP, then jumps to the error epilogue.
func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX)
    self.call_go(_F_error_number)
    self.Sjmp("JMP" , _LB_error)
}

// error_nan_or_infinite emits the stub for the NaN/Inf rejection path,
// mirroring error_too_deep with the corresponding preallocated error.
func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}
643
644
645
// Native string-quoting routine and the Go panic helper.
var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)

// go_panic emits the stub that raises a Go panic; callers load a panic
// reason code into AX before jumping here, and the value pointer is
// passed in BX.
func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, _BX)
    self.call_go(_F_panic)
}
656
// encode_string emits code that writes the string at *_SP_p (ptr, len)
// as a JSON string, escaping via the native quote routine. With
// doubleQuote set, the output itself is wrapped in escaped quotes
// (`"\"...\""`), used for map keys that must be re-quoted. A non-nil-len
// string with a nil data pointer panics. The quote loop retries with a
// doubled buffer whenever the native routine reports insufficient space
// (negative return), resuming from the portion already consumed.
func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)      // AX = string length
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ"   , "_str_empty_{n}")            // empty string fast path
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE"  , "_str_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP", _LB_panic)                     // nil ptr + nonzero len: caller bug
    self.Link("_str_next_{n}")

    // opening quote(s): 2 resp. 6 bytes reserved to also cover the close
    if !doubleQuote {
        self.check_size_r(_AX, 2)
        self.add_char('"')
    } else {
        self.check_size_r(_AX, 6)
        self.add_long(_IM_open, 3)                  // writes `"\"`
    }

    // _VAR_sp = source bytes already quoted (resume point after a grow)
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVQ", _AX, _VAR_sp)
    self.Link("_str_loop_{n}")
    self.save_c()

    // set up args for native quote:
    //   DI = src ptr (+sp), SI = src len (-sp), DX = dst ptr, CX = &dn, R8 = flags
    self.Emit("MOVQ", _RC, _CX)
    self.Emit("SUBQ", _RL, _CX)                     // CX = space remaining in the buffer
    self.Emit("MOVQ", _CX, _VAR_dn)                 // dn is an in/out parameter
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // DX = current write position
    self.Emit("LEAQ", _VAR_dn, _CX)
    self.Emit("MOVQ", _VAR_sp, _AX)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)
    self.Emit("ADDQ", _AX, _DI)                     // skip the part already quoted
    self.Emit("SUBQ", _AX, _SI)

    // escape inner quotes too when double-quoting
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)
    }

    // call the native quoter; dn now holds the bytes written
    self.call_c(_F_quote)
    self.Emit("ADDQ" , _VAR_dn, _RL)

    // negative return means the output buffer ran out mid-string
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS"   , "_str_space_{n}")

    // success: closing quote(s)
    if !doubleQuote {
        self.check_size(1)
        self.add_char('"')
        self.Sjmp("JMP", "_str_end_{n}")
    } else {
        self.check_size(3)
        self.add_text("\\\"\"")                     // emits \""
        self.Sjmp("JMP", "_str_end_{n}")
    }

    // out of space: AX = ~(bytes consumed); record progress, double the
    // buffer (grow to 2*RC) and retry from where we stopped
    self.Link("_str_space_{n}")
    self.Emit("NOTQ", _AX)
    self.Emit("ADDQ", _AX, _VAR_sp)
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // AX = RC * 2
    self.slice_grow_ax("_str_loop_{n}")

    // empty-string cases: `""` or `"\"\""`
    if !doubleQuote {
        self.Link("_str_empty_{n}")
        self.check_size(2)
        self.add_text("\"\"")
        self.Link("_str_end_{n}")
    } else {
        self.Link("_str_empty_{n}")
        self.check_size(6)
        self.add_text("\"\\\"\\\"\"")
        self.Link("_str_end_{n}")
    }
}
740
741
742
// Unpacked runtime types of the two marshaler interfaces.
var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

// Native (C ABI) formatting subroutines.
var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

// Go helpers used by the number opcode.
var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

// Go helpers backing the map iteration opcodes.
var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

// Resolved lazily in init() below.
var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

// Mode flag passed to the base64 subroutine selecting the AVX2 path.
const (
    _MODE_AVX2 = 1 << 2
)

// init resolves the addresses of the recursive encoder entry points.
func init() {
    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}
783
// _asm_OP_null writes the literal "null".
func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
}

// _asm_OP_empty_arr writes "[]" when the NoNullSliceOrMap flag is set,
// otherwise "null".
func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}

// _asm_OP_empty_obj writes "{}" when the NoNullSliceOrMap flag is set,
// otherwise "null".
func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}

// _asm_OP_bool writes "true" or "false" from the byte at *_SP_p.
// "false" is emitted as a 4-byte "fals" store plus a single 'e'.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JE"  , "_false_{n}")
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
    self.Sjmp("JMP" , "_end_{n}")
    self.Link("_false_{n}")
    self.check_size(5)
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))
    self.Emit("ADDQ", jit.Imm(5), _RL)
    self.Link("_end_{n}")
}
828
// Integer opcodes: each delegates to store_int with the reserved output
// size and the appropriately-sized (and sign/zero-extending) load.

func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}
860
// _asm_OP_f32 formats the float32 at *_SP_p via the native f32toa. A
// value whose exponent bits are all ones (NaN/±Inf) takes the
// nan_or_infinite error exit: AND with the mask isolates the exponent,
// XOR with the same mask yields zero exactly when all bits were set.
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)                             // worst-case formatted length
    self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX)
    self.Emit("XORL" , jit.Imm(_FM_exp32), _AX)
    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)   // exponent all-ones: NaN or Inf
    self.save_c()
    self.rbuf_di()                                  // DI = write position
    self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0)     // X0 = the value (C float arg)
    self.call_c(_F_f32toa)
    self.Emit("ADDQ" , _AX, _RL)                    // advance by bytes written
}

// _asm_OP_f64 is the float64 analogue of _asm_OP_f32; the 64-bit mask is
// too wide for an immediate, so it goes through CX.
func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX)
    self.Emit("ANDQ" , _CX, _AX)
    self.Emit("XORQ" , _CX, _AX)
    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)
    self.save_c()
    self.rbuf_di()
    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)
    self.call_c(_F_f64toa)
    self.Emit("ADDQ" , _AX, _RL)
}
887
// _asm_OP_str encodes the string at *_SP_p with plain quoting.
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}

// _asm_OP_bin base64-encodes the byte slice at *_SP_p, wrapped in quotes.
// The required output size is computed with a multiply-high by _IM_mulv:
// DX := high64((len+2) * magic) followed by LEAQ 2*DX+1 | 2 gives an
// upper bound of roughly 4*(len+2)/3 plus quote room. DX (the pinned _RC)
// is parked in BX across the MULQ, which clobbers DX.
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)           // AX = input length
    self.Emit("ADDQ", jit.Imm(2), _AX)
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)
    self.Emit("MOVQ", _DX, _BX)                         // save RC: MULQ writes DX:AX
    self.From("MULQ", _CX)                              // DX = high half of AX * magic
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)     // AX = 2*DX + 1
    self.Emit("ORQ" , jit.Imm(2), _AX)                  // round up / room for quotes
    self.Emit("MOVQ", _BX, _DX)                         // restore RC
    self.check_size_r(_AX, 0)
    self.add_char('"')
    self.Emit("MOVQ", _ARG_rb, _DI)                     // b64encode args: DI = buffer header
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))             // flush length first
    self.Emit("MOVQ", _SP_p, _SI)                       // SI = source slice header

    // check for AVX2 support
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)
    }

    // call the encoder
    self.call_b64(_F_b64encode)
    self.load_buffer_AX()                               // subroutine updates the header
    self.add_char('"')
}

// _asm_OP_quote encodes the string at *_SP_p with double (escaped)
// quoting, used for values re-emitted as map keys.
func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}
923
// _asm_OP_number copies the json.Number string at *_SP_p verbatim after
// validating it: an empty number becomes "0", a nil data pointer with a
// nonzero length panics, and a string that fails isValidNumber takes the
// invalid-number error exit. The copy itself is a memmove into the
// reserved region.
func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)      // BX = length
    self.Emit("TESTQ", _BX, _BX)
    self.Sjmp("JZ"   , "_empty_{n}")                // empty number -> "0"
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)      // AX = data pointer
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ"  , "_number_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP", _LB_panic)
    self.Link("_number_next_{n}")
    self.call_go(_F_isValidNumber)                  // args already in AX/BX
    self.Emit("CMPB" , _AX, jit.Imm(0))
    self.Sjmp("JE"   , _LB_error_invalid_number)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)
    self.check_size_r(_BX, 0)                       // reserve len bytes
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX)    // memmove dst
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)          // claim the space up front
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX)           // memmove src
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX)           // memmove size
    self.call_go(_F_memmove)
    self.Emit("MOVQ", _ARG_rb, _AX)                 // flush the new length into the header
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))
    self.Sjmp("JMP" , "_done_{n}")
    self.Link("_empty_{n}")
    self.check_size(1)
    self.add_char('0')
    self.Link("_done_{n}")
}
952
// _asm_OP_eface encodes an empty interface (eface): its type word at
// *_SP_p goes in BX, a pointer to its data word in CX, then the recursive
// typed-pointer encoder is called with the buffer, state stack, and flags.
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer_AX()                       // AX = buffer header
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)  // BX = *rt.GoType
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)  // CX = &data word
    self.Emit("MOVQ" , _ST, _DI)                // DI = state stack
    self.Emit("MOVQ" , _ARG_fv, _SI)            // SI = flags
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}

// _asm_OP_iface encodes a non-empty interface (iface): like eface, but
// the concrete type is fetched from the itab (at offset 8) first.
func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer_AX()
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)  // CX = itab
    self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX)    // BX = itab.Type
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)  // CX = &data word
    self.Emit("MOVQ" , _ST, _DI)
    self.Emit("MOVQ" , _ARG_fv, _SI)
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}
977
// _asm_OP_byte appends the single literal byte carried by the instruction.
func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(1), _RL)
}

// _asm_OP_text appends the literal string carried by the instruction.
func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))        // must check the size first
    self.add_text(p.vs())
}

// _asm_OP_deref replaces the value pointer with the pointer it points to.
func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)
}

// _asm_OP_index advances the value pointer by the instruction's constant
// offset (e.g. a struct field or array element offset).
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)
    self.Emit("ADDQ", _AX, _SP_p)
}
997
// _asm_OP_load reloads _SP_x/_SP_p/_SP_q from the record on top of the
// state stack without popping it (negative offsets are relative to the
// current stack offset in AX).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)
}

// _asm_OP_save pushes the current encoder state onto the state stack.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}

// _asm_OP_drop pops one state record.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}

// _asm_OP_drop_2 pops two state records, additionally clearing the extra
// 16 bytes of the second record not covered by drop_state's wipes.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))
}
1017
// _asm_OP_recurse emits a recursive call into encodeTypedPointer for the
// type carried by the instruction. Indirect types must pass a pointer to
// the pointer, so _SP_p is spilled to _VAR_vp and its address passed
// instead. The pv bit from the instruction toggles the pointer-value flag
// in the forwarded flags word.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer_AX()                   // AX = buffer header
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _BX)    // BX = target type

    // check for indirect value
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _CX)
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)   // spill so we can take its address
        self.Emit("LEAQ", _VAR_vp, _CX)
    }

    // call the encoder
    self.Emit("MOVQ" , _ST, _DI)            // DI = state stack
    self.Emit("MOVQ" , _ARG_fv, _SI)        // SI = flags
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI)    // toggle the pointer-value bit
    }
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}
1042
// Branch opcodes: each tests (part of) the current value and jumps to the
// instruction index carried by p when the test fires.

// _asm_OP_is_nil branches if the pointer at *_SP_p is nil.
func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

// _asm_OP_is_nil_p1 branches if the word at *_SP_p+8 is zero (e.g. the
// data word of a two-word header).
func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

// _asm_OP_is_zero_{1,2,4,8} branch if the 1/2/4/8-byte value at *_SP_p
// is zero (omitempty support).
func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

// _asm_OP_is_zero_map branches if the map at *_SP_p is nil or its first
// word (the count stored at the head of the map header) is zero.
func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Xjmp("JZ"   , p.vi())
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))
    self.Xjmp("JE"   , p.vi())
}

// _asm_OP_goto unconditionally jumps to the instruction index in p.
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}
1084
// _asm_OP_map_iter starts a map iteration: iteratorStart(type, map, flags)
// returns the iterator in AX (kept in _SP_q) plus an error pair that, if
// non-nil, takes the shared error exit.
func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)       // AX = map type
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)      // BX = map pointer
    self.Emit("MOVQ" , _ARG_fv, _CX)                // CX = flags
    self.call_go(_F_iteratorStart)
    self.Emit("MOVQ" , _AX, _SP_q)                  // iterator handle
    self.Emit("MOVQ" , _BX, _ET)                    // returned error pair
    self.Emit("MOVQ" , _CX, _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
}

// _asm_OP_map_stop releases the iterator and clears _SP_q.
func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, _AX)
    self.call_go(_F_iteratorStop)
    self.Emit("XORL", _SP_q, _SP_q)
}

// _asm_OP_map_check_key loads the current key pointer from the iterator
// into _SP_p; a nil key means iteration is done and branches to p.vi().
func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)
    self.Emit("TESTQ", _SP_p, _SP_p)
    self.Xjmp("JZ"   , p.vi())
}

// _asm_OP_map_write_key handles string keys under SortMapKeys: when the
// flag is set the key is emitted here and control jumps to p.vi();
// otherwise it falls through to the generic key-encoding path.
func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)
    self.Sjmp("JNC", "_unordered_key_{n}")
    self.encode_string(false)
    self.Xjmp("JMP", p.vi())
    self.Link("_unordered_key_{n}")
}

// _asm_OP_map_value_next loads the current value pointer (iterator word
// at offset 8) into _SP_p and advances the iterator.
func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)
    self.Emit("MOVQ", _SP_q, _AX)
    self.call_go(_F_iteratorNext)
}
1122
// _asm_OP_slice_len loads the slice header at *_SP_p: remaining element
// count into _SP_x, data pointer into _SP_p, and sets the _S_init flag
// marking the first iteration.
func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p)
    self.Emit("ORQ"  , jit.Imm(1 << _S_init), _SP_f)
}

// _asm_OP_slice_next branches to p.vi() when no elements remain;
// otherwise it decrements the count, and advances _SP_p by the element
// size — except on the first pass: BTRQ clears _S_init and sets CF from
// its old value, and CMOVQCC only commits the advanced pointer when the
// flag was NOT set (carry clear).
func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ"  , _SP_x, _SP_x)
    self.Xjmp("JZ"     , p.vi())
    self.Emit("SUBQ"   , jit.Imm(1), _SP_x)
    self.Emit("BTRQ"   , jit.Imm(_S_init), _SP_f)               // CF = old init flag, flag cleared
    self.Emit("LEAQ"   , jit.Ptr(_SP_p, int64(p.vlen())), _AX)  // AX = next element address
    self.Emit("CMOVQCC", _AX, _SP_p)                            // advance only after the first pass
}
1137
// _asm_OP_marshal calls the value's json.Marshaler implementation.
func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}

// _asm_OP_marshal_p calls json.Marshaler on a pointer receiver; the
// compiler only emits this opcode for pointer kinds, which is asserted.
func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
    }
}

// _asm_OP_marshal_text calls encoding.TextMarshaler on the value.
func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}

// _asm_OP_marshal_text_p calls encoding.TextMarshaler on a pointer
// receiver; pointer kind is asserted as in _asm_OP_marshal_p.
func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_text_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
    }
}
1161
// _asm_OP_cond_set sets the condition flag in _SP_f.
func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)
}

// _asm_OP_cond_testc atomically tests-and-clears the condition flag
// (BTRQ sets CF from the old bit) and branches to p.vi() if it was set.
func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)
    self.Xjmp("JC"  , p.vi())
}
1170
// print_gc emits a debug call that prints the instruction index and the
// opcodes of two instructions (via the _F_println helper defined
// elsewhere in this package); used for tracing generated code.
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
    self.call_go(_F_println)
}
1177
View as plain text