package decoder

import (
    `encoding/json`
    `fmt`
    `math`
    `reflect`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
)

const (
    _FP_args = 72
    _FP_fargs = 80
    _FP_saves = 48
    _FP_locals = 144
)

const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    _FP_size = _FP_offs + 8
    _FP_base = _FP_size + 8
)
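// Rough stack-frame picture implied by the constants above (offsets from SP,
// low to high): [0, _FP_fargs) spill space for native-call arguments,
// [_FP_fargs, _FP_fargs+_FP_saves) callee-saved registers,
// [_FP_fargs+_FP_saves, _FP_offs) local variables, then the saved BP, the
// return address, and finally the Go arguments starting at _FP_base.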

const (
    _IM_null = 0x6c6c756e
    _IM_true = 0x65757274
    _IM_alse = 0x65736c61
)
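// The _IM_* constants above are little-endian 32-bit images of the literals
// "null", "true" and "alse" (the tail of "false"), used to match keywords
// four bytes at a time.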

const (
    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)

const (
    _MODE_JSON = 1 << 3
)

const (
    _LB_error = "_error"
    _LB_im_error = "_im_error"
    _LB_eof_error = "_eof_error"
    _LB_type_error = "_type_error"
    _LB_field_error = "_field_error"
    _LB_range_error = "_range_error"
    _LB_stack_error = "_stack_error"
    _LB_base64_error = "_base64_error"
    _LB_unquote_error = "_unquote_error"
    _LB_parsing_error = "_parsing_error"
    _LB_parsing_error_v = "_parsing_error_v"
    _LB_mismatch_error = "_mismatch_error"
)

const (
    _LB_char_0_error = "_char_0_error"
    _LB_char_1_error = "_char_1_error"
    _LB_char_2_error = "_char_2_error"
    _LB_char_3_error = "_char_3_error"
    _LB_char_4_error = "_char_4_error"
    _LB_char_m2_error = "_char_m2_error"
    _LB_char_m3_error = "_char_m3_error"
)

const (
    _LB_skip_one = "_skip_one"
    _LB_skip_key_value = "_skip_key_value"
)

var (
    _AX = jit.Reg("AX")
    _BX = jit.Reg("BX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
    _X0 = jit.Reg("X0")
    _X1 = jit.Reg("X1")
    _X15 = jit.Reg("X15")
)

var (
    _IP = jit.Reg("R10")
    _IC = jit.Reg("R11")
    _IL = jit.Reg("R12")
    _ST = jit.Reg("R13")
    _VP = jit.Reg("R15")
)
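// The registers above hold the pinned decoder state, judging by how the
// prologue loads them: _IP is the input base pointer, _IC the input cursor,
// _IL the input length, _ST the decoder stack base, and _VP the pointer to
// the value currently being decoded.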

var (
    _DF = jit.Reg("AX")
    _ET = jit.Reg("AX")
    _EP = jit.Reg("BX")
)

var (
    _ARG_s = _ARG_sp
    _ARG_sp = jit.Ptr(_SP, _FP_base + 0)
    _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
    _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
)

var (
    _ARG_sv = _ARG_sv_p
    _ARG_sv_p = jit.Ptr(_SP, _FP_base + 48)
    _ARG_sv_n = jit.Ptr(_SP, _FP_base + 56)
    _ARG_vk = jit.Ptr(_SP, _FP_base + 64)
)

var (
    _VAR_st = _VAR_st_Vt
    _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
)

var (
    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
)
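// The _VAR_st_* slots above appear to mirror the field layout of the native
// scanner's JsonState value (value type, double value, integer value,
// escape position, digit buffer pointer and capacity).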

var (
    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
)

var (
    _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
    _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
)

var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)

var (
    _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120)
    _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128)
    _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136)
)

type _Assembler struct {
    jit.BaseAssembler
    p _Program
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}

func (self *_Assembler) Load() _Decoder {
    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.copy_string()
    self.escape_string()
    self.escape_string_twice()
    self.skip_one()
    self.skip_key_value()
    self.type_error()
    self.mismatch_error()
    self.field_error()
    self.range_error()
    self.stack_error()
    self.base64_error()
    self.parsing_error()
}


var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_any : (*_Assembler)._asm_OP_any,
    _OP_dyn : (*_Assembler)._asm_OP_dyn,
    _OP_str : (*_Assembler)._asm_OP_str,
    _OP_bin : (*_Assembler)._asm_OP_bin,
    _OP_bool : (*_Assembler)._asm_OP_bool,
    _OP_num : (*_Assembler)._asm_OP_num,
    _OP_i8 : (*_Assembler)._asm_OP_i8,
    _OP_i16 : (*_Assembler)._asm_OP_i16,
    _OP_i32 : (*_Assembler)._asm_OP_i32,
    _OP_i64 : (*_Assembler)._asm_OP_i64,
    _OP_u8 : (*_Assembler)._asm_OP_u8,
    _OP_u16 : (*_Assembler)._asm_OP_u16,
    _OP_u32 : (*_Assembler)._asm_OP_u32,
    _OP_u64 : (*_Assembler)._asm_OP_u64,
    _OP_f32 : (*_Assembler)._asm_OP_f32,
    _OP_f64 : (*_Assembler)._asm_OP_f64,
    _OP_unquote : (*_Assembler)._asm_OP_unquote,
    _OP_nil_1 : (*_Assembler)._asm_OP_nil_1,
    _OP_nil_2 : (*_Assembler)._asm_OP_nil_2,
    _OP_nil_3 : (*_Assembler)._asm_OP_nil_3,
    _OP_deref : (*_Assembler)._asm_OP_deref,
    _OP_index : (*_Assembler)._asm_OP_index,
    _OP_is_null : (*_Assembler)._asm_OP_is_null,
    _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote,
    _OP_map_init : (*_Assembler)._asm_OP_map_init,
    _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8,
    _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16,
    _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32,
    _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64,
    _OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8,
    _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16,
    _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32,
    _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64,
    _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32,
    _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64,
    _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str,
    _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext,
    _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p,
    _OP_array_skip : (*_Assembler)._asm_OP_array_skip,
    _OP_array_clear : (*_Assembler)._asm_OP_array_clear,
    _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
    _OP_slice_init : (*_Assembler)._asm_OP_slice_init,
    _OP_slice_append : (*_Assembler)._asm_OP_slice_append,
    _OP_object_skip : (*_Assembler)._asm_OP_object_skip,
    _OP_object_next : (*_Assembler)._asm_OP_object_next,
    _OP_struct_field : (*_Assembler)._asm_OP_struct_field,
    _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
    _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p,
    _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text,
    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
    _OP_lspace : (*_Assembler)._asm_OP_lspace,
    _OP_match_char : (*_Assembler)._asm_OP_match_char,
    _OP_check_char : (*_Assembler)._asm_OP_check_char,
    _OP_load : (*_Assembler)._asm_OP_load,
    _OP_save : (*_Assembler)._asm_OP_save,
    _OP_drop : (*_Assembler)._asm_OP_drop,
    _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse : (*_Assembler)._asm_OP_recurse,
    _OP_goto : (*_Assembler)._asm_OP_goto,
    _OP_switch : (*_Assembler)._asm_OP_switch,
    _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
    _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
    _OP_go_skip : (*_Assembler)._asm_OP_go_skip,
    _OP_add : (*_Assembler)._asm_OP_add,
    _OP_check_empty : (*_Assembler)._asm_OP_check_empty,
    _OP_debug : (*_Assembler)._asm_OP_debug,
}

func (self *_Assembler) _asm_OP_debug(_ *_Instr) {
    self.Byte(0xcc)
}

func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}

func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _EP, _EP)
    self.Emit("MOVQ", _VAR_et, _ET)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ", _LB_mismatch_error)
    self.Link(_LB_error)
    self.Emit("MOVQ", _EP, _CX)
    self.Emit("MOVQ", _ET, _BX)
    self.Emit("MOVQ", _IC, _AX)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sp)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vp)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)
    self.Emit("RET")
}

func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.Emit("MOVQ", _AX, _ARG_sp)
    self.Emit("MOVQ", _AX, _IP)
    self.Emit("MOVQ", _BX, _ARG_sl)
    self.Emit("MOVQ", _BX, _IL)
    self.Emit("MOVQ", _CX, _ARG_ic)
    self.Emit("MOVQ", _CX, _IC)
    self.Emit("MOVQ", _DI, _ARG_vp)
    self.Emit("MOVQ", _DI, _VP)
    self.Emit("MOVQ", _SI, _ARG_sb)
    self.Emit("MOVQ", _SI, _ST)
    self.Emit("MOVQ", _R8, _ARG_fv)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sv_n)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)
    self.Emit("MOVQ", jit.Imm(0), _VAR_et)

    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc)
    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)
    self.Emit("MOVQ", _AX, _VAR_st_Db)
}
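// The prologue above spills the register-ABI arguments (AX=sp, BX=sl, CX=ic,
// DI=vp, SI=sb, R8=fv) into their stack slots and mirrors them into the
// pinned state registers before decoding begins.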

var (
    _REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
    _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }
)

func (self *_Assembler) save(r ...obj.Addr) {
    for i, v := range r {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
        }
    }
}

func (self *_Assembler) load(r ...obj.Addr) {
    for i, v := range r {
        if i > _FP_saves / 8 - 1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
        }
    }
}

func (self *_Assembler) call(fn obj.Addr) {
    self.Emit("MOVQ", fn, _R9)
    self.Rjmp("CALL", _R9)
}

func (self *_Assembler) call_go(fn obj.Addr) {
    self.save(_REG_go...)
    self.call(fn)
    self.load(_REG_go...)
}

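// callc / call_c invoke native (C) subroutines. Only _IP needs to be
// preserved around the call, and X15 is re-zeroed afterwards since the Go
// register ABI expects it to stay zero.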
func (self *_Assembler) callc(fn obj.Addr) {
    self.save(_IP)
    self.call(fn)
    self.Emit("XORPS", _X15, _X15)
    self.load(_IP)
}

func (self *_Assembler) call_c(fn obj.Addr) {
    self.Emit("XCHGQ", _IC, _BX)
    self.callc(fn)
    self.Emit("XCHGQ", _IC, _BX)
}

func (self *_Assembler) call_sf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)
    self.Emit("MOVQ", _IC, _ARG_ic)
    self.Emit("LEAQ", _ARG_ic, _SI)
    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX)
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.callc(fn)
    self.Emit("MOVQ", _ARG_ic, _IC)
}

func (self *_Assembler) call_vf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)
    self.Emit("MOVQ", _IC, _ARG_ic)
    self.Emit("LEAQ", _ARG_ic, _SI)
    self.Emit("LEAQ", _VAR_st, _DX)
    self.callc(fn)
    self.Emit("MOVQ", _ARG_ic, _IC)
}

var (
    _F_convT64 = jit.Func(convT64)
    _F_error_wrap = jit.Func(error_wrap)
    _F_error_type = jit.Func(error_type)
    _F_error_field = jit.Func(error_field)
    _F_error_value = jit.Func(error_value)
    _F_error_mismatch = jit.Func(error_mismatch)
)

var (
    _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0)))
    _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0)))
    _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0)))
    _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0)))
    _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0)))
    _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0)))
    _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

var (
    _T_error = rt.UnpackType(errorType)
    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

var (
    _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
    _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)

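// Error handler stubs: each labelled block below loads an error value into
// (_ET, _EP) and transfers control to _LB_error in the epilogue.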
func (self *_Assembler) type_error() {
    self.Link(_LB_type_error)
    self.call_go(_F_error_type)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) mismatch_error() {
    self.Link(_LB_mismatch_error)
    self.Emit("MOVQ", _VAR_et, _ET)
    self.Emit("MOVQ", _VAR_ic, _EP)
    self.Emit("MOVQ", _I_json_MismatchTypeError, _CX)
    self.Emit("CMPQ", _ET, _CX)
    self.Sjmp("JE" , _LB_error)
    self.Emit("MOVQ", _ARG_sp, _AX)
    self.Emit("MOVQ", _ARG_sl, _BX)
    self.Emit("MOVQ", _VAR_ic, _CX)
    self.Emit("MOVQ", _VAR_et, _DI)
    self.call_go(_F_error_mismatch)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) field_error() {
    self.Link(_LB_field_error)
    self.Emit("MOVQ", _ARG_sv_p, _AX)
    self.Emit("MOVQ", _ARG_sv_n, _BX)
    self.call_go(_F_error_field)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) range_error() {
    self.Link(_LB_range_error)
    self.Emit("MOVQ", _ET, _CX)
    self.slice_from(_VAR_st_Ep, 0)
    self.Emit("MOVQ", _DI, _AX)
    self.Emit("MOVQ", _EP, _DI)
    self.Emit("MOVQ", _SI, _BX)
    self.call_go(_F_error_value)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) stack_error() {
    self.Link(_LB_stack_error)
    self.Emit("MOVQ", _V_stackOverflow, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) base64_error() {
    self.Link(_LB_base64_error)
    self.Emit("NEGQ", _AX)
    self.Emit("SUBQ", jit.Imm(1), _AX)
    self.call_go(_F_convT64)
    self.Emit("MOVQ", _AX, _EP)
    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) parsing_error() {
    self.Link(_LB_eof_error)
    self.Emit("MOVQ" , _IL, _IC)
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP)
    self.Sjmp("JMP" , _LB_parsing_error)
    self.Link(_LB_unquote_error)
    self.Emit("SUBQ" , _VAR_sr, _SI)
    self.Emit("SUBQ" , _SI, _IC)
    self.Link(_LB_parsing_error_v)
    self.Emit("MOVQ" , _AX, _EP)
    self.Emit("NEGQ" , _EP)
    self.Sjmp("JMP" , _LB_parsing_error)
    self.Link(_LB_char_m3_error)
    self.Emit("SUBQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_m2_error)
    self.Emit("SUBQ" , jit.Imm(2), _IC)
    self.Sjmp("JMP" , _LB_char_0_error)
    self.Link(_LB_im_error)
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0))
    self.Sjmp("JNE" , _LB_char_0_error)
    self.Emit("SHRL" , jit.Imm(8), _CX)
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1))
    self.Sjmp("JNE" , _LB_char_1_error)
    self.Emit("SHRL" , jit.Imm(8), _CX)
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2))
    self.Sjmp("JNE" , _LB_char_2_error)
    self.Sjmp("JMP" , _LB_char_3_error)
    self.Link(_LB_char_4_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_3_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_2_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_1_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_0_error)
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP)
    self.Link(_LB_parsing_error)
    self.Emit("MOVQ" , _EP, _DI)
    self.Emit("MOVQ", _ARG_sp, _AX)
    self.Emit("MOVQ", _ARG_sl, _BX)
    self.Emit("MOVQ" , _IC, _CX)
    self.call_go(_F_error_wrap)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", jit.Type(p.vt()), _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
}

func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Xref(p.vi(), 4)

    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)
}

func (self *_Assembler) skip_one() {
    self.Link(_LB_skip_one)
    self.Emit("MOVQ", _VAR_ic, _IC)
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
    self.Emit("MOVQ" , _VAR_pc, _R9)
    self.Rjmp("JMP" , _R9)
}

func (self *_Assembler) skip_key_value() {
    self.Link(_LB_skip_key_value)

    self.Emit("MOVQ", _VAR_ic, _IC)
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)

    self.lspace("_global_1")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
    self.Sjmp("JNE" , _LB_parsing_error_v)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.lspace("_global_2")

    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)

    self.Emit("MOVQ" , _VAR_pc, _R9)
    self.Rjmp("JMP" , _R9)
}

var (
    _T_byte = jit.Type(byteType)
    _F_mallocgc = jit.Func(mallocgc)
)

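// Allocation helpers: malloc_AX grabs nb raw bytes (element type byte, no
// zeroing requested), while valloc / valloc_AX allocate a zeroed value of a
// concrete Go type through mallocgc.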
func (self *_Assembler) malloc_AX(nb obj.Addr, ret obj.Addr) {
    self.Emit("MOVQ", nb, _AX)
    self.Emit("MOVQ", _T_byte, _BX)
    self.Emit("XORL", _CX, _CX)
    self.call_go(_F_mallocgc)
    self.Emit("MOVQ", _AX, ret)
}

func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)
    self.Emit("MOVQ", jit.Type(vt), _BX)
    self.Emit("MOVB", jit.Imm(1), _CX)
    self.call_go(_F_mallocgc)
    self.Emit("MOVQ", _AX, ret)
}

func (self *_Assembler) valloc_AX(vt reflect.Type) {
    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)
    self.Emit("MOVQ", jit.Type(vt), _BX)
    self.Emit("MOVB", jit.Imm(1), _CX)
    self.call_go(_F_mallocgc)
}

func (self *_Assembler) vfollow(vt reflect.Type) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ" , "_end_{n}")
    self.valloc_AX(vt)
    self.WritePtrAX(1, jit.Ptr(_VP, 0), true)
    self.Link("_end_{n}")
    self.Emit("MOVQ" , _AX, _VP)
}

var (
    _F_vstring = jit.Imm(int64(native.S_vstring))
    _F_vnumber = jit.Imm(int64(native.S_vnumber))
    _F_vsigned = jit.Imm(int64(native.S_vsigned))
    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)

func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ" , _VAR_st_Vt, _AX)
    self.Emit("TESTQ", _AX, _AX)

    if vt != nil {
        self.Sjmp("JNS" , "_check_err_{n}")
        self.Emit("MOVQ", jit.Type(vt), _ET)
        self.Emit("MOVQ", _ET, _VAR_et)
        if pin2 != -1 {
            self.Emit("SUBQ", jit.Imm(1), _BX)
            self.Emit("MOVQ", _BX, _VAR_ic)
            self.Byte(0x4c , 0x8d, 0x0d)
            self.Xref(pin2, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_key_value)
        } else {
            self.Emit("MOVQ", _BX, _VAR_ic)
            self.Byte(0x4c , 0x8d, 0x0d)
            self.Sref(pin, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_one)
        }
        self.Link("_check_err_{n}")
    } else {
        self.Sjmp("JS" , _LB_parsing_error_v)
    }
}

func (self *_Assembler) check_eof(d int64) {
    if d == 1 {
        self.Emit("CMPQ", _IC, _IL)
        self.Sjmp("JAE" , _LB_eof_error)
    } else {
        self.Emit("LEAQ", jit.Ptr(_IC, d), _AX)
        self.Emit("CMPQ", _AX, _IL)
        self.Sjmp("JA" , _LB_eof_error)
    }
}

func (self *_Assembler) parse_string() {
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call_vf(_F_vstring)
    self.check_err(nil, "", -1)
}

func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BX)
    self.call_vf(_F_vnumber)
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BX)
    self.call_vf(_F_vsigned)
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BX)
    self.call_vf(_F_vunsigned)
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) copy_string() {
    self.Link("_copy_string")
    self.Emit("MOVQ", _DI, _VAR_bs_p)
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc_AX(_SI, _ARG_sv_p)
    self.Emit("MOVQ", _VAR_bs_p, _BX)
    self.Emit("MOVQ", _VAR_bs_n, _CX)
    self.call_go(_F_memmove)
    self.Emit("MOVQ", _ARG_sv_p, _DI)
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}

func (self *_Assembler) escape_string() {
    self.Link("_escape_string")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc_AX(_SI, _DX)
    self.Emit("MOVQ" , _DX, _ARG_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("LEAQ" , _VAR_sr, _CX)
    self.Emit("XORL" , _R8, _R8)
    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv)
    self.Emit("SETCC", _R8)
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)
    self.call_c(_F_unquote)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("ADDQ" , jit.Imm(1), _SI)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_unquote_error)
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("MOVQ" , _ARG_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}

func (self *_Assembler) escape_string_twice() {
    self.Link("_escape_string_twice")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc_AX(_SI, _DX)
    self.Emit("MOVQ" , _DX, _ARG_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("LEAQ" , _VAR_sr, _CX)
    self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)
    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv)
    self.Emit("XORL" , _AX, _AX)
    self.Emit("SETCC", _AX)
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX)
    self.Emit("ORQ" , _AX, _R8)
    self.call_c(_F_unquote)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("ADDQ" , jit.Imm(3), _SI)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_unquote_error)
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("MOVQ" , _ARG_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}

var (
    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

var (
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {
    *_Vp_max_f32 = math.MaxFloat32
    *_Vp_min_f32 = -math.MaxFloat32
}

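// range_single_X0 narrows the parsed float64 in _VAR_st_Dv to float32 and
// raises a range error when the converted value falls outside
// [-math.MaxFloat32, math.MaxFloat32].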
func (self *_Assembler) range_single_X0() {
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)
    self.Emit("MOVQ" , _V_max_f32, _CX)
    self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET)
    self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP)
    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)
    self.Sjmp("JA" , _LB_range_error)
    self.Emit("MOVQ" , _V_min_f32, _CX)
    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)
    self.Sjmp("JB" , _LB_range_error)
}

func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
    self.Emit("MOVQ", _VAR_st_Iv, _CX)
    self.Emit("MOVQ", jit.Gitab(i), _ET)
    self.Emit("MOVQ", jit.Gtype(t), _EP)
    self.Emit("CMPQ", _CX, jit.Imm(a))
    self.Sjmp("JL" , _LB_range_error)
    self.Emit("CMPQ", _CX, jit.Imm(b))
    self.Sjmp("JG" , _LB_range_error)
}

func (self *_Assembler) range_unsigned_CX(i *rt.GoItab, t *rt.GoType, v uint64) {
    self.Emit("MOVQ" , _VAR_st_Iv, _CX)
    self.Emit("MOVQ" , jit.Gitab(i), _ET)
    self.Emit("MOVQ" , jit.Gtype(t), _EP)
    self.Emit("TESTQ", _CX, _CX)
    self.Sjmp("JS" , _LB_range_error)
    self.Emit("CMPQ" , _CX, jit.Imm(int64(v)))
    self.Sjmp("JA" , _LB_range_error)
}

var (
    _F_unquote = jit.Imm(int64(native.S_unquote))
)

func (self *_Assembler) slice_from(p obj.Addr, d int64) {
    self.Emit("MOVQ", p, _SI)
    self.slice_from_r(_SI, d)
}

func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI)
    self.Emit("NEGQ", p)
    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI)
}

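// unquote_once slices the raw string out of the input buffer; if an escape
// was recorded (st.Ep != -1) it diverts to the _escape_string routine, and
// with the copy-string flag set it copies the bytes so the result does not
// alias the input. unquote_twice handles the doubly-quoted form.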
func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
    self.slice_from(_VAR_st_Iv, -1)
    self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1))
    self.Sjmp("JE" , "_noescape_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref("_unquote_once_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string")
    self.Link("_noescape_{n}")
    if copy {
        self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
        self.Sjmp("JNC", "_unquote_once_write_{n}")
        self.Byte(0x4c, 0x8d, 0x0d)
        self.Sref("_unquote_once_write_{n}", 4)
        self.Sjmp("JMP", "_copy_string")
    }
    self.Link("_unquote_once_write_{n}")
    self.Emit("MOVQ", _SI, n)
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(10, _DI, p, false, false)
    }
}

func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))
    self.Sjmp("JE" , _LB_eof_error)
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\'))
    self.Sjmp("JNE" , _LB_char_m3_error)
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))
    self.Sjmp("JNE" , _LB_char_m2_error)
    self.slice_from(_VAR_st_Iv, -3)
    self.Emit("MOVQ" , _SI, _AX)
    self.Emit("ADDQ" , _VAR_st_Iv, _AX)
    self.Emit("CMPQ" , _VAR_st_Ep, _AX)
    self.Sjmp("JE" , "_noescape_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string_twice")
    self.Link("_noescape_{n}")
    self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_unquote_twice_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_unquote_twice_write_{n}")
    self.Emit("MOVQ" , _SI, n)
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(12, _DI, p, false, false)
    }
    self.Link("_unquote_twice_end_{n}")
}

var (
    _F_memclrHasPointers = jit.Func(memclrHasPointers)
    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)

func (self *_Assembler) mem_clear_fn(ptrfree bool) {
    if !ptrfree {
        self.call_go(_F_memclrHasPointers)
    } else {
        self.call_go(_F_memclrNoHeapPointers)
    }
}

func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
    self.Emit("MOVQ", jit.Imm(size), _BX)
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)
    self.Emit("SUBQ", _VP, _AX)
    self.Emit("ADDQ", _AX, _BX)
    self.Emit("MOVQ", _VP, _AX)
    self.mem_clear_fn(ptrfree)
}

var (
    _F_mapassign = jit.Func(mapassign)
    _F_mapassign_fast32 = jit.Func(mapassign_fast32)
    _F_mapassign_faststr = jit.Func(mapassign_faststr)
    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

var (
    _F_decodeJsonUnmarshaler obj.Addr
    _F_decodeTextUnmarshaler obj.Addr
)

func init() {
    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}

func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
        self.vfollow(t.Elem())
    }
}

func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
    self.Emit("LEAQ", v, _AX)
    self.mapassign_call_from_AX(t, _F_mapassign)
}

func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _AX)
    self.Emit("MOVQ", _VP, _BX)
    self.Emit("MOVQ", p, _CX)
    self.Emit("MOVQ", n, _DI)
    self.call_go(_F_mapassign_faststr)
    self.Emit("MOVQ", _AX, _VP)
    self.mapaccess_ptr(t)
}

func (self *_Assembler) mapassign_call_from_AX(t reflect.Type, fn obj.Addr) {
    self.Emit("MOVQ", _AX, _CX)
    self.Emit("MOVQ", jit.Type(t), _AX)
    self.Emit("MOVQ", _VP, _BX)
    self.call_go(fn)
    self.Emit("MOVQ", _AX, _VP)
}

func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
    self.mapassign_call_from_AX(t, fn)
    self.mapaccess_ptr(t)
}

func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
    pv := false
    vk := t.Key()
    tk := t.Key()

    if vk.Kind() == reflect.Ptr {
        pv = true
        vk = vk.Elem()
    }

    if addressable {
        pv = false
        tk = reflect.PtrTo(tk)
    }

    self.valloc(vk, _BX)

    self.Emit("MOVQ" , _BX, _ARG_vk)
    self.Emit("MOVQ" , jit.Type(tk), _AX)
    self.Emit("MOVQ" , _ARG_sv_p, _CX)
    self.Emit("MOVQ" , _ARG_sv_n, _DI)
    self.call_go(_F_decodeTextUnmarshaler)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
    self.Emit("MOVQ" , _ARG_vk, _AX)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)

    if !pv {
        self.mapassign_call_from_AX(t, _F_mapassign)
    } else {
        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
    }
}

var (
    _F_skip_one = jit.Imm(int64(native.S_skip_one))
    _F_skip_array = jit.Imm(int64(native.S_skip_array))
    _F_skip_object = jit.Imm(int64(native.S_skip_object))
    _F_skip_number = jit.Imm(int64(native.S_skip_number))
)

func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
    self.slice_from_r(_AX, 0)
    self.Emit("MOVQ" , _DI, _ARG_sv_p)
    self.Emit("MOVQ" , _SI, _ARG_sv_n)
    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)
}

func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
    self.parse_string()
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref)
}

func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
    pt := t
    vk := t.Kind()

    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ" , _VP, _BX)
        self.Emit("MOVQ" , jit.Ptr(_BX, 0), _BX)
        self.Emit("TESTQ", _BX, _BX)
        self.Sjmp("JNZ" , "_deref_{n}")
        self.valloc(t.Elem(), _BX)
        self.WriteRecNotAX(3, _BX, jit.Ptr(_VP, 0), false, false)
        self.Link("_deref_{n}")
    } else {
        self.Emit("MOVQ", _VP, _BX)
    }

    self.Emit("MOVQ", jit.Type(pt), _AX)

    self.Emit("MOVQ" , _ARG_sv_p, _CX)
    self.Emit("MOVQ" , _ARG_sv_n, _DI)
    self.call_go(fn)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
}

var (
    _F_decodeTypedPointer obj.Addr
)

func init() {
    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}

func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
    self.Emit("MOVQ" , vp, _SI)
    self.Emit("MOVQ" , vt, _DI)
    self.Emit("MOVQ", _ARG_sp, _AX)
    self.Emit("MOVQ", _ARG_sl, _BX)
    self.Emit("MOVQ" , _IC, _CX)
    self.Emit("MOVQ" , _ST, _R8)
    self.Emit("MOVQ" , _ARG_fv, _R9)
    self.save(_REG_rt...)
    self.Emit("MOVQ", _F_decodeTypedPointer, _IL)
    self.Rjmp("CALL", _IL)
    self.load(_REG_rt...)
    self.Emit("MOVQ" , _AX, _IC)
    self.Emit("MOVQ" , _BX, _ET)
    self.Emit("MOVQ" , _CX, _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JE", "_decode_dynamic_end_{n}")
    self.Emit("MOVQ", _I_json_MismatchTypeError, _CX)
    self.Emit("CMPQ", _ET, _CX)
    self.Sjmp("JNE", _LB_error)
    self.Emit("MOVQ", _EP, _VAR_ic)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Link("_decode_dynamic_end_{n}")
}

var (
    _F_memequal = jit.Func(memequal)
    _F_memmove = jit.Func(memmove)
    _F_growslice = jit.Func(growslice)
    _F_makeslice = jit.Func(makeslice)
    _F_makemap_small = jit.Func(makemap_small)
    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

var (
    _F_lspace = jit.Imm(int64(native.S_lspace))
    _F_strhash = jit.Imm(int64(caching.S_strhash))
)

var (
    _F_b64decode = jit.Imm(int64(_subr__b64decode))
    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

var (
    _F_FieldMap_GetCaseInsensitive obj.Addr
    _Empty_Slice = []byte{}
    _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

const (
    _MODE_AVX2 = 1 << 2
)

const (
    _Fe_ID = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

const (
    _Vk_Ptr = int64(reflect.Ptr)
    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)

func init() {
    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}

func (self *_Assembler) _asm_OP_any(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX)
    self.Emit("TESTQ" , _CX, _CX)
    self.Sjmp("JZ" , "_decode_{n}")
    self.Emit("CMPQ" , _CX, _VP)
    self.Sjmp("JE" , "_decode_{n}")
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)
    self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX)
    self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr))
    self.Sjmp("JNE" , "_decode_{n}")
    self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI)
    self.decode_dynamic(_AX, _DI)
    self.Sjmp("JMP" , "_decode_end_{n}")
    self.Link("_decode_{n}")
    self.Emit("MOVQ" , _ARG_fv, _DF)
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 0))
    self.call(_F_decodeValue)
    self.Emit("MOVQ" , jit.Imm(0), jit.Ptr(_SP, 0))
    self.Emit("TESTQ" , _EP, _EP)
    self.Sjmp("JNZ" , _LB_parsing_error)
    self.Link("_decode_end_{n}")
}

func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _ET)
    self.Emit("CMPQ" , jit.Ptr(_VP, 8), jit.Imm(0))
    self.Sjmp("JE" , _LB_type_error)
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _CX)
    self.Emit("MOVQ" , jit.Ptr(_CX, 8), _CX)
    self.Emit("MOVBLZX", jit.Ptr(_CX, _Gt_KindFlags), _DX)
    self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX)
    self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr))
    self.Sjmp("JNE" , _LB_type_error)
    self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI)
    self.decode_dynamic(_CX, _DI)
    self.Link("_decode_end_{n}")
}

func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.parse_string()
    self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)
}

func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.parse_string()
    self.slice_from(_VAR_st_Iv, -1)
    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))
    self.Emit("SHRQ" , jit.Imm(2), _SI)
    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))
    self.malloc_AX(_SI, _SI)

    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)

    self.Emit("XORL" , _DX, _DX)
    self.Emit("MOVQ" , _VP, _DI)

    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R8)
    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)
    self.Emit("MOVQ" , _R8, _SI)

    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))
    self.call_c(_F_b64decode)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_base64_error)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
}

func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)
    self.Emit("CMPQ", _AX, _IL)
    self.Sjmp("JA" , _LB_eof_error)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))
    self.Sjmp("JE" , "_false_{n}")
    self.Emit("MOVL", jit.Imm(_IM_true), _CX)
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))
    self.Sjmp("JE" , "_bool_true_{n}")

    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", _T_bool, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref("_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)

    self.Link("_bool_true_{n}")
    self.Emit("MOVQ", _AX, _IC)
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))
    self.Sjmp("JMP" , "_end_{n}")
    self.Link("_false_{n}")
    self.Emit("ADDQ", jit.Imm(1), _AX)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Emit("CMPQ", _AX, _IL)
    self.Sjmp("JA" , _LB_eof_error)
    self.Emit("MOVL", jit.Imm(_IM_alse), _CX)
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))
    self.Sjmp("JNE" , _LB_im_error)
    self.Emit("MOVQ", _AX, _IC)
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))
    self.Link("_end_{n}")
}

func (self *_Assembler) _asm_OP_num(_ *_Instr) {
    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Emit("MOVQ", _IC, _BX)
    self.Sjmp("JNE", "_skip_number_{n}")
    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_skip_number_{n}")

    self.Emit("LEAQ", _ARG_s, _DI)
    self.Emit("MOVQ", _IC, _ARG_ic)
    self.Emit("LEAQ", _ARG_ic, _SI)
    self.callc(_F_skip_number)
    self.Emit("MOVQ", _ARG_ic, _IC)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNS" , "_num_next_{n}")

    self.Emit("MOVQ", _BX, _VAR_ic)
    self.Emit("MOVQ", _T_number, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref("_num_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)

    self.Link("_num_next_{n}")
    self.slice_from_r(_AX, 0)
    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_num_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref("_num_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_num_write_{n}")
    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))
    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
    self.Sjmp("JNE", "_num_end_{n}")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Sjmp("JNE", _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_num_end_{n}")
}

func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    var pin = "_i8_end_{n}"
    self.parse_signed(int8Type, pin, -1)
    self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)
    self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    var pin = "_i16_end_{n}"
    self.parse_signed(int16Type, pin, -1)
    self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)
    self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    var pin = "_i32_end_{n}"
    self.parse_signed(int32Type, pin, -1)
    self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)
    self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    var pin = "_i64_end_{n}"
    self.parse_signed(int64Type, pin, -1)
    self.Emit("MOVQ", _VAR_st_Iv, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    var pin = "_u8_end_{n}"
    self.parse_unsigned(uint8Type, pin, -1)
    self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)
    self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    var pin = "_u16_end_{n}"
    self.parse_unsigned(uint16Type, pin, -1)
    self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)
    self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    var pin = "_u32_end_{n}"
    self.parse_unsigned(uint32Type, pin, -1)
    self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)
    self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    var pin = "_u64_end_{n}"
    self.parse_unsigned(uint64Type, pin, -1)
    self.Emit("MOVQ", _VAR_st_Iv, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    var pin = "_f32_end_{n}"
    self.parse_number(float32Type, pin, -1)
    self.range_single_X0()
    self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    var pin = "_f64_end_{n}"
    self.parse_number(float64Type, pin, -1)
    self.Emit("MOVSD", _VAR_st_Dv, _X0)
    self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_unquote(_ *_Instr) {
    self.check_eof(2)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))
    self.Sjmp("JNE" , _LB_char_0_error)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))
    self.Sjmp("JNE" , _LB_char_1_error)
    self.Emit("ADDQ", jit.Imm(2), _IC)
    self.parse_string()
    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false)
}

func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
}

func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))
}

func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
    self.Emit("XORL" , _AX, _AX)
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))
}

func (self *_Assembler) _asm_OP_deref(p *_Instr) {
    self.vfollow(p.vt())
}

func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)
    self.Emit("ADDQ", _AX, _VP)
}

func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
    self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX)
    self.Emit("CMPQ" , _AX, _IL)
    self.Sjmp("JA" , "_not_null_{n}")
    self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))
    self.Emit("CMOVQEQ", _AX, _IC)
    self.Xjmp("JE" , p.vi())
    self.Link("_not_null_{n}")
}

func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
    self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX)
    self.Emit("CMPQ" , _AX, _IL)
    self.Sjmp("JA" , "_not_null_quote_{n}")
    self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))
    self.Sjmp("JNE" , "_not_null_quote_{n}")
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))
    self.Emit("CMOVQEQ", _AX, _IC)
    self.Xjmp("JE" , p.vi())
    self.Link("_not_null_quote_{n}")
}

func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ" , "_end_{n}")
    self.call_go(_F_makemap_small)
    self.WritePtrAX(6, jit.Ptr(_VP, 0), false)
    self.Link("_end_{n}")
    self.Emit("MOVQ" , _AX, _VP)
}

func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
    self.parse_signed(int8Type, "", p.vi())
    self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}

func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
    self.parse_signed(int16Type, "", p.vi())
    self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}

func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
    self.parse_signed(int32Type, "", p.vi())
    self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.Emit("MOVQ", _CX, _AX)
        self.mapassign_fastx(vt, _F_mapassign_fast32)
    }
}

func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
    self.parse_signed(int64Type, "", p.vi())
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)
        self.mapassign_fastx(vt, _F_mapassign_fast64)
    }
}

func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
    self.parse_unsigned(uint8Type, "", p.vi())
    self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}

func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
    self.parse_unsigned(uint16Type, "", p.vi())
    self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}

func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
    self.parse_unsigned(uint32Type, "", p.vi())
    self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.Emit("MOVQ", _CX, _AX)
        self.mapassign_fastx(vt, _F_mapassign_fast32)
    }
}

func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
    self.parse_unsigned(uint64Type, "", p.vi())
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)
        self.mapassign_fastx(vt, _F_mapassign_fast64)
    }
}

func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
    self.parse_number(float32Type, "", p.vi())
    self.range_single_X0()
    self.Emit("MOVSS", _X0, _VAR_st_Dv)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)
}

func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
    self.parse_number(float64Type, "", p.vi())
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)
}

func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
    self.parse_string()
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
    if vt := p.vt(); !mapfast(vt) {
        self.valloc(vt.Key(), _DI)
        self.Emit("MOVOU", _ARG_sv, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
        self.mapassign_std(vt, jit.Ptr(_DI, 0))
    } else {
        self.mapassign_str_fast(vt, _ARG_sv_p, _ARG_sv_n)
    }
}

func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
    self.parse_string()
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
    self.mapassign_utext(p.vt(), false)
}

func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
    self.parse_string()
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
    self.mapassign_utext(p.vt(), true)
}

func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
    self.call_sf(_F_skip_array)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
}

func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
    self.mem_clear_rem(p.i64(), true)
}

func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
    self.mem_clear_rem(p.i64(), false)
}

func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
    self.Emit("XORL" , _AX, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _BX)
    self.Emit("TESTQ", _BX, _BX)
    self.Sjmp("JNZ" , "_done_{n}")
    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
    self.call_go(_F_makeslice)
    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)
    self.Emit("XORL" , _AX, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
    self.Link("_done_{n}")
}

func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
    rbracket := p.vb()
    if rbracket == ']' {
        self.check_eof(1)
        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)
        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket)))
        self.Sjmp("JNE" , "_not_empty_array_{n}")
        self.Emit("MOVQ", _AX, _IC)
        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
        self.Emit("PXOR", _X0, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))
        self.Xjmp("JMP" , p.vi())
        self.Link("_not_empty_array_{n}")
    } else {
        panic("_asm_OP_check_empty: only empty-array checking is implemented")
    }
}

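// _asm_OP_slice_append grows the backing array through growslice when
// len == cap (requesting twice the current length) and, for pointer-free
// element types, zeroes the newly grown tail before advancing _VP to the
// freshly appended element slot.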
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)
    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))
    self.Sjmp("JB" , "_index_{n}")
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("SHLQ" , jit.Imm(1), _SI)
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _BX)
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _DI)
    self.call_go(_F_growslice)
    self.WritePtrAX(8, jit.Ptr(_VP, 0), false)
    self.Emit("MOVQ" , _BX, jit.Ptr(_VP, 8))
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))

    if rt.UnpackType(p.vt()).PtrData == 0 {
        self.Emit("MOVQ" , _CX, _DI)
        self.Emit("SUBQ" , _BX, _DI)

        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))
        self.Emit("MOVQ" , _AX, _VP)
        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)
        self.Emit("MOVQ" , _BX, _AX)
        self.From("MULQ" , _CX)
        self.Emit("ADDQ" , _AX, _VP)

        self.Emit("MOVQ" , _DI, _AX)
        self.From("MULQ" , _CX)
        self.Emit("MOVQ" , _AX, _BX)
        self.Emit("MOVQ" , _VP, _AX)
        self.mem_clear_fn(true)
        self.Sjmp("JMP", "_append_slice_end_{n}")
    }

    self.Emit("MOVQ" , _BX, _AX)
    self.Link("_index_{n}")
    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)
    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)
    self.From("MULQ" , _CX)
    self.Emit("ADDQ" , _AX, _VP)
    self.Link("_append_slice_end_{n}")
}

func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
    self.call_sf(_F_skip_object)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
}

func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
}

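// _asm_OP_struct_field hashes the field name with strhash and probes the
// frozen field table (32-byte entries) by hash, name length and finally
// memequal; on a miss it falls back to the case-insensitive lookup, and the
// disable-unknown flag turns an unmatched field into a field error.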
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
    self.Emit("MOVQ" , jit.Imm(-1), _AX)
    self.Emit("MOVQ" , _AX, _VAR_sr)
    self.parse_string()
    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, false)
    self.Emit("LEAQ" , _ARG_sv, _AX)
    self.Emit("XORL" , _BX, _BX)
    self.call_go(_F_strhash)
    self.Emit("MOVQ" , _AX, _R9)
    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)
    self.Emit("TESTQ", _CX, _CX)
    self.Sjmp("JZ" , "_try_lowercase_{n}")
    self.Link("_loop_{n}")
    self.Emit("XORL" , _DX, _DX)
    self.From("DIVQ" , _CX)
    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)
    self.Emit("SHLQ" , jit.Imm(5), _DX)
    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)
    self.Emit("TESTQ", _R8, _R8)
    self.Sjmp("JZ" , "_try_lowercase_{n}")
    self.Emit("CMPQ" , _R8, _R9)
    self.Sjmp("JNE" , "_loop_{n}")
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)
    self.Emit("CMPQ" , _DX, _ARG_sv_n)
    self.Sjmp("JNE" , "_loop_{n}")
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)
    self.Emit("MOVQ" , _AX, _VAR_ss_AX)
    self.Emit("MOVQ" , _CX, _VAR_ss_CX)
    self.Emit("MOVQ" , _SI, _VAR_ss_SI)
    self.Emit("MOVQ" , _R8, _VAR_ss_R8)
    self.Emit("MOVQ" , _R9, _VAR_ss_R9)
    self.Emit("MOVQ" , _ARG_sv_p, _AX)
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)
    self.Emit("MOVQ" , _CX, _BX)
    self.Emit("MOVQ" , _DX, _CX)
    self.call_go(_F_memequal)
    self.Emit("MOVB" , _AX, _DX)
    self.Emit("MOVQ" , _VAR_ss_AX, _AX)
    self.Emit("MOVQ" , _VAR_ss_CX, _CX)
    self.Emit("MOVQ" , _VAR_ss_SI, _SI)
    self.Emit("MOVQ" , _VAR_ss_R9, _R9)
    self.Emit("TESTB", _DX, _DX)
    self.Sjmp("JZ" , "_loop_{n}")
    self.Emit("MOVQ" , _VAR_ss_R8, _R8)
    self.Emit("MOVQ" , _R8, _VAR_sr)
    self.Sjmp("JMP" , "_end_{n}")
    self.Link("_try_lowercase_{n}")
    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)
    self.Emit("MOVQ", _ARG_sv_p, _BX)
    self.Emit("MOVQ", _ARG_sv_n, _CX)
    self.call_go(_F_FieldMap_GetCaseInsensitive)
    self.Emit("MOVQ" , _AX, _VAR_sr)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNS" , "_end_{n}")
    self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv)
    self.Sjmp("JC" , _LB_field_error)
    self.Link("_end_{n}")
}

func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
    self.unmarshal_json(p.vt(), true)
}

func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
    self.unmarshal_json(p.vt(), false)
}

func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
    self.unmarshal_text(p.vt(), true)
}

func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
    self.unmarshal_text(p.vt(), false)
}

func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
    self.lspace("_{n}")
}

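// lspace skips insignificant whitespace: up to four bytes are tested inline
// against the _BM_space bitmap before falling back to the native lspace
// routine for longer runs.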
func (self *_Assembler) lspace(suffix string) {
    var label = "_lspace" + suffix
    self.Emit("CMPQ" , _IC, _IL)
    self.Sjmp("JAE" , _LB_eof_error)
    self.Emit("MOVQ" , jit.Imm(_BM_space), _DX)
    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)
    self.Emit("CMPQ" , _AX, jit.Imm(' '))
    self.Sjmp("JA" , label)
    self.Emit("BTQ" , _AX, _DX)
    self.Sjmp("JNC" , label)

    for i := 0; i < 3; i++ {
        self.Emit("ADDQ" , jit.Imm(1), _IC)
        self.Emit("CMPQ" , _IC, _IL)
        self.Sjmp("JAE" , _LB_eof_error)
        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)
        self.Emit("CMPQ" , _AX, jit.Imm(' '))
        self.Sjmp("JA" , label)
        self.Emit("BTQ" , _AX, _DX)
        self.Sjmp("JNC" , label)
    }

    self.Emit("MOVQ" , _IP, _DI)
    self.Emit("MOVQ" , _IL, _SI)
    self.Emit("MOVQ" , _IC, _DX)
    self.callc(_F_lspace)
    self.Emit("TESTQ" , _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
    self.Emit("CMPQ" , _AX, _IL)
    self.Sjmp("JAE" , _LB_eof_error)
    self.Emit("MOVQ" , _AX, _IC)
    self.Link(label)
}

func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
    self.match_char(p.vb())
}

func (self *_Assembler) match_char(char byte) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))
    self.Sjmp("JNE" , _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
}

func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
    self.check_eof(1)
    self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX)
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))
    self.Emit("CMOVQEQ", _AX, _IC)
    self.Xjmp("JE" , p.vi())
}

func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))
    self.Xjmp("JE" , p.vi())
}

func (self *_Assembler) _asm_OP_add(p *_Instr) {
    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)
}

func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)
    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))
    self.Sjmp("JAE" , _LB_stack_error)
    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false)
    self.Emit("ADDQ", jit.Imm(8), _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("SUBQ", jit.Imm(8), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)
    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))
    self.Emit("XORL", _BX, _BX)
    self.Emit("MOVQ", _BX, jit.Sib(_ST, _AX, 1, 8))
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)
    self.Emit("SUBQ" , jit.Imm(16), _AX)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))
}

func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _AX)
    self.decode_dynamic(_AX, _VP)
}

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

func (self *_Assembler) _asm_OP_switch(p *_Instr) {
    self.Emit("MOVQ", _VAR_sr, _AX)
    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))
    self.Sjmp("JAE" , "_default_{n}")

    self.Byte(0x48, 0x8d, 0x3d)
    self.Sref("_switch_table_{n}", 4)
    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)
    self.Emit("ADDQ" , _DI, _AX)
    self.Rjmp("JMP" , _AX)
    self.Link("_switch_table_{n}")

    for i, v := range p.vs() {
        self.Xref(v, int64(-i) * 4)
    }

    self.Link("_default_{n}")
    self.NOP()
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
    self.call_go(_F_println)
}