compiler.go (24323B)
/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package encoder

import (
    `fmt`
    `reflect`
    `strconv`
    `strings`
    `unsafe`

    `github.com/bytedance/sonic/internal/resolver`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/bytedance/sonic/option`
)

type _Op uint8

const (
    _OP_null _Op = iota + 1
    _OP_empty_arr
    _OP_empty_obj
    _OP_bool
    _OP_i8
    _OP_i16
    _OP_i32
    _OP_i64
    _OP_u8
    _OP_u16
    _OP_u32
    _OP_u64
    _OP_f32
    _OP_f64
    _OP_str
    _OP_bin
    _OP_quote
    _OP_number
    _OP_eface
    _OP_iface
    _OP_byte
    _OP_text
    _OP_deref
    _OP_index
    _OP_load
    _OP_save
    _OP_drop
    _OP_drop_2
    _OP_recurse
    _OP_is_nil
    _OP_is_nil_p1
    _OP_is_zero_1
    _OP_is_zero_2
    _OP_is_zero_4
    _OP_is_zero_8
    _OP_is_zero_map
    _OP_goto
    _OP_map_iter
    _OP_map_stop
    _OP_map_check_key
    _OP_map_write_key
    _OP_map_value_next
    _OP_slice_len
    _OP_slice_next
    _OP_marshal
    _OP_marshal_p
    _OP_marshal_text
    _OP_marshal_text_p
    _OP_cond_set
    _OP_cond_testc
)

const (
    _INT_SIZE = 32 << (^uint(0) >> 63)
    _PTR_SIZE = 32 << (^uintptr(0) >> 63)
    _PTR_BYTE = unsafe.Sizeof(uintptr(0))
)

const (
    _MAX_ILBUF  = 100000 // cutoff at 100k IL instructions
    _MAX_FIELDS = 50     // cutoff at structs with 50 fields
)

var _OpNames = [256]string {
    _OP_null           : "null",
    _OP_empty_arr      : "empty_arr",
    _OP_empty_obj      : "empty_obj",
    _OP_bool           : "bool",
    _OP_i8             : "i8",
    _OP_i16            : "i16",
    _OP_i32            : "i32",
    _OP_i64            : "i64",
    _OP_u8             : "u8",
    _OP_u16            : "u16",
    _OP_u32            : "u32",
    _OP_u64            : "u64",
    _OP_f32            : "f32",
    _OP_f64            : "f64",
    _OP_str            : "str",
    _OP_bin            : "bin",
    _OP_quote          : "quote",
    _OP_number         : "number",
    _OP_eface          : "eface",
    _OP_iface          : "iface",
    _OP_byte           : "byte",
    _OP_text           : "text",
    _OP_deref          : "deref",
    _OP_index          : "index",
    _OP_load           : "load",
    _OP_save           : "save",
    _OP_drop           : "drop",
    _OP_drop_2         : "drop_2",
    _OP_recurse        : "recurse",
    _OP_is_nil         : "is_nil",
    _OP_is_nil_p1      : "is_nil_p1",
    _OP_is_zero_1      : "is_zero_1",
    _OP_is_zero_2      : "is_zero_2",
    _OP_is_zero_4      : "is_zero_4",
    _OP_is_zero_8      : "is_zero_8",
    _OP_is_zero_map    : "is_zero_map",
    _OP_goto           : "goto",
    _OP_map_iter       : "map_iter",
    _OP_map_stop       : "map_stop",
    _OP_map_check_key  : "map_check_key",
    _OP_map_write_key  : "map_write_key",
    _OP_map_value_next : "map_value_next",
    _OP_slice_len      : "slice_len",
    _OP_slice_next     : "slice_next",
    _OP_marshal        : "marshal",
    _OP_marshal_p      : "marshal_p",
    _OP_marshal_text   : "marshal_text",
    _OP_marshal_text_p : "marshal_text_p",
    _OP_cond_set       : "cond_set",
    _OP_cond_testc     : "cond_testc",
}

func (self _Op) String() string {
    if ret := _OpNames[self]; ret != "" {
        return ret
    } else {
        return "<invalid>"
    }
}

func _OP_int() _Op {
    switch _INT_SIZE {
        case 32: return _OP_i32
        case 64: return _OP_i64
        default: panic("unsupported int size")
    }
}

func _OP_uint() _Op {
    switch _INT_SIZE {
        case 32: return _OP_u32
        case 64: return _OP_u64
        default: panic("unsupported uint size")
    }
}

func _OP_uintptr() _Op {
    switch _PTR_SIZE {
        case 32: return _OP_u32
        case 64: return _OP_u64
        default: panic("unsupported pointer size")
    }
}

func _OP_is_zero_ints() _Op {
    switch _INT_SIZE {
        case 32: return _OP_is_zero_4
        case 64: return _OP_is_zero_8
        default: panic("unsupported integer size")
    }
}
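// Note: the shift trick in _INT_SIZE and _PTR_SIZE evaluates to the platform
// word size at compile time: ^uint(0) >> 63 is 1 on a 64-bit platform and 0 on
// a 32-bit one, so the constants evaluate to 64 or 32 and the helpers above
// pick the matching opcode.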
type _Instr struct {
    u uint64         // union {op: 8, _: 8, vi: 48}, vi may be an int or len(str)
    p unsafe.Pointer // may be GoString.Ptr or *GoType
}

func packOp(op _Op) uint64 {
    return uint64(op) << 56
}

func newInsOp(op _Op) _Instr {
    return _Instr{u: packOp(op)}
}

func newInsVi(op _Op, vi int) _Instr {
    return _Instr{u: packOp(op) | rt.PackInt(vi)}
}

func newInsVs(op _Op, vs string) _Instr {
    return _Instr {
        u: packOp(op) | rt.PackInt(len(vs)),
        p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr,
    }
}

func newInsVt(op _Op, vt reflect.Type) _Instr {
    return _Instr {
        u: packOp(op),
        p: unsafe.Pointer(rt.UnpackType(vt)),
    }
}

func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr {
    i := 0
    if pv {
        i = 1
    }
    return _Instr {
        u: packOp(op) | rt.PackInt(i),
        p: unsafe.Pointer(rt.UnpackType(vt)),
    }
}

func (self _Instr) op() _Op {
    return _Op(self.u >> 56)
}

func (self _Instr) vi() int {
    return rt.UnpackInt(self.u)
}

func (self _Instr) vf() uint8 {
    return (*rt.GoType)(self.p).KindFlags
}

func (self _Instr) vs() (v string) {
    (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p
    (*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi()
    return
}

func (self _Instr) vk() reflect.Kind {
    return (*rt.GoType)(self.p).Kind()
}

func (self _Instr) vt() reflect.Type {
    return (*rt.GoType)(self.p).Pack()
}

func (self _Instr) vp() (vt reflect.Type, pv bool) {
    return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1
}

func (self _Instr) i64() int64 {
    return int64(self.vi())
}

func (self _Instr) vlen() int {
    return int((*rt.GoType)(self.p).Size)
}

func (self _Instr) isBranch() bool {
    switch self.op() {
        case _OP_goto          : fallthrough
        case _OP_is_nil        : fallthrough
        case _OP_is_nil_p1     : fallthrough
        case _OP_is_zero_1     : fallthrough
        case _OP_is_zero_2     : fallthrough
        case _OP_is_zero_4     : fallthrough
        case _OP_is_zero_8     : fallthrough
        case _OP_map_check_key : fallthrough
        case _OP_map_write_key : fallthrough
        case _OP_slice_next    : fallthrough
        case _OP_cond_testc    : return true
        default                : return false
    }
}
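// Illustrative sketch of the packed layout described on the _Instr fields,
// assuming rt.PackInt and rt.UnpackInt use the low 48 bits of the word:
//
//     ins := newInsVi(_OP_byte, '{')
//     ins.op() // == _OP_byte, taken from the high 8 bits
//     ins.vi() // == '{', recovered via rt.UnpackInt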
func (self _Instr) disassemble() string {
    switch self.op() {
        case _OP_byte           : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi())))
        case _OP_text           : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs()))
        case _OP_index          : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi())
        case _OP_recurse        : fallthrough
        case _OP_map_iter       : fallthrough
        case _OP_marshal        : fallthrough
        case _OP_marshal_p      : fallthrough
        case _OP_marshal_text   : fallthrough
        case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt())
        case _OP_goto           : fallthrough
        case _OP_is_nil         : fallthrough
        case _OP_is_nil_p1      : fallthrough
        case _OP_is_zero_1      : fallthrough
        case _OP_is_zero_2      : fallthrough
        case _OP_is_zero_4      : fallthrough
        case _OP_is_zero_8      : fallthrough
        case _OP_is_zero_map    : fallthrough
        case _OP_cond_testc     : fallthrough
        case _OP_map_check_key  : fallthrough
        case _OP_map_write_key  : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi())
        case _OP_slice_next     : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt())
        default                 : return self.op().String()
    }
}

type (
    _Program []_Instr
)

func (self _Program) pc() int {
    return len(self)
}

func (self _Program) tag(n int) {
    if n >= _MaxStack {
        panic("type nesting too deep")
    }
}

func (self _Program) pin(i int) {
    v := &self[i]
    v.u &= 0xffff000000000000
    v.u |= rt.PackInt(self.pc())
}

func (self _Program) rel(v []int) {
    for _, i := range v {
        self.pin(i)
    }
}

func (self *_Program) add(op _Op) {
    *self = append(*self, newInsOp(op))
}

func (self *_Program) key(op _Op) {
    *self = append(*self,
        newInsVi(_OP_byte, '"'),
        newInsOp(op),
        newInsVi(_OP_byte, '"'),
    )
}

func (self *_Program) int(op _Op, vi int) {
    *self = append(*self, newInsVi(op, vi))
}

func (self *_Program) str(op _Op, vs string) {
    *self = append(*self, newInsVs(op, vs))
}

func (self *_Program) rtt(op _Op, vt reflect.Type) {
    *self = append(*self, newInsVt(op, vt))
}

func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) {
    *self = append(*self, newInsVp(op, vt, pv))
}

func (self _Program) disassemble() string {
    nb  := len(self)
    tab := make([]bool, nb + 1)
    ret := make([]string, 0, nb + 1)

    /* prescan to get all the labels */
    for _, ins := range self {
        if ins.isBranch() {
            tab[ins.vi()] = true
        }
    }

    /* disassemble each instruction */
    for i, ins := range self {
        if !tab[i] {
            ret = append(ret, "\t" + ins.disassemble())
        } else {
            ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
        }
    }

    /* add the last label, if needed */
    if tab[nb] {
        ret = append(ret, fmt.Sprintf("L_%d:", nb))
    }

    /* add an "end" indicator, and join all the strings */
    return strings.Join(append(ret, "\tend"), "\n")
}

type _Compiler struct {
    opts option.CompileOptions
    pv   bool
    tab  map[reflect.Type]bool
    rec  map[reflect.Type]uint8
}

func newCompiler() *_Compiler {
    return &_Compiler {
        opts: option.DefaultCompileOptions(),
        tab:  map[reflect.Type]bool{},
        rec:  map[reflect.Type]uint8{},
    }
}

func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
    self.opts = opts
    if self.opts.RecursiveDepth > 0 {
        self.rec = map[reflect.Type]uint8{}
    }
    return self
}

func (self *_Compiler) rescue(ep *error) {
    if val := recover(); val != nil {
        if err, ok := val.(error); ok {
            *ep = err
        } else {
            panic(val)
        }
    }
}

func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) {
    defer self.rescue(&err)
    self.compileOne(&ret, 0, vt, pv)
    return
}
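// Illustrative sketch of a hypothetical caller: compiling a struct type into
// an IL program and printing its listing for inspection.
//
//     type point struct{ X, Y int }
//     prog, err := newCompiler().compile(reflect.TypeOf(point{}), true)
//     if err == nil {
//         println(prog.disassemble())
//     }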
func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) {
    if self.tab[vt] {
        p.vp(_OP_recurse, vt, pv)
    } else {
        self.compileRec(p, sp, vt, pv)
    }
}

func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) {
    pr := self.pv
    pt := reflect.PtrTo(vt)

    /* check for addressable `json.Marshaler` with pointer receiver */
    if pv && pt.Implements(jsonMarshalerType) {
        p.rtt(_OP_marshal_p, pt)
        return
    }

    /* check for `json.Marshaler` */
    if vt.Implements(jsonMarshalerType) {
        self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType)
        return
    }

    /* check for addressable `encoding.TextMarshaler` with pointer receiver */
    if pv && pt.Implements(encodingTextMarshalerType) {
        p.rtt(_OP_marshal_text_p, pt)
        return
    }

    /* check for `encoding.TextMarshaler` */
    if vt.Implements(encodingTextMarshalerType) {
        self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType)
        return
    }

    /* enter the recursion, and compile the type */
    self.pv = pv
    self.tab[vt] = true
    self.compileOps(p, sp, vt)

    /* exit the recursion */
    self.pv = pr
    delete(self.tab, vt)
}

func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Bool      : p.add(_OP_bool)
        case reflect.Int       : p.add(_OP_int())
        case reflect.Int8      : p.add(_OP_i8)
        case reflect.Int16     : p.add(_OP_i16)
        case reflect.Int32     : p.add(_OP_i32)
        case reflect.Int64     : p.add(_OP_i64)
        case reflect.Uint      : p.add(_OP_uint())
        case reflect.Uint8     : p.add(_OP_u8)
        case reflect.Uint16    : p.add(_OP_u16)
        case reflect.Uint32    : p.add(_OP_u32)
        case reflect.Uint64    : p.add(_OP_u64)
        case reflect.Uintptr   : p.add(_OP_uintptr())
        case reflect.Float32   : p.add(_OP_f32)
        case reflect.Float64   : p.add(_OP_f64)
        case reflect.String    : self.compileString    (p, vt)
        case reflect.Array     : self.compileArray     (p, sp, vt.Elem(), vt.Len())
        case reflect.Interface : self.compileInterface (p, vt)
        case reflect.Map       : self.compileMap       (p, sp, vt)
        case reflect.Ptr       : self.compilePtr       (p, sp, vt.Elem())
        case reflect.Slice     : self.compileSlice     (p, sp, vt.Elem())
        case reflect.Struct    : self.compileStruct    (p, sp, vt)
        default                : panic                 (error_type(vt))
    }
}

func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) {
    x := p.pc()
    p.add(_OP_is_nil)
    fn(p, sp, vt)
    e := p.pc()
    p.add(_OP_goto)
    p.pin(x)
    p.add(nil_op)
    p.pin(e)
}

func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) {
    self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody)
}

func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) {
    p.tag(sp)
    p.add(_OP_save)
    p.add(_OP_deref)
    self.compileOne(p, sp + 1, vt, true)
    p.add(_OP_drop)
}

func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
    self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody)
}
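// Rough shape of the program emitted by compileNil for pointers, maps and
// slices (branch targets are resolved later through pin):
//
//            is_nil L_nil     // branch to the "empty" op when the value is nil
//            <body>
//            goto L_end
//     L_nil: null / empty_obj / empty_arr
//     L_end: ...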
func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) {
    p.tag(sp + 1)
    p.int(_OP_byte, '{')
    p.add(_OP_save)
    p.rtt(_OP_map_iter, vt)
    p.add(_OP_save)
    i := p.pc()
    p.add(_OP_map_check_key)
    u := p.pc()
    p.add(_OP_map_write_key)
    self.compileMapBodyKey(p, vt.Key())
    p.pin(u)
    p.int(_OP_byte, ':')
    p.add(_OP_map_value_next)
    self.compileOne(p, sp + 2, vt.Elem(), false)
    j := p.pc()
    p.add(_OP_map_check_key)
    p.int(_OP_byte, ',')
    v := p.pc()
    p.add(_OP_map_write_key)
    self.compileMapBodyKey(p, vt.Key())
    p.pin(v)
    p.int(_OP_byte, ':')
    p.add(_OP_map_value_next)
    self.compileOne(p, sp + 2, vt.Elem(), false)
    p.int(_OP_goto, j)
    p.pin(i)
    p.pin(j)
    p.add(_OP_map_stop)
    p.add(_OP_drop_2)
    p.int(_OP_byte, '}')
}

func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) {
    if !vk.Implements(encodingTextMarshalerType) {
        self.compileMapBodyTextKey(p, vk)
    } else {
        self.compileMapBodyUtextKey(p, vk)
    }
}

func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) {
    switch vk.Kind() {
        case reflect.Invalid : panic("map key is nil")
        case reflect.Bool    : p.key(_OP_bool)
        case reflect.Int     : p.key(_OP_int())
        case reflect.Int8    : p.key(_OP_i8)
        case reflect.Int16   : p.key(_OP_i16)
        case reflect.Int32   : p.key(_OP_i32)
        case reflect.Int64   : p.key(_OP_i64)
        case reflect.Uint    : p.key(_OP_uint())
        case reflect.Uint8   : p.key(_OP_u8)
        case reflect.Uint16  : p.key(_OP_u16)
        case reflect.Uint32  : p.key(_OP_u32)
        case reflect.Uint64  : p.key(_OP_u64)
        case reflect.Uintptr : p.key(_OP_uintptr())
        case reflect.Float32 : p.key(_OP_f32)
        case reflect.Float64 : p.key(_OP_f64)
        case reflect.String  : self.compileString(p, vk)
        default              : panic(error_type(vk))
    }
}

func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) {
    if vk.Kind() != reflect.Ptr {
        p.rtt(_OP_marshal_text, vk)
    } else {
        self.compileMapBodyUtextPtr(p, vk)
    }
}

func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) {
    i := p.pc()
    p.add(_OP_is_nil)
    p.rtt(_OP_marshal_text, vk)
    j := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.str(_OP_text, "\"\"")
    p.pin(j)
}

func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
    self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody)
}

func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) {
    if isSimpleByte(vt) {
        p.add(_OP_bin)
    } else {
        self.compileSliceArray(p, sp, vt)
    }
}

func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) {
    p.tag(sp)
    p.int(_OP_byte, '[')
    p.add(_OP_save)
    p.add(_OP_slice_len)
    i := p.pc()
    p.rtt(_OP_slice_next, vt)
    self.compileOne(p, sp + 1, vt, true)
    j := p.pc()
    p.rtt(_OP_slice_next, vt)
    p.int(_OP_byte, ',')
    self.compileOne(p, sp + 1, vt, true)
    p.int(_OP_goto, j)
    p.pin(i)
    p.pin(j)
    p.add(_OP_drop)
    p.int(_OP_byte, ']')
}
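// Rough shape of the loop emitted by compileSliceArray: the first element is
// written without a leading comma, later elements are prefixed with one, and
// both slice_next instructions branch to the common exit once the slice is
// exhausted:
//
//             byte '['
//             save
//             slice_len
//             slice_next L_end    // first element, no comma
//             <element program>
//     L_loop: slice_next L_end
//             byte ','
//             <element program>
//             goto L_loop
//     L_end:  drop
//             byte ']'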
func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) {
    p.tag(sp)
    p.int(_OP_byte, '[')
    p.add(_OP_save)

    /* first item */
    if nb != 0 {
        self.compileOne(p, sp + 1, vt, self.pv)
        p.add(_OP_load)
    }

    /* remaining items */
    for i := 1; i < nb; i++ {
        p.int(_OP_byte, ',')
        p.int(_OP_index, i * int(vt.Size()))
        self.compileOne(p, sp + 1, vt, self.pv)
        p.add(_OP_load)
    }

    /* end of array */
    p.add(_OP_drop)
    p.int(_OP_byte, ']')
}

func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
    if vt != jsonNumberType {
        p.add(_OP_str)
    } else {
        p.add(_OP_number)
    }
}

func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
    if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
        p.vp(_OP_recurse, vt, self.pv)
        if self.opts.RecursiveDepth > 0 {
            if self.pv {
                self.rec[vt] = 1
            } else {
                self.rec[vt] = 0
            }
        }
    } else {
        self.compileStructBody(p, sp, vt)
    }
}

func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
    p.tag(sp)
    p.int(_OP_byte, '{')
    p.add(_OP_save)
    p.add(_OP_cond_set)

    /* compile each field */
    for _, fv := range resolver.ResolveStruct(vt) {
        var s []int
        var o resolver.Offset

        /* "omitempty" for arrays */
        if fv.Type.Kind() == reflect.Array {
            if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 {
                continue
            }
        }

        /* index to the field */
        for _, o = range fv.Path {
            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
                s = append(s, p.pc())
                p.add(_OP_is_nil)
                p.add(_OP_deref)
            }
        }

        /* check for "omitempty" option */
        if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 {
            s = append(s, p.pc())
            self.compileStructFieldZero(p, fv.Type)
        }

        /* add the comma if not the first element */
        i := p.pc()
        p.add(_OP_cond_testc)
        p.int(_OP_byte, ',')
        p.pin(i)

        /* compile the key and value */
        ft := fv.Type
        p.str(_OP_text, Quote(fv.Name) + ":")

        /* check for the "string" option */
        if (fv.Opts & resolver.F_stringize) == 0 {
            self.compileOne(p, sp + 1, ft, self.pv)
        } else {
            self.compileStructFieldStr(p, sp + 1, ft)
        }

        /* patch the skipping jumps and reload the struct pointer */
        p.rel(s)
        p.add(_OP_load)
    }

    /* end of object */
    p.add(_OP_drop)
    p.int(_OP_byte, '}')
}

func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
    pc := -1
    ft := vt
    sv := false

    /* dereference the pointer if needed */
    if ft.Kind() == reflect.Ptr {
        ft = ft.Elem()
    }

    /* check if it can be stringized */
    switch ft.Kind() {
        case reflect.Bool    : sv = true
        case reflect.Int     : sv = true
        case reflect.Int8    : sv = true
        case reflect.Int16   : sv = true
        case reflect.Int32   : sv = true
        case reflect.Int64   : sv = true
        case reflect.Uint    : sv = true
        case reflect.Uint8   : sv = true
        case reflect.Uint16  : sv = true
        case reflect.Uint32  : sv = true
        case reflect.Uint64  : sv = true
        case reflect.Uintptr : sv = true
        case reflect.Float32 : sv = true
        case reflect.Float64 : sv = true
        case reflect.String  : sv = true
    }

    /* if it's not, ignore the "string" option and follow the regular path */
    if !sv {
        self.compileOne(p, sp, vt, self.pv)
        return
    }

    /* dereference the pointer */
    if vt.Kind() == reflect.Ptr {
        pc = p.pc()
        vt = vt.Elem()
        p.add(_OP_is_nil)
        p.add(_OP_deref)
    }

    /* special case of a double-quoted string */
    if ft != jsonNumberType && ft.Kind() == reflect.String {
        p.add(_OP_quote)
    } else {
        self.compileStructFieldQuoted(p, sp, vt)
    }

    /* the "null" case of the pointer */
    if pc != -1 {
        e := p.pc()
        p.add(_OP_goto)
        p.pin(pc)
        p.add(_OP_null)
        p.pin(e)
    }
}
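// Illustrative example of the ",string" option on a hypothetical struct: only
// boolean, integer, float and string fields take the quoted path, other kinds
// fall back to the regular encoder.
//
//     type sample struct {
//         A int    `json:"a,string"` // wrapped in '"' bytes, e.g. "42"
//         B string `json:"b,string"` // double-quoted via _OP_quote
//         C []int  `json:"c,string"` // option ignored, encoded normally
//     }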
func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Bool      : p.add(_OP_is_zero_1)
        case reflect.Int       : p.add(_OP_is_zero_ints())
        case reflect.Int8      : p.add(_OP_is_zero_1)
        case reflect.Int16     : p.add(_OP_is_zero_2)
        case reflect.Int32     : p.add(_OP_is_zero_4)
        case reflect.Int64     : p.add(_OP_is_zero_8)
        case reflect.Uint      : p.add(_OP_is_zero_ints())
        case reflect.Uint8     : p.add(_OP_is_zero_1)
        case reflect.Uint16    : p.add(_OP_is_zero_2)
        case reflect.Uint32    : p.add(_OP_is_zero_4)
        case reflect.Uint64    : p.add(_OP_is_zero_8)
        case reflect.Uintptr   : p.add(_OP_is_nil)
        case reflect.Float32   : p.add(_OP_is_zero_4)
        case reflect.Float64   : p.add(_OP_is_zero_8)
        case reflect.String    : p.add(_OP_is_nil_p1)
        case reflect.Interface : p.add(_OP_is_nil_p1)
        case reflect.Map       : p.add(_OP_is_zero_map)
        case reflect.Ptr       : p.add(_OP_is_nil)
        case reflect.Slice     : p.add(_OP_is_nil_p1)
        default                : panic(error_type(vt))
    }
}

func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) {
    p.int(_OP_byte, '"')
    self.compileOne(p, sp, vt, self.pv)
    p.int(_OP_byte, '"')
}

func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
    x := p.pc()
    p.add(_OP_is_nil_p1)

    /* ifaces and efaces are different */
    if vt.NumMethod() == 0 {
        p.add(_OP_eface)
    } else {
        p.add(_OP_iface)
    }

    /* the "null" value */
    e := p.pc()
    p.add(_OP_goto)
    p.pin(x)
    p.add(_OP_null)
    p.pin(e)
}

func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) {
    pc := p.pc()
    vk := vt.Kind()

    /* direct receiver */
    if vk != reflect.Ptr {
        p.rtt(op, vt)
        return
    }

    /* value receiver with a pointer type, check for nil before calling the marshaler */
    p.add(_OP_is_nil)
    p.rtt(op, vt)
    i := p.pc()
    p.add(_OP_goto)
    p.pin(pc)
    p.add(_OP_null)
    p.pin(i)
}