Source file: src/reflect/value.go (package reflect)
5 package reflect
6
7 import (
8 "internal/abi"
9 "internal/itoa"
10 "internal/unsafeheader"
11 "math"
12 "runtime"
13 "unsafe"
14 )
15
16 const ptrSize = 4 << (^uintptr(0) >> 63)
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39 type Value struct {
40
41 typ *rtype
42
43
44
45 ptr unsafe.Pointer
46
47
48
49
50
51
52
53
54
55
56
57
58
59 flag
60
61
62
63
64
65
66 }
67
68 type flag uintptr
69
70 const (
71 flagKindWidth = 5
72 flagKindMask flag = 1<<flagKindWidth - 1
73 flagStickyRO flag = 1 << 5
74 flagEmbedRO flag = 1 << 6
75 flagIndir flag = 1 << 7
76 flagAddr flag = 1 << 8
77 flagMethod flag = 1 << 9
78 flagMethodShift = 10
79 flagRO flag = flagStickyRO | flagEmbedRO
80 )
81
82 func (f flag) kind() Kind {
83 return Kind(f & flagKindMask)
84 }
85
86 func (f flag) ro() flag {
87 if f&flagRO != 0 {
88 return flagStickyRO
89 }
90 return 0
91 }
92
93
94
95
96 func (v Value) pointer() unsafe.Pointer {
97 if v.typ.size != ptrSize || !v.typ.pointers() {
98 panic("can't call pointer on a non-pointer Value")
99 }
100 if v.flag&flagIndir != 0 {
101 return *(*unsafe.Pointer)(v.ptr)
102 }
103 return v.ptr
104 }
105
106
107 func packEface(v Value) interface{} {
108 t := v.typ
109 var i interface{}
110 e := (*emptyInterface)(unsafe.Pointer(&i))
111
112 switch {
113 case ifaceIndir(t):
114 if v.flag&flagIndir == 0 {
115 panic("bad indir")
116 }
117
118 ptr := v.ptr
119 if v.flag&flagAddr != 0 {
120
121
122 c := unsafe_New(t)
123 typedmemmove(t, c, ptr)
124 ptr = c
125 }
126 e.word = ptr
127 case v.flag&flagIndir != 0:
128
129
130 e.word = *(*unsafe.Pointer)(v.ptr)
131 default:
132
133 e.word = v.ptr
134 }
135
136
137
138
139 e.typ = t
140 return i
141 }
142
143
144 func unpackEface(i interface{}) Value {
145 e := (*emptyInterface)(unsafe.Pointer(&i))
146
147 t := e.typ
148 if t == nil {
149 return Value{}
150 }
151 f := flag(t.Kind())
152 if ifaceIndir(t) {
153 f |= flagIndir
154 }
155 return Value{t, e.word, f}
156 }
157
158
159
160
161 type ValueError struct {
162 Method string
163 Kind Kind
164 }
165
166 func (e *ValueError) Error() string {
167 if e.Kind == 0 {
168 return "reflect: call of " + e.Method + " on zero Value"
169 }
170 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
171 }
172
173
174
175 func methodName() string {
176 pc, _, _, _ := runtime.Caller(2)
177 f := runtime.FuncForPC(pc)
178 if f == nil {
179 return "unknown method"
180 }
181 return f.Name()
182 }
183
184
185
186 func methodNameSkip() string {
187 pc, _, _, _ := runtime.Caller(3)
188 f := runtime.FuncForPC(pc)
189 if f == nil {
190 return "unknown method"
191 }
192 return f.Name()
193 }
194
195
196 type emptyInterface struct {
197 typ *rtype
198 word unsafe.Pointer
199 }
200
201
202 type nonEmptyInterface struct {
203
204 itab *struct {
205 ityp *rtype
206 typ *rtype
207 hash uint32
208 _ [4]byte
209 fun [100000]unsafe.Pointer
210 }
211 word unsafe.Pointer
212 }
213
214
215
216
217
218
219
220 func (f flag) mustBe(expected Kind) {
221
222 if Kind(f&flagKindMask) != expected {
223 panic(&ValueError{methodName(), f.kind()})
224 }
225 }
226
227
228
229 func (f flag) mustBeExported() {
230 if f == 0 || f&flagRO != 0 {
231 f.mustBeExportedSlow()
232 }
233 }
234
235 func (f flag) mustBeExportedSlow() {
236 if f == 0 {
237 panic(&ValueError{methodNameSkip(), Invalid})
238 }
239 if f&flagRO != 0 {
240 panic("reflect: " + methodNameSkip() + " using value obtained using unexported field")
241 }
242 }
243
244
245
246
247 func (f flag) mustBeAssignable() {
248 if f&flagRO != 0 || f&flagAddr == 0 {
249 f.mustBeAssignableSlow()
250 }
251 }
252
253 func (f flag) mustBeAssignableSlow() {
254 if f == 0 {
255 panic(&ValueError{methodNameSkip(), Invalid})
256 }
257
258 if f&flagRO != 0 {
259 panic("reflect: " + methodNameSkip() + " using value obtained using unexported field")
260 }
261 if f&flagAddr == 0 {
262 panic("reflect: " + methodNameSkip() + " using unaddressable value")
263 }
264 }
265
266
267
268
269
270
271 func (v Value) Addr() Value {
272 if v.flag&flagAddr == 0 {
273 panic("reflect.Value.Addr of unaddressable value")
274 }
275
276
277 fl := v.flag & flagRO
278 return Value{v.typ.ptrTo(), v.ptr, fl | flag(Ptr)}
279 }
280
281
282
283 func (v Value) Bool() bool {
284 v.mustBe(Bool)
285 return *(*bool)(v.ptr)
286 }
287
288
289
290 func (v Value) Bytes() []byte {
291 v.mustBe(Slice)
292 if v.typ.Elem().Kind() != Uint8 {
293 panic("reflect.Value.Bytes of non-byte slice")
294 }
295
296 return *(*[]byte)(v.ptr)
297 }
298
299
300
301 func (v Value) runes() []rune {
302 v.mustBe(Slice)
303 if v.typ.Elem().Kind() != Int32 {
304 panic("reflect.Value.Bytes of non-rune slice")
305 }
306
307 return *(*[]rune)(v.ptr)
308 }
309
310
311
312
313
314
315 func (v Value) CanAddr() bool {
316 return v.flag&flagAddr != 0
317 }
318
319
320
321
322
323
324 func (v Value) CanSet() bool {
325 return v.flag&(flagAddr|flagRO) == flagAddr
326 }
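
As an aside for readers of this listing (not part of value.go itself), here is a minimal sketch of how addressability and settability play out through the exported API; the variable names are made up for the example.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	x := 42

	v := reflect.ValueOf(x)
	fmt.Println(v.CanAddr(), v.CanSet()) // false false: v holds a copy of x

	p := reflect.ValueOf(&x).Elem()
	fmt.Println(p.CanAddr(), p.CanSet()) // true true: p refers to x itself

	p.SetInt(7)
	fmt.Println(x) // 7
}

Only values reached through a pointer's Elem (or an addressable container) carry the flagAddr bit that Set and Addr require.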
327
328
329
330
331
332
333
334
335
336 func (v Value) Call(in []Value) []Value {
337 v.mustBe(Func)
338 v.mustBeExported()
339 return v.call("Call", in)
340 }
341
342
343
344
345
346
347
348
349 func (v Value) CallSlice(in []Value) []Value {
350 v.mustBe(Func)
351 v.mustBeExported()
352 return v.call("CallSlice", in)
353 }
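
A small illustrative sketch of the difference between Call and CallSlice on a variadic function (the closure below is invented for the example and is not part of this file):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	sum := func(base int, extra ...int) int {
		for _, e := range extra {
			base += e
		}
		return base
	}
	fn := reflect.ValueOf(sum)

	// Call spreads the trailing arguments one Value at a time, like sum(1, 2, 3).
	out := fn.Call([]reflect.Value{
		reflect.ValueOf(1), reflect.ValueOf(2), reflect.ValueOf(3),
	})
	fmt.Println(out[0].Int()) // 6

	// CallSlice passes the final slice as-is, like sum(1, extra...).
	out = fn.CallSlice([]reflect.Value{
		reflect.ValueOf(1), reflect.ValueOf([]int{2, 3}),
	})
	fmt.Println(out[0].Int()) // 6
}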
354
355 var callGC bool
356
357 const debugReflectCall = false
358
359 func (v Value) call(op string, in []Value) []Value {
360
361 t := (*funcType)(unsafe.Pointer(v.typ))
362 var (
363 fn unsafe.Pointer
364 rcvr Value
365 rcvrtype *rtype
366 )
367 if v.flag&flagMethod != 0 {
368 rcvr = v
369 rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
370 } else if v.flag&flagIndir != 0 {
371 fn = *(*unsafe.Pointer)(v.ptr)
372 } else {
373 fn = v.ptr
374 }
375
376 if fn == nil {
377 panic("reflect.Value.Call: call of nil function")
378 }
379
380 isSlice := op == "CallSlice"
381 n := t.NumIn()
382 isVariadic := t.IsVariadic()
383 if isSlice {
384 if !isVariadic {
385 panic("reflect: CallSlice of non-variadic function")
386 }
387 if len(in) < n {
388 panic("reflect: CallSlice with too few input arguments")
389 }
390 if len(in) > n {
391 panic("reflect: CallSlice with too many input arguments")
392 }
393 } else {
394 if isVariadic {
395 n--
396 }
397 if len(in) < n {
398 panic("reflect: Call with too few input arguments")
399 }
400 if !isVariadic && len(in) > n {
401 panic("reflect: Call with too many input arguments")
402 }
403 }
404 for _, x := range in {
405 if x.Kind() == Invalid {
406 panic("reflect: " + op + " using zero Value argument")
407 }
408 }
409 for i := 0; i < n; i++ {
410 if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(targ) {
411 panic("reflect: " + op + " using " + xt.String() + " as type " + targ.String())
412 }
413 }
414 if !isSlice && isVariadic {
415
416 m := len(in) - n
417 slice := MakeSlice(t.In(n), m, m)
418 elem := t.In(n).Elem()
419 for i := 0; i < m; i++ {
420 x := in[n+i]
421 if xt := x.Type(); !xt.AssignableTo(elem) {
422 panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
423 }
424 slice.Index(i).Set(x)
425 }
426 origIn := in
427 in = make([]Value, n+1)
428 copy(in[:n], origIn)
429 in[n] = slice
430 }
431
432 nin := len(in)
433 if nin != t.NumIn() {
434 panic("reflect.Value.Call: wrong argument count")
435 }
436 nout := t.NumOut()
437
438
439 var regArgs abi.RegArgs
440
441
442 frametype, framePool, abi := funcLayout(t, rcvrtype)
443
444
445 var stackArgs unsafe.Pointer
446 if frametype.size != 0 {
447 if nout == 0 {
448 stackArgs = framePool.Get().(unsafe.Pointer)
449 } else {
450
451
452 stackArgs = unsafe_New(frametype)
453 }
454 }
455 frameSize := frametype.size
456
457 if debugReflectCall {
458 println("reflect.call", t.String())
459 abi.dump()
460 }
461
462
463
464
465 inStart := 0
466 if rcvrtype != nil {
467
468
469
470 switch st := abi.call.steps[0]; st.kind {
471 case abiStepStack:
472 storeRcvr(rcvr, stackArgs)
473 case abiStepIntReg, abiStepPointer:
474
475
476
477
478 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
479 case abiStepFloatReg:
480 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
481 default:
482 panic("unknown ABI parameter kind")
483 }
484 inStart = 1
485 }
486
487
488 for i, v := range in {
489 v.mustBeExported()
490 targ := t.In(i).(*rtype)
491
492
493
494 v = v.assignTo("reflect.Value.Call", targ, nil)
495 stepsLoop:
496 for _, st := range abi.call.stepsForValue(i + inStart) {
497 switch st.kind {
498 case abiStepStack:
499
500 addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
501 if v.flag&flagIndir != 0 {
502 typedmemmove(targ, addr, v.ptr)
503 } else {
504 *(*unsafe.Pointer)(addr) = v.ptr
505 }
506
507 break stepsLoop
508 case abiStepIntReg, abiStepPointer:
509
510 if v.flag&flagIndir != 0 {
511 offset := add(v.ptr, st.offset, "precomputed value offset")
512 if st.kind == abiStepPointer {
513
514
515
516 regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
517 }
518 memmove(unsafe.Pointer(&regArgs.Ints[st.ireg]), offset, st.size)
519 } else {
520 if st.kind == abiStepPointer {
521
522 regArgs.Ptrs[st.ireg] = v.ptr
523 }
524 regArgs.Ints[st.ireg] = uintptr(v.ptr)
525 }
526 case abiStepFloatReg:
527
528 if v.flag&flagIndir == 0 {
529 panic("attempted to copy pointer to FP register")
530 }
531 offset := add(v.ptr, st.offset, "precomputed value offset")
532 memmove(unsafe.Pointer(&regArgs.Floats[st.freg]), offset, st.size)
533 default:
534 panic("unknown ABI part kind")
535 }
536 }
537 }
538
539
540 frameSize = align(frameSize, ptrSize)
541 frameSize += abi.spill
542
543
544 regArgs.ReturnIsPtr = abi.outRegPtrs
545
546 if debugReflectCall {
547 regArgs.Dump()
548 }
549
550
551 if callGC {
552 runtime.GC()
553 }
554
555
556 call(frametype, fn, stackArgs, uint32(frametype.size), uint32(abi.retOffset), uint32(frameSize), &regArgs)
557
558
559 if callGC {
560 runtime.GC()
561 }
562
563 var ret []Value
564 if nout == 0 {
565 if stackArgs != nil {
566 typedmemclr(frametype, stackArgs)
567 framePool.Put(stackArgs)
568 }
569 } else {
570 if stackArgs != nil {
571
572
573
574 typedmemclrpartial(frametype, stackArgs, 0, abi.retOffset)
575 }
576
577
578 ret = make([]Value, nout)
579 for i := 0; i < nout; i++ {
580 tv := t.Out(i)
581 if tv.Size() == 0 {
582
583
584 ret[i] = Zero(tv)
585 continue
586 }
587 steps := abi.ret.stepsForValue(i)
588 if st := steps[0]; st.kind == abiStepStack {
589
590
591
592 fl := flagIndir | flag(tv.Kind())
593 ret[i] = Value{tv.common(), add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
594
595
596
597
598 continue
599 }
600
601
602 if !ifaceIndir(tv.common()) {
603
604
605 if steps[0].kind != abiStepPointer {
606 print("kind=", steps[0].kind, ", type=", tv.String(), "\n")
607 panic("mismatch between ABI description and types")
608 }
609 ret[i] = Value{tv.common(), regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
610 continue
611 }
612
613
614
615
616
617
618
619
620
621
622 s := unsafe_New(tv.common())
623 for _, st := range steps {
624 switch st.kind {
625 case abiStepIntReg:
626 offset := add(s, st.offset, "precomputed value offset")
627 memmove(offset, unsafe.Pointer(&regArgs.Ints[st.ireg]), st.size)
628 case abiStepPointer:
629 s := add(s, st.offset, "precomputed value offset")
630 *((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
631 case abiStepFloatReg:
632 offset := add(s, st.offset, "precomputed value offset")
633 memmove(offset, unsafe.Pointer(&regArgs.Floats[st.freg]), st.size)
634 case abiStepStack:
635 panic("register-based return value has stack component")
636 default:
637 panic("unknown ABI part kind")
638 }
639 }
640 ret[i] = Value{tv.common(), s, flagIndir | flag(tv.Kind())}
641 }
642 }
643
644 return ret
645 }
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667 func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
668 if callGC {
669
670
671
672
673
674 runtime.GC()
675 }
676 ftyp := ctxt.ftyp
677 f := ctxt.fn
678
679 _, _, abi := funcLayout(ftyp, nil)
680
681
682 ptr := frame
683 in := make([]Value, 0, int(ftyp.inCount))
684 for i, typ := range ftyp.in() {
685 if typ.Size() == 0 {
686 in = append(in, Zero(typ))
687 continue
688 }
689 v := Value{typ, nil, flag(typ.Kind())}
690 steps := abi.call.stepsForValue(i)
691 if st := steps[0]; st.kind == abiStepStack {
692 if ifaceIndir(typ) {
693
694
695
696
697 v.ptr = unsafe_New(typ)
698 if typ.size > 0 {
699 typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
700 }
701 v.flag |= flagIndir
702 } else {
703 v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
704 }
705 } else {
706 if ifaceIndir(typ) {
707
708
709 v.flag |= flagIndir
710 v.ptr = unsafe_New(typ)
711 for _, st := range steps {
712 switch st.kind {
713 case abiStepIntReg:
714 offset := add(v.ptr, st.offset, "precomputed value offset")
715 memmove(offset, unsafe.Pointer(&regs.Ints[st.ireg]), st.size)
716 case abiStepPointer:
717 s := add(v.ptr, st.offset, "precomputed value offset")
718 *((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
719 case abiStepFloatReg:
720 offset := add(v.ptr, st.offset, "precomputed value offset")
721 memmove(offset, unsafe.Pointer(&regs.Floats[st.freg]), st.size)
722 case abiStepStack:
723 panic("register-based return value has stack component")
724 default:
725 panic("unknown ABI part kind")
726 }
727 }
728 } else {
729
730
731 if steps[0].kind != abiStepPointer {
732 print("kind=", steps[0].kind, ", type=", typ.String(), "\n")
733 panic("mismatch between ABI description and types")
734 }
735 v.ptr = regs.Ptrs[steps[0].ireg]
736 }
737 }
738 in = append(in, v)
739 }
740
741
742 out := f(in)
743 numOut := ftyp.NumOut()
744 if len(out) != numOut {
745 panic("reflect: wrong return count from function created by MakeFunc")
746 }
747
748
749 if numOut > 0 {
750 for i, typ := range ftyp.out() {
751 v := out[i]
752 if v.typ == nil {
753 panic("reflect: function created by MakeFunc using " + funcName(f) +
754 " returned zero Value")
755 }
756 if v.flag&flagRO != 0 {
757 panic("reflect: function created by MakeFunc using " + funcName(f) +
758 " returned value obtained from unexported field")
759 }
760 if typ.size == 0 {
761 continue
762 }
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778 v = v.assignTo("reflect.MakeFunc", typ, nil)
779 stepsLoop:
780 for _, st := range abi.ret.stepsForValue(i) {
781 switch st.kind {
782 case abiStepStack:
783
784 addr := add(ptr, st.stkOff, "precomputed stack arg offset")
785
786
787
788
789 if v.flag&flagIndir != 0 {
790 memmove(addr, v.ptr, st.size)
791 } else {
792
793 *(*uintptr)(addr) = uintptr(v.ptr)
794 }
795
796 break stepsLoop
797 case abiStepIntReg, abiStepPointer:
798
799 if v.flag&flagIndir != 0 {
800 offset := add(v.ptr, st.offset, "precomputed value offset")
801 memmove(unsafe.Pointer(&regs.Ints[st.ireg]), offset, st.size)
802 } else {
803
804
805
806
807
808 regs.Ints[st.ireg] = uintptr(v.ptr)
809 }
810 case abiStepFloatReg:
811
812 if v.flag&flagIndir == 0 {
813 panic("attempted to copy pointer to FP register")
814 }
815 offset := add(v.ptr, st.offset, "precomputed value offset")
816 memmove(unsafe.Pointer(&regs.Floats[st.freg]), offset, st.size)
817 default:
818 panic("unknown ABI part kind")
819 }
820 }
821 }
822 }
823
824
825
826 *retValid = true
827
828
829
830
831
832 runtime.KeepAlive(out)
833
834
835
836
837 runtime.KeepAlive(ctxt)
838 }
839
840
841
842
843
844
845
846
847 func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *rtype, t *funcType, fn unsafe.Pointer) {
848 i := methodIndex
849 if v.typ.Kind() == Interface {
850 tt := (*interfaceType)(unsafe.Pointer(v.typ))
851 if uint(i) >= uint(len(tt.methods)) {
852 panic("reflect: internal error: invalid method index")
853 }
854 m := &tt.methods[i]
855 if !tt.nameOff(m.name).isExported() {
856 panic("reflect: " + op + " of unexported method")
857 }
858 iface := (*nonEmptyInterface)(v.ptr)
859 if iface.itab == nil {
860 panic("reflect: " + op + " of method on nil interface value")
861 }
862 rcvrtype = iface.itab.typ
863 fn = unsafe.Pointer(&iface.itab.fun[i])
864 t = (*funcType)(unsafe.Pointer(tt.typeOff(m.typ)))
865 } else {
866 rcvrtype = v.typ
867 ms := v.typ.exportedMethods()
868 if uint(i) >= uint(len(ms)) {
869 panic("reflect: internal error: invalid method index")
870 }
871 m := ms[i]
872 if !v.typ.nameOff(m.name).isExported() {
873 panic("reflect: " + op + " of unexported method")
874 }
875 ifn := v.typ.textOff(m.ifn)
876 fn = unsafe.Pointer(&ifn)
877 t = (*funcType)(unsafe.Pointer(v.typ.typeOff(m.mtyp)))
878 }
879 return
880 }
881
882
883
884
885
886 func storeRcvr(v Value, p unsafe.Pointer) {
887 t := v.typ
888 if t.Kind() == Interface {
889
890 iface := (*nonEmptyInterface)(v.ptr)
891 *(*unsafe.Pointer)(p) = iface.word
892 } else if v.flag&flagIndir != 0 && !ifaceIndir(t) {
893 *(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
894 } else {
895 *(*unsafe.Pointer)(p) = v.ptr
896 }
897 }
898
899
900
901 func align(x, n uintptr) uintptr {
902 return (x + n - 1) &^ (n - 1)
903 }
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924 func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
925 rcvr := ctxt.rcvr
926 rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)
927
928
929
930
931
932
933
934
935
936 _, _, valueABI := funcLayout(valueFuncType, nil)
937 valueFrame, valueRegs := frame, regs
938 methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)
939
940
941
942 methodFrame := methodFramePool.Get().(unsafe.Pointer)
943 var methodRegs abi.RegArgs
944
945
946 if st := methodABI.call.steps[0]; st.kind == abiStepStack {
947
948
949 storeRcvr(rcvr, methodFrame)
950 } else {
951
952 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints))
953 }
954
955
956 for i, t := range valueFuncType.in() {
957 valueSteps := valueABI.call.stepsForValue(i)
958 methodSteps := methodABI.call.stepsForValue(i + 1)
959
960
961 if len(valueSteps) == 0 {
962 if len(methodSteps) != 0 {
963 panic("method ABI and value ABI do not align")
964 }
965 continue
966 }
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981 if vStep := valueSteps[0]; vStep.kind == abiStepStack {
982 mStep := methodSteps[0]
983
984 if mStep.kind == abiStepStack {
985 if vStep.size != mStep.size {
986 panic("method ABI and value ABI do not align")
987 }
988 typedmemmove(t,
989 add(methodFrame, mStep.stkOff, "precomputed stack offset"),
990 add(valueFrame, vStep.stkOff, "precomputed stack offset"))
991 continue
992 }
993
994 for _, mStep := range methodSteps {
995 from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
996 switch mStep.kind {
997 case abiStepPointer:
998
999 methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
1000 fallthrough
1001 case abiStepIntReg:
1002 memmove(unsafe.Pointer(&methodRegs.Ints[mStep.ireg]), from, mStep.size)
1003 case abiStepFloatReg:
1004 memmove(unsafe.Pointer(&methodRegs.Floats[mStep.freg]), from, mStep.size)
1005 default:
1006 panic("unexpected method step")
1007 }
1008 }
1009 continue
1010 }
1011
1012 if mStep := methodSteps[0]; mStep.kind == abiStepStack {
1013 for _, vStep := range valueSteps {
1014 to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
1015 switch vStep.kind {
1016 case abiStepPointer:
1017
1018 *(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
1019 case abiStepIntReg:
1020 memmove(to, unsafe.Pointer(&valueRegs.Ints[vStep.ireg]), vStep.size)
1021 case abiStepFloatReg:
1022 memmove(to, unsafe.Pointer(&valueRegs.Floats[vStep.freg]), vStep.size)
1023 default:
1024 panic("unexpected value step")
1025 }
1026 }
1027 continue
1028 }
1029
1030 if len(valueSteps) != len(methodSteps) {
1031
1032
1033
1034 panic("method ABI and value ABI don't align")
1035 }
1036 for i, vStep := range valueSteps {
1037 mStep := methodSteps[i]
1038 if mStep.kind != vStep.kind {
1039 panic("method ABI and value ABI don't align")
1040 }
1041 switch vStep.kind {
1042 case abiStepPointer:
1043
1044 methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
1045 fallthrough
1046 case abiStepIntReg:
1047 methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
1048 case abiStepFloatReg:
1049 methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
1050 default:
1051 panic("unexpected value step")
1052 }
1053 }
1054 }
1055
1056 methodFrameSize := methodFrameType.size
1057
1058
1059 methodFrameSize = align(methodFrameSize, ptrSize)
1060 methodFrameSize += methodABI.spill
1061
1062
1063 methodRegs.ReturnIsPtr = methodABI.outRegPtrs
1064
1065
1066
1067
1068 call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.size), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079 if valueRegs != nil {
1080 *valueRegs = methodRegs
1081 }
1082 if retSize := methodFrameType.size - methodABI.retOffset; retSize > 0 {
1083 valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
1084 methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
1085
1086 memmove(valueRet, methodRet, retSize)
1087 }
1088
1089
1090
1091 *retValid = true
1092
1093
1094
1095
1096 typedmemclr(methodFrameType, methodFrame)
1097 methodFramePool.Put(methodFrame)
1098
1099
1100 runtime.KeepAlive(ctxt)
1101
1102
1103
1104
1105 runtime.KeepAlive(valueRegs)
1106 }
1107
1108
1109 func funcName(f func([]Value) []Value) string {
1110 pc := *(*uintptr)(unsafe.Pointer(&f))
1111 rf := runtime.FuncForPC(pc)
1112 if rf != nil {
1113 return rf.Name()
1114 }
1115 return "closure"
1116 }
1117
1118
1119
1120 func (v Value) Cap() int {
1121 k := v.kind()
1122 switch k {
1123 case Array:
1124 return v.typ.Len()
1125 case Chan:
1126 return chancap(v.pointer())
1127 case Slice:
1128
1129 return (*unsafeheader.Slice)(v.ptr).Cap
1130 }
1131 panic(&ValueError{"reflect.Value.Cap", v.kind()})
1132 }
1133
1134
1135
1136 func (v Value) Close() {
1137 v.mustBe(Chan)
1138 v.mustBeExported()
1139 chanclose(v.pointer())
1140 }
1141
1142
1143
1144 func (v Value) Complex() complex128 {
1145 k := v.kind()
1146 switch k {
1147 case Complex64:
1148 return complex128(*(*complex64)(v.ptr))
1149 case Complex128:
1150 return *(*complex128)(v.ptr)
1151 }
1152 panic(&ValueError{"reflect.Value.Complex", v.kind()})
1153 }
1154
1155
1156
1157
1158
1159 func (v Value) Elem() Value {
1160 k := v.kind()
1161 switch k {
1162 case Interface:
1163 var eface interface{}
1164 if v.typ.NumMethod() == 0 {
1165 eface = *(*interface{})(v.ptr)
1166 } else {
1167 eface = (interface{})(*(*interface {
1168 M()
1169 })(v.ptr))
1170 }
1171 x := unpackEface(eface)
1172 if x.flag != 0 {
1173 x.flag |= v.flag.ro()
1174 }
1175 return x
1176 case Ptr:
1177 ptr := v.ptr
1178 if v.flag&flagIndir != 0 {
1179 ptr = *(*unsafe.Pointer)(ptr)
1180 }
1181
1182 if ptr == nil {
1183 return Value{}
1184 }
1185 tt := (*ptrType)(unsafe.Pointer(v.typ))
1186 typ := tt.elem
1187 fl := v.flag&flagRO | flagIndir | flagAddr
1188 fl |= flag(typ.Kind())
1189 return Value{typ, ptr, fl}
1190 }
1191 panic(&ValueError{"reflect.Value.Elem", v.kind()})
1192 }
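
An illustrative sketch (not part of value.go) of Elem on both kinds it accepts: a pointer yields the pointed-to value, and an interface yields the value stored inside it.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	n := 10
	p := reflect.ValueOf(&n)
	fmt.Println(p.Kind(), p.Elem().Int()) // ptr 10

	var i interface{} = "hello"
	iv := reflect.ValueOf(&i).Elem()         // addressable Value of kind Interface
	fmt.Println(iv.Kind(), iv.Elem().Kind()) // interface string
}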
1193
1194
1195
1196 func (v Value) Field(i int) Value {
1197 if v.kind() != Struct {
1198 panic(&ValueError{"reflect.Value.Field", v.kind()})
1199 }
1200 tt := (*structType)(unsafe.Pointer(v.typ))
1201 if uint(i) >= uint(len(tt.fields)) {
1202 panic("reflect: Field index out of range")
1203 }
1204 field := &tt.fields[i]
1205 typ := field.typ
1206
1207
1208 fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
1209
1210 if !field.name.isExported() {
1211 if field.embedded() {
1212 fl |= flagEmbedRO
1213 } else {
1214 fl |= flagStickyRO
1215 }
1216 }
1217
1218
1219
1220
1221
1222 ptr := add(v.ptr, field.offset(), "same as non-reflect &v.field")
1223 return Value{typ, ptr, fl}
1224 }
1225
1226
1227
1228 func (v Value) FieldByIndex(index []int) Value {
1229 if len(index) == 1 {
1230 return v.Field(index[0])
1231 }
1232 v.mustBe(Struct)
1233 for i, x := range index {
1234 if i > 0 {
1235 if v.Kind() == Ptr && v.typ.Elem().Kind() == Struct {
1236 if v.IsNil() {
1237 panic("reflect: indirection through nil pointer to embedded struct")
1238 }
1239 v = v.Elem()
1240 }
1241 }
1242 v = v.Field(x)
1243 }
1244 return v
1245 }
1246
1247
1248
1249
1250 func (v Value) FieldByName(name string) Value {
1251 v.mustBe(Struct)
1252 if f, ok := v.typ.FieldByName(name); ok {
1253 return v.FieldByIndex(f.Index)
1254 }
1255 return Value{}
1256 }
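
A minimal usage sketch for struct field access; the user type below is invented for the example. Note how the read-only flags set in Field surface through CanSet and CanInterface for the unexported field.

package main

import (
	"fmt"
	"reflect"
)

type user struct {
	Name string
	age  int // unexported
}

func main() {
	u := user{Name: "Ada", age: 36}
	v := reflect.ValueOf(&u).Elem()

	name := v.FieldByName("Name")
	fmt.Println(name.String(), name.CanSet()) // Ada true
	name.SetString("Grace")

	age := v.FieldByName("age")
	// Readable via reflection, but neither settable nor extractable with Interface,
	// because the Value was obtained from an unexported field.
	fmt.Println(age.Int(), age.CanSet(), age.CanInterface()) // 36 false false

	fmt.Println(u) // {Grace 36}
}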
1257
1258
1259
1260
1261
1262 func (v Value) FieldByNameFunc(match func(string) bool) Value {
1263 if f, ok := v.typ.FieldByNameFunc(match); ok {
1264 return v.FieldByIndex(f.Index)
1265 }
1266 return Value{}
1267 }
1268
1269
1270
1271 func (v Value) Float() float64 {
1272 k := v.kind()
1273 switch k {
1274 case Float32:
1275 return float64(*(*float32)(v.ptr))
1276 case Float64:
1277 return *(*float64)(v.ptr)
1278 }
1279 panic(&ValueError{"reflect.Value.Float", v.kind()})
1280 }
1281
1282 var uint8Type = TypeOf(uint8(0)).(*rtype)
1283
1284
1285
1286 func (v Value) Index(i int) Value {
1287 switch v.kind() {
1288 case Array:
1289 tt := (*arrayType)(unsafe.Pointer(v.typ))
1290 if uint(i) >= uint(tt.len) {
1291 panic("reflect: array index out of range")
1292 }
1293 typ := tt.elem
1294 offset := uintptr(i) * typ.size
1295
1296
1297
1298
1299
1300
1301 val := add(v.ptr, offset, "same as &v[i], i < tt.len")
1302 fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind())
1303 return Value{typ, val, fl}
1304
1305 case Slice:
1306
1307
1308 s := (*unsafeheader.Slice)(v.ptr)
1309 if uint(i) >= uint(s.Len) {
1310 panic("reflect: slice index out of range")
1311 }
1312 tt := (*sliceType)(unsafe.Pointer(v.typ))
1313 typ := tt.elem
1314 val := arrayAt(s.Data, i, typ.size, "i < s.Len")
1315 fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
1316 return Value{typ, val, fl}
1317
1318 case String:
1319 s := (*unsafeheader.String)(v.ptr)
1320 if uint(i) >= uint(s.Len) {
1321 panic("reflect: string index out of range")
1322 }
1323 p := arrayAt(s.Data, i, 1, "i < s.Len")
1324 fl := v.flag.ro() | flag(Uint8) | flagIndir
1325 return Value{uint8Type, p, fl}
1326 }
1327 panic(&ValueError{"reflect.Value.Index", v.kind()})
1328 }
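
A short sketch (illustrative only) of Index on a slice and on a string: slice elements come back addressable, while string elements are read-only bytes of kind Uint8.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	s := []string{"a", "b", "c"}
	sv := reflect.ValueOf(s)
	sv.Index(1).SetString("B") // slice elements are addressable through the slice header
	fmt.Println(s)             // [a B c]

	str := reflect.ValueOf("hi")
	b := str.Index(0)               // a single byte, not a rune
	fmt.Println(b.Kind(), b.Uint()) // uint8 104
}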
1329
1330
1331
1332 func (v Value) Int() int64 {
1333 k := v.kind()
1334 p := v.ptr
1335 switch k {
1336 case Int:
1337 return int64(*(*int)(p))
1338 case Int8:
1339 return int64(*(*int8)(p))
1340 case Int16:
1341 return int64(*(*int16)(p))
1342 case Int32:
1343 return int64(*(*int32)(p))
1344 case Int64:
1345 return *(*int64)(p)
1346 }
1347 panic(&ValueError{"reflect.Value.Int", v.kind()})
1348 }
1349
1350
1351 func (v Value) CanInterface() bool {
1352 if v.flag == 0 {
1353 panic(&ValueError{"reflect.Value.CanInterface", Invalid})
1354 }
1355 return v.flag&flagRO == 0
1356 }
1357
1358
1359
1360
1361
1362
1363 func (v Value) Interface() (i interface{}) {
1364 return valueInterface(v, true)
1365 }
1366
1367 func valueInterface(v Value, safe bool) interface{} {
1368 if v.flag == 0 {
1369 panic(&ValueError{"reflect.Value.Interface", Invalid})
1370 }
1371 if safe && v.flag&flagRO != 0 {
1372
1373
1374
1375 panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
1376 }
1377 if v.flag&flagMethod != 0 {
1378 v = makeMethodValue("Interface", v)
1379 }
1380
1381 if v.kind() == Interface {
1382
1383
1384
1385 if v.NumMethod() == 0 {
1386 return *(*interface{})(v.ptr)
1387 }
1388 return *(*interface {
1389 M()
1390 })(v.ptr)
1391 }
1392
1393
1394 return packEface(v)
1395 }
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406 func (v Value) InterfaceData() [2]uintptr {
1407 v.mustBe(Interface)
1408
1409
1410
1411
1412
1413 return *(*[2]uintptr)(v.ptr)
1414 }
1415
1416
1417
1418
1419
1420
1421
1422
1423 func (v Value) IsNil() bool {
1424 k := v.kind()
1425 switch k {
1426 case Chan, Func, Map, Ptr, UnsafePointer:
1427 if v.flag&flagMethod != 0 {
1428 return false
1429 }
1430 ptr := v.ptr
1431 if v.flag&flagIndir != 0 {
1432 ptr = *(*unsafe.Pointer)(ptr)
1433 }
1434 return ptr == nil
1435 case Interface, Slice:
1436
1437
1438 return *(*unsafe.Pointer)(v.ptr) == nil
1439 }
1440 panic(&ValueError{"reflect.Value.IsNil", v.kind()})
1441 }
1442
1443
1444
1445
1446
1447
1448 func (v Value) IsValid() bool {
1449 return v.flag != 0
1450 }
1451
1452
1453
1454 func (v Value) IsZero() bool {
1455 switch v.kind() {
1456 case Bool:
1457 return !v.Bool()
1458 case Int, Int8, Int16, Int32, Int64:
1459 return v.Int() == 0
1460 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
1461 return v.Uint() == 0
1462 case Float32, Float64:
1463 return math.Float64bits(v.Float()) == 0
1464 case Complex64, Complex128:
1465 c := v.Complex()
1466 return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
1467 case Array:
1468 for i := 0; i < v.Len(); i++ {
1469 if !v.Index(i).IsZero() {
1470 return false
1471 }
1472 }
1473 return true
1474 case Chan, Func, Interface, Map, Ptr, Slice, UnsafePointer:
1475 return v.IsNil()
1476 case String:
1477 return v.Len() == 0
1478 case Struct:
1479 for i := 0; i < v.NumField(); i++ {
1480 if !v.Field(i).IsZero() {
1481 return false
1482 }
1483 }
1484 return true
1485 default:
1486
1487
1488 panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
1489 }
1490 }
1491
1492
1493
1494 func (v Value) Kind() Kind {
1495 return v.kind()
1496 }
1497
1498
1499
1500 func (v Value) Len() int {
1501 k := v.kind()
1502 switch k {
1503 case Array:
1504 tt := (*arrayType)(unsafe.Pointer(v.typ))
1505 return int(tt.len)
1506 case Chan:
1507 return chanlen(v.pointer())
1508 case Map:
1509 return maplen(v.pointer())
1510 case Slice:
1511
1512 return (*unsafeheader.Slice)(v.ptr).Len
1513 case String:
1514
1515 return (*unsafeheader.String)(v.ptr).Len
1516 }
1517 panic(&ValueError{"reflect.Value.Len", v.kind()})
1518 }
1519
1520
1521
1522
1523
1524 func (v Value) MapIndex(key Value) Value {
1525 v.mustBe(Map)
1526 tt := (*mapType)(unsafe.Pointer(v.typ))
1527
1528
1529
1530
1531
1532
1533
1534
1535 key = key.assignTo("reflect.Value.MapIndex", tt.key, nil)
1536
1537 var k unsafe.Pointer
1538 if key.flag&flagIndir != 0 {
1539 k = key.ptr
1540 } else {
1541 k = unsafe.Pointer(&key.ptr)
1542 }
1543 e := mapaccess(v.typ, v.pointer(), k)
1544 if e == nil {
1545 return Value{}
1546 }
1547 typ := tt.elem
1548 fl := (v.flag | key.flag).ro()
1549 fl |= flag(typ.Kind())
1550 return copyVal(typ, fl, e)
1551 }
1552
1553
1554
1555
1556
1557 func (v Value) MapKeys() []Value {
1558 v.mustBe(Map)
1559 tt := (*mapType)(unsafe.Pointer(v.typ))
1560 keyType := tt.key
1561
1562 fl := v.flag.ro() | flag(keyType.Kind())
1563
1564 m := v.pointer()
1565 mlen := int(0)
1566 if m != nil {
1567 mlen = maplen(m)
1568 }
1569 it := mapiterinit(v.typ, m)
1570 a := make([]Value, mlen)
1571 var i int
1572 for i = 0; i < len(a); i++ {
1573 key := mapiterkey(it)
1574 if key == nil {
1575
1576
1577
1578 break
1579 }
1580 a[i] = copyVal(keyType, fl, key)
1581 mapiternext(it)
1582 }
1583 return a[:i]
1584 }
1585
1586
1587
1588 type MapIter struct {
1589 m Value
1590 it unsafe.Pointer
1591 }
1592
1593
1594 func (it *MapIter) Key() Value {
1595 if it.it == nil {
1596 panic("MapIter.Key called before Next")
1597 }
1598 if mapiterkey(it.it) == nil {
1599 panic("MapIter.Key called on exhausted iterator")
1600 }
1601
1602 t := (*mapType)(unsafe.Pointer(it.m.typ))
1603 ktype := t.key
1604 return copyVal(ktype, it.m.flag.ro()|flag(ktype.Kind()), mapiterkey(it.it))
1605 }
1606
1607
1608 func (it *MapIter) Value() Value {
1609 if it.it == nil {
1610 panic("MapIter.Value called before Next")
1611 }
1612 if mapiterkey(it.it) == nil {
1613 panic("MapIter.Value called on exhausted iterator")
1614 }
1615
1616 t := (*mapType)(unsafe.Pointer(it.m.typ))
1617 vtype := t.elem
1618 return copyVal(vtype, it.m.flag.ro()|flag(vtype.Kind()), mapiterelem(it.it))
1619 }
1620
1621
1622
1623
1624 func (it *MapIter) Next() bool {
1625 if it.it == nil {
1626 it.it = mapiterinit(it.m.typ, it.m.pointer())
1627 } else {
1628 if mapiterkey(it.it) == nil {
1629 panic("MapIter.Next called on exhausted iterator")
1630 }
1631 mapiternext(it.it)
1632 }
1633 return mapiterkey(it.it) != nil
1634 }
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652 func (v Value) MapRange() *MapIter {
1653 v.mustBe(Map)
1654 return &MapIter{m: v}
1655 }
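
A minimal sketch of the MapRange/MapIter loop, assuming an ordinary map value built for the example:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	m := map[string]int{"a": 1, "b": 2}
	iter := reflect.ValueOf(m).MapRange()
	for iter.Next() {
		k := iter.Key()
		v := iter.Value()
		fmt.Println(k.String(), v.Int())
	}
}

Key and Value must only be called after Next has returned true, as the panics above enforce.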
1656
1657
1658
1659 func copyVal(typ *rtype, fl flag, ptr unsafe.Pointer) Value {
1660 if ifaceIndir(typ) {
1661
1662
1663 c := unsafe_New(typ)
1664 typedmemmove(typ, c, ptr)
1665 return Value{typ, c, fl | flagIndir}
1666 }
1667 return Value{typ, *(*unsafe.Pointer)(ptr), fl}
1668 }
1669
1670
1671
1672
1673
1674 func (v Value) Method(i int) Value {
1675 if v.typ == nil {
1676 panic(&ValueError{"reflect.Value.Method", Invalid})
1677 }
1678 if v.flag&flagMethod != 0 || uint(i) >= uint(v.typ.NumMethod()) {
1679 panic("reflect: Method index out of range")
1680 }
1681 if v.typ.Kind() == Interface && v.IsNil() {
1682 panic("reflect: Method on nil interface value")
1683 }
1684 fl := v.flag.ro() | (v.flag & flagIndir)
1685 fl |= flag(Func)
1686 fl |= flag(i)<<flagMethodShift | flagMethod
1687 return Value{v.typ, v.ptr, fl}
1688 }
1689
1690
1691 func (v Value) NumMethod() int {
1692 if v.typ == nil {
1693 panic(&ValueError{"reflect.Value.NumMethod", Invalid})
1694 }
1695 if v.flag&flagMethod != 0 {
1696 return 0
1697 }
1698 return v.typ.NumMethod()
1699 }
1700
1701
1702
1703
1704
1705
1706 func (v Value) MethodByName(name string) Value {
1707 if v.typ == nil {
1708 panic(&ValueError{"reflect.Value.MethodByName", Invalid})
1709 }
1710 if v.flag&flagMethod != 0 {
1711 return Value{}
1712 }
1713 m, ok := v.typ.MethodByName(name)
1714 if !ok {
1715 return Value{}
1716 }
1717 return v.Method(m.Index)
1718 }
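
An illustrative sketch of looking up a method by name and invoking it; strings.Replacer is used here only as a convenient example receiver.

package main

import (
	"fmt"
	"reflect"
	"strings"
)

func main() {
	r := strings.NewReplacer("a", "o")
	m := reflect.ValueOf(r).MethodByName("Replace")
	if !m.IsValid() {
		fmt.Println("no such method")
		return
	}
	out := m.Call([]reflect.Value{reflect.ValueOf("banana")})
	fmt.Println(out[0].String()) // bonono
}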
1719
1720
1721
1722 func (v Value) NumField() int {
1723 v.mustBe(Struct)
1724 tt := (*structType)(unsafe.Pointer(v.typ))
1725 return len(tt.fields)
1726 }
1727
1728
1729
1730 func (v Value) OverflowComplex(x complex128) bool {
1731 k := v.kind()
1732 switch k {
1733 case Complex64:
1734 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
1735 case Complex128:
1736 return false
1737 }
1738 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
1739 }
1740
1741
1742
1743 func (v Value) OverflowFloat(x float64) bool {
1744 k := v.kind()
1745 switch k {
1746 case Float32:
1747 return overflowFloat32(x)
1748 case Float64:
1749 return false
1750 }
1751 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
1752 }
1753
1754 func overflowFloat32(x float64) bool {
1755 if x < 0 {
1756 x = -x
1757 }
1758 return math.MaxFloat32 < x && x <= math.MaxFloat64
1759 }
1760
1761
1762
1763 func (v Value) OverflowInt(x int64) bool {
1764 k := v.kind()
1765 switch k {
1766 case Int, Int8, Int16, Int32, Int64:
1767 bitSize := v.typ.size * 8
1768 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
1769 return x != trunc
1770 }
1771 panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
1772 }
1773
1774
1775
1776 func (v Value) OverflowUint(x uint64) bool {
1777 k := v.kind()
1778 switch k {
1779 case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
1780 bitSize := v.typ.size * 8
1781 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
1782 return x != trunc
1783 }
1784 panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
1785 }
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806 func (v Value) Pointer() uintptr {
1807
1808 k := v.kind()
1809 switch k {
1810 case Ptr:
1811 if v.typ.ptrdata == 0 {
1812
1813
1814
1815
1816 return *(*uintptr)(v.ptr)
1817 }
1818 fallthrough
1819 case Chan, Map, UnsafePointer:
1820 return uintptr(v.pointer())
1821 case Func:
1822 if v.flag&flagMethod != 0 {
1823
1824
1825
1826
1827
1828
1829 f := methodValueCall
1830 return **(**uintptr)(unsafe.Pointer(&f))
1831 }
1832 p := v.pointer()
1833
1834
1835 if p != nil {
1836 p = *(*unsafe.Pointer)(p)
1837 }
1838 return uintptr(p)
1839
1840 case Slice:
1841 return (*SliceHeader)(v.ptr).Data
1842 }
1843 panic(&ValueError{"reflect.Value.Pointer", v.kind()})
1844 }
1845
1846
1847
1848
1849
1850
1851 func (v Value) Recv() (x Value, ok bool) {
1852 v.mustBe(Chan)
1853 v.mustBeExported()
1854 return v.recv(false)
1855 }
1856
1857
1858
1859 func (v Value) recv(nb bool) (val Value, ok bool) {
1860 tt := (*chanType)(unsafe.Pointer(v.typ))
1861 if ChanDir(tt.dir)&RecvDir == 0 {
1862 panic("reflect: recv on send-only channel")
1863 }
1864 t := tt.elem
1865 val = Value{t, nil, flag(t.Kind())}
1866 var p unsafe.Pointer
1867 if ifaceIndir(t) {
1868 p = unsafe_New(t)
1869 val.ptr = p
1870 val.flag |= flagIndir
1871 } else {
1872 p = unsafe.Pointer(&val.ptr)
1873 }
1874 selected, ok := chanrecv(v.pointer(), nb, p)
1875 if !selected {
1876 val = Value{}
1877 }
1878 return
1879 }
1880
1881
1882
1883
1884 func (v Value) Send(x Value) {
1885 v.mustBe(Chan)
1886 v.mustBeExported()
1887 v.send(x, false)
1888 }
1889
1890
1891
1892 func (v Value) send(x Value, nb bool) (selected bool) {
1893 tt := (*chanType)(unsafe.Pointer(v.typ))
1894 if ChanDir(tt.dir)&SendDir == 0 {
1895 panic("reflect: send on recv-only channel")
1896 }
1897 x.mustBeExported()
1898 x = x.assignTo("reflect.Value.Send", tt.elem, nil)
1899 var p unsafe.Pointer
1900 if x.flag&flagIndir != 0 {
1901 p = x.ptr
1902 } else {
1903 p = unsafe.Pointer(&x.ptr)
1904 }
1905 return chansend(v.pointer(), p, nb)
1906 }
1907
1908
1909
1910
1911 func (v Value) Set(x Value) {
1912 v.mustBeAssignable()
1913 x.mustBeExported()
1914 var target unsafe.Pointer
1915 if v.kind() == Interface {
1916 target = v.ptr
1917 }
1918 x = x.assignTo("reflect.Set", v.typ, target)
1919 if x.flag&flagIndir != 0 {
1920 if x.ptr == unsafe.Pointer(&zeroVal[0]) {
1921 typedmemclr(v.typ, v.ptr)
1922 } else {
1923 typedmemmove(v.typ, v.ptr, x.ptr)
1924 }
1925 } else {
1926 *(*unsafe.Pointer)(v.ptr) = x.ptr
1927 }
1928 }
1929
1930
1931
1932 func (v Value) SetBool(x bool) {
1933 v.mustBeAssignable()
1934 v.mustBe(Bool)
1935 *(*bool)(v.ptr) = x
1936 }
1937
1938
1939
1940 func (v Value) SetBytes(x []byte) {
1941 v.mustBeAssignable()
1942 v.mustBe(Slice)
1943 if v.typ.Elem().Kind() != Uint8 {
1944 panic("reflect.Value.SetBytes of non-byte slice")
1945 }
1946 *(*[]byte)(v.ptr) = x
1947 }
1948
1949
1950
1951 func (v Value) setRunes(x []rune) {
1952 v.mustBeAssignable()
1953 v.mustBe(Slice)
1954 if v.typ.Elem().Kind() != Int32 {
1955 panic("reflect.Value.setRunes of non-rune slice")
1956 }
1957 *(*[]rune)(v.ptr) = x
1958 }
1959
1960
1961
1962 func (v Value) SetComplex(x complex128) {
1963 v.mustBeAssignable()
1964 switch k := v.kind(); k {
1965 default:
1966 panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
1967 case Complex64:
1968 *(*complex64)(v.ptr) = complex64(x)
1969 case Complex128:
1970 *(*complex128)(v.ptr) = x
1971 }
1972 }
1973
1974
1975
1976 func (v Value) SetFloat(x float64) {
1977 v.mustBeAssignable()
1978 switch k := v.kind(); k {
1979 default:
1980 panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
1981 case Float32:
1982 *(*float32)(v.ptr) = float32(x)
1983 case Float64:
1984 *(*float64)(v.ptr) = x
1985 }
1986 }
1987
1988
1989
1990 func (v Value) SetInt(x int64) {
1991 v.mustBeAssignable()
1992 switch k := v.kind(); k {
1993 default:
1994 panic(&ValueError{"reflect.Value.SetInt", v.kind()})
1995 case Int:
1996 *(*int)(v.ptr) = int(x)
1997 case Int8:
1998 *(*int8)(v.ptr) = int8(x)
1999 case Int16:
2000 *(*int16)(v.ptr) = int16(x)
2001 case Int32:
2002 *(*int32)(v.ptr) = int32(x)
2003 case Int64:
2004 *(*int64)(v.ptr) = x
2005 }
2006 }
2007
2008
2009
2010
2011 func (v Value) SetLen(n int) {
2012 v.mustBeAssignable()
2013 v.mustBe(Slice)
2014 s := (*unsafeheader.Slice)(v.ptr)
2015 if uint(n) > uint(s.Cap) {
2016 panic("reflect: slice length out of range in SetLen")
2017 }
2018 s.Len = n
2019 }
2020
2021
2022
2023
2024 func (v Value) SetCap(n int) {
2025 v.mustBeAssignable()
2026 v.mustBe(Slice)
2027 s := (*unsafeheader.Slice)(v.ptr)
2028 if n < s.Len || n > s.Cap {
2029 panic("reflect: slice capacity out of range in SetCap")
2030 }
2031 s.Cap = n
2032 }
2033
2034
2035
2036
2037
2038
2039
2040 func (v Value) SetMapIndex(key, elem Value) {
2041 v.mustBe(Map)
2042 v.mustBeExported()
2043 key.mustBeExported()
2044 tt := (*mapType)(unsafe.Pointer(v.typ))
2045 key = key.assignTo("reflect.Value.SetMapIndex", tt.key, nil)
2046 var k unsafe.Pointer
2047 if key.flag&flagIndir != 0 {
2048 k = key.ptr
2049 } else {
2050 k = unsafe.Pointer(&key.ptr)
2051 }
2052 if elem.typ == nil {
2053 mapdelete(v.typ, v.pointer(), k)
2054 return
2055 }
2056 elem.mustBeExported()
2057 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
2058 var e unsafe.Pointer
2059 if elem.flag&flagIndir != 0 {
2060 e = elem.ptr
2061 } else {
2062 e = unsafe.Pointer(&elem.ptr)
2063 }
2064 mapassign(v.typ, v.pointer(), k, e)
2065 }
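
A small usage sketch (not part of value.go): SetMapIndex both stores and, when given the zero Value as elem, deletes a key.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	m := map[string]int{"a": 1}
	mv := reflect.ValueOf(m)

	// m["b"] = 2
	mv.SetMapIndex(reflect.ValueOf("b"), reflect.ValueOf(2))

	// delete(m, "a"): a zero Value for elem requests deletion.
	mv.SetMapIndex(reflect.ValueOf("a"), reflect.Value{})

	fmt.Println(m) // map[b:2]
}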
2066
2067
2068
2069 func (v Value) SetUint(x uint64) {
2070 v.mustBeAssignable()
2071 switch k := v.kind(); k {
2072 default:
2073 panic(&ValueError{"reflect.Value.SetUint", v.kind()})
2074 case Uint:
2075 *(*uint)(v.ptr) = uint(x)
2076 case Uint8:
2077 *(*uint8)(v.ptr) = uint8(x)
2078 case Uint16:
2079 *(*uint16)(v.ptr) = uint16(x)
2080 case Uint32:
2081 *(*uint32)(v.ptr) = uint32(x)
2082 case Uint64:
2083 *(*uint64)(v.ptr) = x
2084 case Uintptr:
2085 *(*uintptr)(v.ptr) = uintptr(x)
2086 }
2087 }
2088
2089
2090
2091 func (v Value) SetPointer(x unsafe.Pointer) {
2092 v.mustBeAssignable()
2093 v.mustBe(UnsafePointer)
2094 *(*unsafe.Pointer)(v.ptr) = x
2095 }
2096
2097
2098
2099 func (v Value) SetString(x string) {
2100 v.mustBeAssignable()
2101 v.mustBe(String)
2102 *(*string)(v.ptr) = x
2103 }
2104
2105
2106
2107
2108 func (v Value) Slice(i, j int) Value {
2109 var (
2110 cap int
2111 typ *sliceType
2112 base unsafe.Pointer
2113 )
2114 switch kind := v.kind(); kind {
2115 default:
2116 panic(&ValueError{"reflect.Value.Slice", v.kind()})
2117
2118 case Array:
2119 if v.flag&flagAddr == 0 {
2120 panic("reflect.Value.Slice: slice of unaddressable array")
2121 }
2122 tt := (*arrayType)(unsafe.Pointer(v.typ))
2123 cap = int(tt.len)
2124 typ = (*sliceType)(unsafe.Pointer(tt.slice))
2125 base = v.ptr
2126
2127 case Slice:
2128 typ = (*sliceType)(unsafe.Pointer(v.typ))
2129 s := (*unsafeheader.Slice)(v.ptr)
2130 base = s.Data
2131 cap = s.Cap
2132
2133 case String:
2134 s := (*unsafeheader.String)(v.ptr)
2135 if i < 0 || j < i || j > s.Len {
2136 panic("reflect.Value.Slice: string slice index out of bounds")
2137 }
2138 var t unsafeheader.String
2139 if i < s.Len {
2140 t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
2141 }
2142 return Value{v.typ, unsafe.Pointer(&t), v.flag}
2143 }
2144
2145 if i < 0 || j < i || j > cap {
2146 panic("reflect.Value.Slice: slice index out of bounds")
2147 }
2148
2149
2150 var x []unsafe.Pointer
2151
2152
2153 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2154 s.Len = j - i
2155 s.Cap = cap - i
2156 if cap-i > 0 {
2157 s.Data = arrayAt(base, i, typ.elem.Size(), "i < cap")
2158 } else {
2159
2160 s.Data = base
2161 }
2162
2163 fl := v.flag.ro() | flagIndir | flag(Slice)
2164 return Value{typ.common(), unsafe.Pointer(&x), fl}
2165 }
2166
2167
2168
2169
2170 func (v Value) Slice3(i, j, k int) Value {
2171 var (
2172 cap int
2173 typ *sliceType
2174 base unsafe.Pointer
2175 )
2176 switch kind := v.kind(); kind {
2177 default:
2178 panic(&ValueError{"reflect.Value.Slice3", v.kind()})
2179
2180 case Array:
2181 if v.flag&flagAddr == 0 {
2182 panic("reflect.Value.Slice3: slice of unaddressable array")
2183 }
2184 tt := (*arrayType)(unsafe.Pointer(v.typ))
2185 cap = int(tt.len)
2186 typ = (*sliceType)(unsafe.Pointer(tt.slice))
2187 base = v.ptr
2188
2189 case Slice:
2190 typ = (*sliceType)(unsafe.Pointer(v.typ))
2191 s := (*unsafeheader.Slice)(v.ptr)
2192 base = s.Data
2193 cap = s.Cap
2194 }
2195
2196 if i < 0 || j < i || k < j || k > cap {
2197 panic("reflect.Value.Slice3: slice index out of bounds")
2198 }
2199
2200
2201
2202 var x []unsafe.Pointer
2203
2204
2205 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2206 s.Len = j - i
2207 s.Cap = k - i
2208 if k-i > 0 {
2209 s.Data = arrayAt(base, i, typ.elem.Size(), "i < k <= cap")
2210 } else {
2211
2212 s.Data = base
2213 }
2214
2215 fl := v.flag.ro() | flagIndir | flag(Slice)
2216 return Value{typ.common(), unsafe.Pointer(&x), fl}
2217 }
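
An illustrative comparison of Slice and Slice3, mirroring the s[i:j] and s[i:j:k] expressions; the slice literal is invented for the example.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	s := []int{0, 1, 2, 3, 4}
	v := reflect.ValueOf(s)

	a := v.Slice(1, 3) // like s[1:3]
	fmt.Println(a.Interface(), a.Len(), a.Cap()) // [1 2] 2 4

	b := v.Slice3(1, 3, 3) // like s[1:3:3], capacity clipped at k
	fmt.Println(b.Interface(), b.Len(), b.Cap()) // [1 2] 2 2
}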
2218
2219
2220
2221
2222
2223
2224
2225 func (v Value) String() string {
2226 switch k := v.kind(); k {
2227 case Invalid:
2228 return "<invalid Value>"
2229 case String:
2230 return *(*string)(v.ptr)
2231 }
2232
2233
2234 return "<" + v.Type().String() + " Value>"
2235 }
2236
2237
2238
2239
2240
2241
2242 func (v Value) TryRecv() (x Value, ok bool) {
2243 v.mustBe(Chan)
2244 v.mustBeExported()
2245 return v.recv(true)
2246 }
2247
2248
2249
2250
2251
2252 func (v Value) TrySend(x Value) bool {
2253 v.mustBe(Chan)
2254 v.mustBeExported()
2255 return v.send(x, true)
2256 }
2257
2258
2259 func (v Value) Type() Type {
2260 f := v.flag
2261 if f == 0 {
2262 panic(&ValueError{"reflect.Value.Type", Invalid})
2263 }
2264 if f&flagMethod == 0 {
2265
2266 return v.typ
2267 }
2268
2269
2270
2271 i := int(v.flag) >> flagMethodShift
2272 if v.typ.Kind() == Interface {
2273
2274 tt := (*interfaceType)(unsafe.Pointer(v.typ))
2275 if uint(i) >= uint(len(tt.methods)) {
2276 panic("reflect: internal error: invalid method index")
2277 }
2278 m := &tt.methods[i]
2279 return v.typ.typeOff(m.typ)
2280 }
2281
2282 ms := v.typ.exportedMethods()
2283 if uint(i) >= uint(len(ms)) {
2284 panic("reflect: internal error: invalid method index")
2285 }
2286 m := ms[i]
2287 return v.typ.typeOff(m.mtyp)
2288 }
2289
2290
2291
2292 func (v Value) Uint() uint64 {
2293 k := v.kind()
2294 p := v.ptr
2295 switch k {
2296 case Uint:
2297 return uint64(*(*uint)(p))
2298 case Uint8:
2299 return uint64(*(*uint8)(p))
2300 case Uint16:
2301 return uint64(*(*uint16)(p))
2302 case Uint32:
2303 return uint64(*(*uint32)(p))
2304 case Uint64:
2305 return *(*uint64)(p)
2306 case Uintptr:
2307 return uint64(*(*uintptr)(p))
2308 }
2309 panic(&ValueError{"reflect.Value.Uint", v.kind()})
2310 }
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320 func (v Value) UnsafeAddr() uintptr {
2321
2322 if v.typ == nil {
2323 panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
2324 }
2325 if v.flag&flagAddr == 0 {
2326 panic("reflect.Value.UnsafeAddr of unaddressable value")
2327 }
2328 return uintptr(v.ptr)
2329 }
2330
2331
2332
2333
2334
2335
2336
2337 type StringHeader struct {
2338 Data uintptr
2339 Len int
2340 }
2341
2342
2343
2344
2345
2346
2347
2348 type SliceHeader struct {
2349 Data uintptr
2350 Len int
2351 Cap int
2352 }
2353
2354 func typesMustMatch(what string, t1, t2 Type) {
2355 if t1 != t2 {
2356 panic(what + ": " + t1.String() + " != " + t2.String())
2357 }
2358 }
2359
2360
2361
2362
2363
2364
2365
2366
2367 func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
2368 return add(p, uintptr(i)*eltSize, "i < len")
2369 }
2370
2371
2372
2373 func grow(s Value, extra int) (Value, int, int) {
2374 i0 := s.Len()
2375 i1 := i0 + extra
2376 if i1 < i0 {
2377 panic("reflect.Append: slice overflow")
2378 }
2379 m := s.Cap()
2380 if i1 <= m {
2381 return s.Slice(0, i1), i0, i1
2382 }
2383 if m == 0 {
2384 m = extra
2385 } else {
2386 for m < i1 {
2387 if i0 < 1024 {
2388 m += m
2389 } else {
2390 m += m / 4
2391 }
2392 }
2393 }
2394 t := MakeSlice(s.Type(), i1, m)
2395 Copy(t, s)
2396 return t, i0, i1
2397 }
2398
2399
2400
2401 func Append(s Value, x ...Value) Value {
2402 s.mustBe(Slice)
2403 s, i0, i1 := grow(s, len(x))
2404 for i, j := i0, 0; i < i1; i, j = i+1, j+1 {
2405 s.Index(i).Set(x[j])
2406 }
2407 return s
2408 }
2409
2410
2411
2412 func AppendSlice(s, t Value) Value {
2413 s.mustBe(Slice)
2414 t.mustBe(Slice)
2415 typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
2416 s, i0, i1 := grow(s, t.Len())
2417 Copy(s.Slice(i0, i1), t)
2418 return s
2419 }
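
A minimal sketch of Append and AppendSlice growing a slice through reflection (values here are illustrative):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	s := reflect.ValueOf([]int{1, 2})

	s = reflect.Append(s, reflect.ValueOf(3), reflect.ValueOf(4))
	s = reflect.AppendSlice(s, reflect.ValueOf([]int{5, 6}))

	fmt.Println(s.Interface()) // [1 2 3 4 5 6]
}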
2420
2421
2422
2423
2424
2425
2426
2427
2428 func Copy(dst, src Value) int {
2429 dk := dst.kind()
2430 if dk != Array && dk != Slice {
2431 panic(&ValueError{"reflect.Copy", dk})
2432 }
2433 if dk == Array {
2434 dst.mustBeAssignable()
2435 }
2436 dst.mustBeExported()
2437
2438 sk := src.kind()
2439 var stringCopy bool
2440 if sk != Array && sk != Slice {
2441 stringCopy = sk == String && dst.typ.Elem().Kind() == Uint8
2442 if !stringCopy {
2443 panic(&ValueError{"reflect.Copy", sk})
2444 }
2445 }
2446 src.mustBeExported()
2447
2448 de := dst.typ.Elem()
2449 if !stringCopy {
2450 se := src.typ.Elem()
2451 typesMustMatch("reflect.Copy", de, se)
2452 }
2453
2454 var ds, ss unsafeheader.Slice
2455 if dk == Array {
2456 ds.Data = dst.ptr
2457 ds.Len = dst.Len()
2458 ds.Cap = ds.Len
2459 } else {
2460 ds = *(*unsafeheader.Slice)(dst.ptr)
2461 }
2462 if sk == Array {
2463 ss.Data = src.ptr
2464 ss.Len = src.Len()
2465 ss.Cap = ss.Len
2466 } else if sk == Slice {
2467 ss = *(*unsafeheader.Slice)(src.ptr)
2468 } else {
2469 sh := *(*unsafeheader.String)(src.ptr)
2470 ss.Data = sh.Data
2471 ss.Len = sh.Len
2472 ss.Cap = sh.Len
2473 }
2474
2475 return typedslicecopy(de.common(), ds, ss)
2476 }
2477
2478
2479
2480 type runtimeSelect struct {
2481 dir SelectDir
2482 typ *rtype
2483 ch unsafe.Pointer
2484 val unsafe.Pointer
2485 }
2486
2487
2488
2489
2490
2491
2492 func rselect([]runtimeSelect) (chosen int, recvOK bool)
2493
2494
2495 type SelectDir int
2496
2497
2498
2499 const (
2500 _ SelectDir = iota
2501 SelectSend
2502 SelectRecv
2503 SelectDefault
2504 )
2505
2506
2507
2508
2509
2510
2511
2512
2513
2514
2515
2516
2517
2518
2519
2520
2521
2522
2523 type SelectCase struct {
2524 Dir SelectDir
2525 Chan Value
2526 Send Value
2527 }
2528
2529
2530
2531
2532
2533
2534
2535
2536
2537 func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
2538 if len(cases) > 65536 {
2539 panic("reflect.Select: too many cases (max 65536)")
2540 }
2541
2542
2543
2544 var runcases []runtimeSelect
2545 if len(cases) > 4 {
2546
2547 runcases = make([]runtimeSelect, len(cases))
2548 } else {
2549
2550 runcases = make([]runtimeSelect, len(cases), 4)
2551 }
2552
2553 haveDefault := false
2554 for i, c := range cases {
2555 rc := &runcases[i]
2556 rc.dir = c.Dir
2557 switch c.Dir {
2558 default:
2559 panic("reflect.Select: invalid Dir")
2560
2561 case SelectDefault:
2562 if haveDefault {
2563 panic("reflect.Select: multiple default cases")
2564 }
2565 haveDefault = true
2566 if c.Chan.IsValid() {
2567 panic("reflect.Select: default case has Chan value")
2568 }
2569 if c.Send.IsValid() {
2570 panic("reflect.Select: default case has Send value")
2571 }
2572
2573 case SelectSend:
2574 ch := c.Chan
2575 if !ch.IsValid() {
2576 break
2577 }
2578 ch.mustBe(Chan)
2579 ch.mustBeExported()
2580 tt := (*chanType)(unsafe.Pointer(ch.typ))
2581 if ChanDir(tt.dir)&SendDir == 0 {
2582 panic("reflect.Select: SendDir case using recv-only channel")
2583 }
2584 rc.ch = ch.pointer()
2585 rc.typ = &tt.rtype
2586 v := c.Send
2587 if !v.IsValid() {
2588 panic("reflect.Select: SendDir case missing Send value")
2589 }
2590 v.mustBeExported()
2591 v = v.assignTo("reflect.Select", tt.elem, nil)
2592 if v.flag&flagIndir != 0 {
2593 rc.val = v.ptr
2594 } else {
2595 rc.val = unsafe.Pointer(&v.ptr)
2596 }
2597
2598 case SelectRecv:
2599 if c.Send.IsValid() {
2600 panic("reflect.Select: RecvDir case has Send value")
2601 }
2602 ch := c.Chan
2603 if !ch.IsValid() {
2604 break
2605 }
2606 ch.mustBe(Chan)
2607 ch.mustBeExported()
2608 tt := (*chanType)(unsafe.Pointer(ch.typ))
2609 if ChanDir(tt.dir)&RecvDir == 0 {
2610 panic("reflect.Select: RecvDir case using send-only channel")
2611 }
2612 rc.ch = ch.pointer()
2613 rc.typ = &tt.rtype
2614 rc.val = unsafe_New(tt.elem)
2615 }
2616 }
2617
2618 chosen, recvOK = rselect(runcases)
2619 if runcases[chosen].dir == SelectRecv {
2620 tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
2621 t := tt.elem
2622 p := runcases[chosen].val
2623 fl := flag(t.Kind())
2624 if ifaceIndir(t) {
2625 recv = Value{t, p, fl | flagIndir}
2626 } else {
2627 recv = Value{t, *(*unsafe.Pointer)(p), fl}
2628 }
2629 }
2630 return chosen, recv, recvOK
2631 }
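
A short sketch of driving Select with a receive case and a default case; the buffered channel is made up for the example.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	ch := make(chan int, 1)
	ch <- 42

	cases := []reflect.SelectCase{
		{Dir: reflect.SelectRecv, Chan: reflect.ValueOf(ch)},
		{Dir: reflect.SelectDefault},
	}
	chosen, recv, recvOK := reflect.Select(cases)
	if chosen == 0 && recvOK {
		fmt.Println("received", recv.Int()) // received 42
	} else {
		fmt.Println("no value ready")
	}
}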
2632
2633
2636
2637
2638 func unsafe_New(*rtype) unsafe.Pointer
2639 func unsafe_NewArray(*rtype, int) unsafe.Pointer
2640
2641
2642
2643 func MakeSlice(typ Type, len, cap int) Value {
2644 if typ.Kind() != Slice {
2645 panic("reflect.MakeSlice of non-slice type")
2646 }
2647 if len < 0 {
2648 panic("reflect.MakeSlice: negative len")
2649 }
2650 if cap < 0 {
2651 panic("reflect.MakeSlice: negative cap")
2652 }
2653 if len > cap {
2654 panic("reflect.MakeSlice: len > cap")
2655 }
2656
2657 s := unsafeheader.Slice{Data: unsafe_NewArray(typ.Elem().(*rtype), cap), Len: len, Cap: cap}
2658 return Value{typ.(*rtype), unsafe.Pointer(&s), flagIndir | flag(Slice)}
2659 }
2660
2661
2662 func MakeChan(typ Type, buffer int) Value {
2663 if typ.Kind() != Chan {
2664 panic("reflect.MakeChan of non-chan type")
2665 }
2666 if buffer < 0 {
2667 panic("reflect.MakeChan: negative buffer size")
2668 }
2669 if typ.ChanDir() != BothDir {
2670 panic("reflect.MakeChan: unidirectional channel type")
2671 }
2672 t := typ.(*rtype)
2673 ch := makechan(t, buffer)
2674 return Value{t, ch, flag(Chan)}
2675 }
2676
2677
2678 func MakeMap(typ Type) Value {
2679 return MakeMapWithSize(typ, 0)
2680 }
2681
2682
2683
2684 func MakeMapWithSize(typ Type, n int) Value {
2685 if typ.Kind() != Map {
2686 panic("reflect.MakeMapWithSize of non-map type")
2687 }
2688 t := typ.(*rtype)
2689 m := makemap(t, n)
2690 return Value{t, m, flag(Map)}
2691 }
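
An illustrative sketch of the Make* constructors building a slice, a map, and a channel from their reflect.Type descriptions; the element types are arbitrary examples.

package main

import (
	"fmt"
	"reflect"
)

func main() {
	sliceT := reflect.TypeOf([]int(nil))
	s := reflect.MakeSlice(sliceT, 2, 4)
	s.Index(0).SetInt(7)
	fmt.Println(s.Interface(), s.Len(), s.Cap()) // [7 0] 2 4

	mapT := reflect.TypeOf(map[string]bool(nil))
	m := reflect.MakeMap(mapT)
	m.SetMapIndex(reflect.ValueOf("ok"), reflect.ValueOf(true))
	fmt.Println(m.Interface()) // map[ok:true]

	chanT := reflect.TypeOf((chan int)(nil)) // must be bidirectional for MakeChan
	c := reflect.MakeChan(chanT, 1)
	c.Send(reflect.ValueOf(9))
	x, _ := c.Recv()
	fmt.Println(x.Int()) // 9
}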
2692
2693
2694
2695
2696 func Indirect(v Value) Value {
2697 if v.Kind() != Ptr {
2698 return v
2699 }
2700 return v.Elem()
2701 }
2702
2703
2704
2705 func ValueOf(i interface{}) Value {
2706 if i == nil {
2707 return Value{}
2708 }
2709
2710
2711
2712
2713
2714 escapes(i)
2715
2716 return unpackEface(i)
2717 }
2718
2719
2720
2721
2722
2723
2724 func Zero(typ Type) Value {
2725 if typ == nil {
2726 panic("reflect: Zero(nil)")
2727 }
2728 t := typ.(*rtype)
2729 fl := flag(t.Kind())
2730 if ifaceIndir(t) {
2731 var p unsafe.Pointer
2732 if t.size <= maxZero {
2733 p = unsafe.Pointer(&zeroVal[0])
2734 } else {
2735 p = unsafe_New(t)
2736 }
2737 return Value{t, p, fl | flagIndir}
2738 }
2739 return Value{t, nil, fl}
2740 }
2741
2742
2743 const maxZero = 1024
2744
2745
2746 var zeroVal [maxZero]byte
2747
2748
2749
2750 func New(typ Type) Value {
2751 if typ == nil {
2752 panic("reflect: New(nil)")
2753 }
2754 t := typ.(*rtype)
2755 pt := t.ptrTo()
2756 if ifaceIndir(pt) {
2757
2758 panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
2759 }
2760 ptr := unsafe_New(t)
2761 fl := flag(Ptr)
2762 return Value{pt, ptr, fl}
2763 }
2764
2765
2766
2767 func NewAt(typ Type, p unsafe.Pointer) Value {
2768 fl := flag(Ptr)
2769 t := typ.(*rtype)
2770 return Value{t.ptrTo(), p, fl}
2771 }
2772
2773
2774
2775
2776
2777 func (v Value) assignTo(context string, dst *rtype, target unsafe.Pointer) Value {
2778 if v.flag&flagMethod != 0 {
2779 v = makeMethodValue(context, v)
2780 }
2781
2782 switch {
2783 case directlyAssignable(dst, v.typ):
2784
2785
2786 fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
2787 fl |= flag(dst.Kind())
2788 return Value{dst, v.ptr, fl}
2789
2790 case implements(dst, v.typ):
2791 if target == nil {
2792 target = unsafe_New(dst)
2793 }
2794 if v.Kind() == Interface && v.IsNil() {
2795
2796
2797
2798 return Value{dst, nil, flag(Interface)}
2799 }
2800 x := valueInterface(v, false)
2801 if dst.NumMethod() == 0 {
2802 *(*interface{})(target) = x
2803 } else {
2804 ifaceE2I(dst, x, target)
2805 }
2806 return Value{dst, target, flagIndir | flag(Interface)}
2807 }
2808
2809
2810 panic(context + ": value of type " + v.typ.String() + " is not assignable to type " + dst.String())
2811 }
2812
2813
2814
2815
2816 func (v Value) Convert(t Type) Value {
2817 if v.flag&flagMethod != 0 {
2818 v = makeMethodValue("Convert", v)
2819 }
2820 op := convertOp(t.common(), v.typ)
2821 if op == nil {
2822 panic("reflect.Value.Convert: value of type " + v.typ.String() + " cannot be converted to type " + t.String())
2823 }
2824 return op(v, t)
2825 }
2826
2827
2828
2829 func (v Value) CanConvert(t Type) bool {
2830 vt := v.Type()
2831 if !vt.ConvertibleTo(t) {
2832 return false
2833 }
2834
2835
2836
2837 if vt.Kind() == Slice && t.Kind() == Ptr && t.Elem().Kind() == Array {
2838 n := t.Elem().Len()
2839 h := (*unsafeheader.Slice)(v.ptr)
2840 if n > h.Len {
2841 return false
2842 }
2843 }
2844 return true
2845 }
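
A minimal sketch of Convert and CanConvert; the Celsius type is invented for the example, and the slice-to-array-pointer case exercises the length check CanConvert performs above.

package main

import (
	"fmt"
	"reflect"
)

type Celsius float64

func main() {
	v := reflect.ValueOf(100)

	f := v.Convert(reflect.TypeOf(Celsius(0)))
	fmt.Println(f.Type(), f.Float()) // main.Celsius 100

	// The types are convertible, but the slice is too short for *[4]byte,
	// so Convert would panic; CanConvert reports false instead.
	s := reflect.ValueOf([]byte{1, 2})
	arrPtrT := reflect.TypeOf((*[4]byte)(nil))
	fmt.Println(s.CanConvert(arrPtrT)) // false
}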
2846
2847
2848
2849 func convertOp(dst, src *rtype) func(Value, Type) Value {
2850 switch src.Kind() {
2851 case Int, Int8, Int16, Int32, Int64:
2852 switch dst.Kind() {
2853 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2854 return cvtInt
2855 case Float32, Float64:
2856 return cvtIntFloat
2857 case String:
2858 return cvtIntString
2859 }
2860
2861 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2862 switch dst.Kind() {
2863 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2864 return cvtUint
2865 case Float32, Float64:
2866 return cvtUintFloat
2867 case String:
2868 return cvtUintString
2869 }
2870
2871 case Float32, Float64:
2872 switch dst.Kind() {
2873 case Int, Int8, Int16, Int32, Int64:
2874 return cvtFloatInt
2875 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2876 return cvtFloatUint
2877 case Float32, Float64:
2878 return cvtFloat
2879 }
2880
2881 case Complex64, Complex128:
2882 switch dst.Kind() {
2883 case Complex64, Complex128:
2884 return cvtComplex
2885 }
2886
2887 case String:
2888 if dst.Kind() == Slice && dst.Elem().PkgPath() == "" {
2889 switch dst.Elem().Kind() {
2890 case Uint8:
2891 return cvtStringBytes
2892 case Int32:
2893 return cvtStringRunes
2894 }
2895 }
2896
2897 case Slice:
2898 if dst.Kind() == String && src.Elem().PkgPath() == "" {
2899 switch src.Elem().Kind() {
2900 case Uint8:
2901 return cvtBytesString
2902 case Int32:
2903 return cvtRunesString
2904 }
2905 }
2906 // "x is a slice, T is a pointer-to-array type,
2907 // and the slice and array types have identical element types."
2908 if dst.Kind() == Ptr && dst.Elem().Kind() == Array && src.Elem() == dst.Elem().Elem() {
2909 return cvtSliceArrayPtr
2910 }
2911
2912 case Chan:
2913 if dst.Kind() == Chan && specialChannelAssignability(dst, src) {
2914 return cvtDirect
2915 }
2916 }
2917
2918 // dst and src have same underlying type.
2919 if haveIdenticalUnderlyingType(dst, src, false) {
2920 return cvtDirect
2921 }
2922
2923 // dst and src are non-defined pointer types with same underlying base type.
2924 if dst.Kind() == Ptr && dst.Name() == "" &&
2925 src.Kind() == Ptr && src.Name() == "" &&
2926 haveIdenticalUnderlyingType(dst.Elem().common(), src.Elem().common(), false) {
2927 return cvtDirect
2928 }
2929
2930 if implements(dst, src) {
2931 if src.Kind() == Interface {
2932 return cvtI2I
2933 }
2934 return cvtT2I
2935 }
2936
2937 return nil
2938 }
2939
2940 // makeInt returns a Value of type t equal to bits (possibly truncated),
2941 // where t is a signed or unsigned int type.
2942 func makeInt(f flag, bits uint64, t Type) Value {
2943 typ := t.common()
2944 ptr := unsafe_New(typ)
2945 switch typ.size {
2946 case 1:
2947 *(*uint8)(ptr) = uint8(bits)
2948 case 2:
2949 *(*uint16)(ptr) = uint16(bits)
2950 case 4:
2951 *(*uint32)(ptr) = uint32(bits)
2952 case 8:
2953 *(*uint64)(ptr) = bits
2954 }
2955 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2956 }
2957
2958 // makeFloat returns a Value of type t equal to v (possibly truncated to float32),
2959 // where t is a float32 or float64 type.
2960 func makeFloat(f flag, v float64, t Type) Value {
2961 typ := t.common()
2962 ptr := unsafe_New(typ)
2963 switch typ.size {
2964 case 4:
2965 *(*float32)(ptr) = float32(v)
2966 case 8:
2967 *(*float64)(ptr) = v
2968 }
2969 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2970 }
2971
2972 // makeFloat32 returns a Value of type t equal to v, where t is a float32 type.
2973 func makeFloat32(f flag, v float32, t Type) Value {
2974 typ := t.common()
2975 ptr := unsafe_New(typ)
2976 *(*float32)(ptr) = v
2977 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2978 }
2979
2980 // makeComplex returns a Value of type t equal to v (possibly truncated to complex64),
2981 // where t is a complex64 or complex128 type.
2982 func makeComplex(f flag, v complex128, t Type) Value {
2983 typ := t.common()
2984 ptr := unsafe_New(typ)
2985 switch typ.size {
2986 case 8:
2987 *(*complex64)(ptr) = complex64(v)
2988 case 16:
2989 *(*complex128)(ptr) = v
2990 }
2991 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
2992 }
2993
2994 func makeString(f flag, v string, t Type) Value {
2995 ret := New(t).Elem()
2996 ret.SetString(v)
2997 ret.flag = ret.flag&^flagAddr | f
2998 return ret
2999 }
3000
3001 func makeBytes(f flag, v []byte, t Type) Value {
3002 ret := New(t).Elem()
3003 ret.SetBytes(v)
3004 ret.flag = ret.flag&^flagAddr | f
3005 return ret
3006 }
3007
3008 func makeRunes(f flag, v []rune, t Type) Value {
3009 ret := New(t).Elem()
3010 ret.setRunes(v)
3011 ret.flag = ret.flag&^flagAddr | f
3012 return ret
3013 }
3014
3015 // These conversion functions are returned by convertOp
3016 // for classes of conversions. For example, the first function, cvtInt,
3017 // takes any value v of signed int type and returns the value converted
3018 // to type t, where t is any signed or unsigned int type.
3019
3020 // convertOp: intXX -> [u]intXX
3021 func cvtInt(v Value, t Type) Value {
3022 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3023 }
3024
3025 // convertOp: uintXX -> [u]intXX
3026 func cvtUint(v Value, t Type) Value {
3027 return makeInt(v.flag.ro(), v.Uint(), t)
3028 }
3029
3030 // convertOp: floatXX -> intXX
3031 func cvtFloatInt(v Value, t Type) Value {
3032 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3033 }
3034
3035 // convertOp: floatXX -> uintXX
3036 func cvtFloatUint(v Value, t Type) Value {
3037 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3038 }
3039
3040 // convertOp: intXX -> floatXX
3041 func cvtIntFloat(v Value, t Type) Value {
3042 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3043 }
3044
3045 // convertOp: uintXX -> floatXX
3046 func cvtUintFloat(v Value, t Type) Value {
3047 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3048 }
3049
3050 // convertOp: floatXX -> floatXX
3051 func cvtFloat(v Value, t Type) Value {
3052 if v.Type().Kind() == Float32 && t.Kind() == Float32 {
3053 // Don't do any conversion if both types have underlying type float32.
3054 // This avoids converting to float64 and back, which will
3055 // convert a signaling NaN to a quiet NaN. See issue 36400.
3056 return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
3057 }
3058 return makeFloat(v.flag.ro(), v.Float(), t)
3059 }
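// Editorial sketch, not part of the original source: when source and
// destination are both float32, cvtFloat copies the bits via makeFloat32
// instead of round-tripping through float64, which would quiet a signaling
// NaN (issue 36400). exampleFloat32Convert is a hypothetical name.
func exampleFloat32Convert() {
	f := math.Float32frombits(0x7f800001) // a signaling NaN bit pattern
	v := ValueOf(f).Convert(TypeOf(float32(0)))
	_ = math.Float32bits(v.Interface().(float32)) // expected to still be 0x7f800001
}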
3060
3061 // convertOp: complexXX -> complexXX
3062 func cvtComplex(v Value, t Type) Value {
3063 return makeComplex(v.flag.ro(), v.Complex(), t)
3064 }
3065
3066 // convertOp: intXX -> string
3067 func cvtIntString(v Value, t Type) Value {
3068 s := "\uFFFD"
3069 if x := v.Int(); int64(rune(x)) == x {
3070 s = string(rune(x))
3071 }
3072 return makeString(v.flag.ro(), s, t)
3073 }
3074
3075 // convertOp: uintXX -> string
3076 func cvtUintString(v Value, t Type) Value {
3077 s := "\uFFFD"
3078 if x := v.Uint(); uint64(rune(x)) == x {
3079 s = string(rune(x))
3080 }
3081 return makeString(v.flag.ro(), s, t)
3082 }
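// Editorial sketch, not part of the original source: integer-to-string
// conversions yield the one-rune string for the code point, or "\uFFFD" when
// the value does not fit in a rune; exampleIntStringConvert is a hypothetical
// name.
func exampleIntStringConvert() {
	st := TypeOf("")
	_ = ValueOf(int64('A')).Convert(st).String()      // "A"
	_ = ValueOf(uint64(1 << 40)).Convert(st).String() // "\uFFFD": out of rune range
}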
3083
3084 // convertOp: []byte -> string
3085 func cvtBytesString(v Value, t Type) Value {
3086 return makeString(v.flag.ro(), string(v.Bytes()), t)
3087 }
3088
3089 // convertOp: string -> []byte
3090 func cvtStringBytes(v Value, t Type) Value {
3091 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3092 }
3093
3094 // convertOp: []rune -> string
3095 func cvtRunesString(v Value, t Type) Value {
3096 return makeString(v.flag.ro(), string(v.runes()), t)
3097 }
3098
3099 // convertOp: string -> []rune
3100 func cvtStringRunes(v Value, t Type) Value {
3101 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3102 }
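// Editorial sketch, not part of the original source: string/[]byte/[]rune
// conversions through Convert mirror the built-in conversions;
// exampleStringSliceConvert is a hypothetical name.
func exampleStringSliceConvert() {
	_ = ValueOf("héllo").Convert(TypeOf([]byte(nil))).Len()    // 6: UTF-8 bytes
	_ = ValueOf("héllo").Convert(TypeOf([]rune(nil))).Len()    // 5: runes
	_ = ValueOf([]rune{'g', 'o'}).Convert(TypeOf("")).String() // "go"
}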
3103
3104 // convertOp: []T -> *[N]T
3105 func cvtSliceArrayPtr(v Value, t Type) Value {
3106 n := t.Elem().Len()
3107 h := (*unsafeheader.Slice)(v.ptr)
3108 if n > h.Len {
3109 panic("reflect: cannot convert slice with length " + itoa.Itoa(h.Len) + " to pointer to array with length " + itoa.Itoa(n))
3110 }
3111 return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Ptr)}
3112 }
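// Editorial sketch, not part of the original source: converting a slice to a
// pointer-to-array aliases the slice's backing array and panics if the slice
// is shorter than the array; exampleSliceArrayPtr is a hypothetical name.
func exampleSliceArrayPtr() {
	s := []int{1, 2, 3, 4}
	p := ValueOf(s).Convert(TypeOf((*[2]int)(nil))).Interface().(*[2]int)
	p[0] = 99
	_ = s[0] == 99 // true: the array pointer shares the slice's backing store
	// Converting to *[8]int instead would panic: the slice is too short.
}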
3113
3114 // convertOp: direct copy
3115 func cvtDirect(v Value, typ Type) Value {
3116 f := v.flag
3117 t := typ.common()
3118 ptr := v.ptr
3119 if f&flagAddr != 0 {
3120
3121 // indirect, mutable word - make a copy
3122 typedmemmove(t, c, ptr)
3123 ptr = c
3124 f &^= flagAddr
3125 }
3126 return Value{t, ptr, v.flag.ro() | f}
3127 }
3128
3129 // convertOp: concrete -> interface
3130 func cvtT2I(v Value, typ Type) Value {
3131 target := unsafe_New(typ.common())
3132 x := valueInterface(v, false)
3133 if typ.NumMethod() == 0 {
3134 *(*interface{})(target) = x
3135 } else {
3136 ifaceE2I(typ.(*rtype), x, target)
3137 }
3138 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3139 }
3140
3141 // convertOp: interface -> interface
3142 func cvtI2I(v Value, typ Type) Value {
3143 if v.IsNil() {
3144 ret := Zero(typ)
3145 ret.flag |= v.flag.ro()
3146 return ret
3147 }
3148 return cvtT2I(v.Elem(), typ)
3149 }
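// Editorial sketch, not part of the original source: when the destination is
// an interface type, Convert goes through cvtT2I (or cvtI2I for interface
// sources); exampleConvertToInterface is a hypothetical name.
func exampleConvertToInterface() {
	var empty interface{}
	it := TypeOf(&empty).Elem() // the interface{} type itself
	v := ValueOf(42).Convert(it)
	_ = v.Kind()       // Interface
	_ = v.Elem().Int() // 42: the dynamic value stored in the interface
}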
3150
3151 // implemented in ../runtime
3152 func chancap(ch unsafe.Pointer) int
3153 func chanclose(ch unsafe.Pointer)
3154 func chanlen(ch unsafe.Pointer) int
3155
3156 // Note: some of the noescape annotations below are technically a lie,
3157 // but safe in the context of this package. Functions like chansend
3158 // and mapassign don't escape the referent, but may escape anything
3159 // the referent points to (they do shallow copies of the referent).
3160 // It is safe in this package because the referent may only point
3161 // to something a Value may point to, and that is always in the heap
3162 // (due to the escapes() call in ValueOf).
3163
3164 //go:noescape
3165 func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)
3166
3167 //go:noescape
3168 func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
3169
3170 func makechan(typ *rtype, size int) (ch unsafe.Pointer)
3171 func makemap(t *rtype, cap int) (m unsafe.Pointer)
3172
3173 //go:noescape
3174 func mapaccess(t *rtype, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
3175
3176 //go:noescape
3177 func mapassign(t *rtype, m unsafe.Pointer, key, val unsafe.Pointer)
3178
3179 //go:noescape
3180 func mapdelete(t *rtype, m unsafe.Pointer, key unsafe.Pointer)
3181
3182
3183
3184
3185 func mapiterinit(t *rtype, m unsafe.Pointer) unsafe.Pointer
3186
3187 //go:noescape
3188 func mapiterkey(it unsafe.Pointer) (key unsafe.Pointer)
3189
3190 //go:noescape
3191 func mapiterelem(it unsafe.Pointer) (elem unsafe.Pointer)
3192
3193 //go:noescape
3194 func mapiternext(it unsafe.Pointer)
3195
3196 //go:noescape
3197 func maplen(m unsafe.Pointer) int
3198
3199
3200
3201
3202
3203
3204
3205
3206
3207
3208
3209
3210
3211
3212
3213
3214
3215
3216
3217
3218
3219
3220
3221
3222
3223
3224 func call(stackArgsType *rtype, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)
3225
3226 func ifaceE2I(t *rtype, src interface{}, dst unsafe.Pointer)
3227
3228 // memmove copies size bytes to dst from src. No write barriers are used.
3229 //go:noescape
3230 func memmove(dst, src unsafe.Pointer, size uintptr)
3231
3232 // typedmemmove copies a value of type t to dst from src.
3233 //go:noescape
3234 func typedmemmove(t *rtype, dst, src unsafe.Pointer)
3235
3236 // typedmemmovepartial is like typedmemmove but assumes that
3237 // dst and src point off bytes into the value and only copies size bytes.
3238 //go:noescape
3239 func typedmemmovepartial(t *rtype, dst, src unsafe.Pointer, off, size uintptr)
3240
3241 // typedmemclr zeros the value at ptr of type t.
3242 //go:noescape
3243 func typedmemclr(t *rtype, ptr unsafe.Pointer)
3244
3245 // typedmemclrpartial is like typedmemclr but assumes that dst points
3246 // off bytes into the value and only clears size bytes.
3247 //go:noescape
3248 func typedmemclrpartial(t *rtype, ptr unsafe.Pointer, off, size uintptr)
3249
3250 // typedslicecopy copies a slice of elemType values from src to dst,
3251 // returning the number of elements copied.
3252 //go:noescape
3253 func typedslicecopy(elemType *rtype, dst, src unsafeheader.Slice) int
3254
3255 //go:noescape
3256 func typehash(t *rtype, p unsafe.Pointer, h uintptr) uintptr
3257
3258 // escapes forces x (and anything it points to) to be heap allocated: the
3259 // compiler cannot prove that dummy.b is always false, so it must assume the
3260 // assignment to the package-level dummy.x happens and that x therefore escapes.
3261 func escapes(x interface{}) {
3262 if dummy.b {
3263 dummy.x = x
3264 }
3265 }
3266
3267 var dummy struct {
3268 b bool
3269 x interface{}
3270 }
3271