Source file
src/reflect/value.go
1
2
3
4
5 package reflect
6
7 import (
8 "errors"
9 "internal/abi"
10 "internal/goarch"
11 "internal/itoa"
12 "internal/unsafeheader"
13 "math"
14 "runtime"
15 "unsafe"
16 )
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
// Value is the reflection interface to a Go value.
// The zero Value represents no value; its IsValid method returns false.
//
// A Value can be used concurrently by multiple goroutines provided that
// the underlying Go value can be used concurrently for the equivalent
// direct operations.
type Value struct {
	// typ holds the type of the value represented by a Value.
	typ *rtype

	// ptr holds pointer-valued data or, if flagIndir is set, a pointer
	// to the data. Valid when flagIndir is set or typ.pointers() is true
	// (see Value.pointer below).
	ptr unsafe.Pointer

	// flag holds metadata about the value (embedded so flag's methods
	// are promoted onto Value):
	//   - the low flagKindWidth bits cache the value's Kind,
	//   - flagStickyRO/flagEmbedRO mark values reached via unexported fields,
	//   - flagIndir means ptr points at the data rather than being the data,
	//   - flagAddr means the value is addressable (Addr/Set are allowed),
	//   - flagMethod means the value is a method value; in that case the
	//     bits above flagMethodShift hold the method index.
	flag

	// NOTE: for a method value (flagMethod set), typ describes the
	// receiver and ptr/flag describe the receiver value; the method
	// number lives in the upper flag bits.
}
67
// flag packs a Value's Kind plus metadata bits into a single uintptr.
type flag uintptr

const (
	flagKindWidth = 5 // enough bits to hold every Kind value
	flagKindMask flag = 1<<flagKindWidth - 1
	flagStickyRO flag = 1 << 5 // obtained via unexported, not embedded, field
	flagEmbedRO flag = 1 << 6 // obtained via unexported embedded field
	flagIndir flag = 1 << 7 // ptr holds a pointer to the data
	flagAddr flag = 1 << 8 // value is addressable (v.CanAddr is true)
	flagMethod flag = 1 << 9 // value is a method value
	flagMethodShift = 10 // method index occupies the bits above this
	flagRO flag = flagStickyRO | flagEmbedRO // either read-only reason
)
81
82 func (f flag) kind() Kind {
83 return Kind(f & flagKindMask)
84 }
85
86 func (f flag) ro() flag {
87 if f&flagRO != 0 {
88 return flagStickyRO
89 }
90 return 0
91 }
92
93
94
95
96 func (v Value) pointer() unsafe.Pointer {
97 if v.typ.size != goarch.PtrSize || !v.typ.pointers() {
98 panic("can't call pointer on a non-pointer Value")
99 }
100 if v.flag&flagIndir != 0 {
101 return *(*unsafe.Pointer)(v.ptr)
102 }
103 return v.ptr
104 }
105
106
// packEface converts v to the empty interface, writing the type/word
// pair directly into an eface header.
func packEface(v Value) any {
	t := v.typ
	var i any
	e := (*emptyInterface)(unsafe.Pointer(&i))
	// First, fill in the data portion of the interface.
	switch {
	case ifaceIndir(t):
		if v.flag&flagIndir == 0 {
			panic("bad indir")
		}
		// Value is indirect, and so is the interface we're making.
		ptr := v.ptr
		if v.flag&flagAddr != 0 {
			// The value is addressable, so an interface built from
			// v.ptr would alias the original; copy into fresh memory
			// so the interface holds its own data.
			c := unsafe_New(t)
			typedmemmove(t, c, ptr)
			ptr = c
		}
		e.word = ptr
	case v.flag&flagIndir != 0:
		// Value is indirect but interface is direct: load the
		// pointer-shaped data out of v.ptr.
		e.word = *(*unsafe.Pointer)(v.ptr)
	default:
		// Value is direct, and so is the interface.
		e.word = v.ptr
	}
	// Now, fill in the type portion. We're very careful here not
	// to have any operation between the e.word and e.typ assignments
	// that would let the garbage collector observe the partially-built
	// interface value.
	e.typ = t
	return i
}
142
143
// unpackEface converts the empty interface i to a Value by reading
// the eface header directly.
func unpackEface(i any) Value {
	e := (*emptyInterface)(unsafe.Pointer(&i))
	// NOTE: don't read e.word until we know whether it is really a pointer or not.
	t := e.typ
	if t == nil {
		// nil interface -> zero Value.
		return Value{}
	}
	f := flag(t.Kind())
	if ifaceIndir(t) {
		// Interface word is a pointer to the data, not the data itself.
		f |= flagIndir
	}
	return Value{t, e.word, f}
}
157
158
159
160
// A ValueError occurs when a Value method is invoked on
// a Value that does not support it. Such cases are documented
// in the description of each method.
type ValueError struct {
	Method string // full name of the method that was misused
	Kind   Kind   // actual kind of the Value; 0 (Invalid) for the zero Value
}
165
166 func (e *ValueError) Error() string {
167 if e.Kind == 0 {
168 return "reflect: call of " + e.Method + " on zero Value"
169 }
170 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
171 }
172
173
174
// methodName returns the name of the calling method,
// assumed to be two stack frames above (the exported method that
// called the helper that called methodName). The Caller skip count is
// load-bearing: do not add or remove call layers here.
func methodName() string {
	pc, _, _, _ := runtime.Caller(2)
	f := runtime.FuncForPC(pc)
	if f == nil {
		return "unknown method"
	}
	return f.Name()
}
183
184
185
// methodNameSkip is like methodName, but skips one additional frame:
// used from the *Slow helpers, which add an extra call layer between
// the user-visible method and this lookup.
func methodNameSkip() string {
	pc, _, _, _ := runtime.Caller(3)
	f := runtime.FuncForPC(pc)
	if f == nil {
		return "unknown method"
	}
	return f.Name()
}
194
195
// emptyInterface is the header for an interface{} (any) value.
// Layout must match the runtime's eface.
type emptyInterface struct {
	typ  *rtype         // dynamic type
	word unsafe.Pointer // data word: the value, or a pointer to it
}
200
201
// nonEmptyInterface is the header for an interface value with methods.
// Layout must match the runtime's iface/itab.
type nonEmptyInterface struct {
	// see ../runtime/iface.go:/Itab
	itab *struct {
		ityp *rtype // static interface type
		typ  *rtype // dynamic concrete type
		hash uint32 // copy of typ.hash
		_    [4]byte
		fun  [100000]unsafe.Pointer // method table; 100000 is an arbitrary bound for indexing
	}
	word unsafe.Pointer
}
213
214
215
216
217
218
219
// mustBe panics if f's kind is not expected.
// Making this a method on flag instead of on Value
// (and embedding flag in Value) means that we can write
// the very clear v.mustBe(Bool) and have it compile into
// v.flag.mustBe(Bool), which will only bother to copy the
// single important word for the receiver.
func (f flag) mustBe(expected Kind) {
	// The inlined version of f.kind() is used here deliberately —
	// keeping this body tiny matters for inlining of callers.
	if Kind(f&flagKindMask) != expected {
		panic(&ValueError{methodName(), f.kind()})
	}
}
226
227
228
// mustBeExported panics if f records that the value was obtained using
// an unexported field. The panic path is split into a separate slow
// function so this check stays inlineable.
func (f flag) mustBeExported() {
	if f == 0 || f&flagRO != 0 {
		f.mustBeExportedSlow()
	}
}
234
// mustBeExportedSlow is the out-of-line panic path for mustBeExported.
// It uses methodNameSkip because it sits one call level deeper than
// the user-visible method.
func (f flag) mustBeExportedSlow() {
	if f == 0 {
		// Zero Value: no kind to report.
		panic(&ValueError{methodNameSkip(), Invalid})
	}
	if f&flagRO != 0 {
		panic("reflect: " + methodNameSkip() + " using value obtained using unexported field")
	}
}
243
244
245
246
// mustBeAssignable panics if f records that the value is not
// assignable, which is to say that either it was obtained using an
// unexported field or it is not addressable. The panic path is split
// out so this check stays inlineable.
func (f flag) mustBeAssignable() {
	if f&flagRO != 0 || f&flagAddr == 0 {
		f.mustBeAssignableSlow()
	}
}
252
// mustBeAssignableSlow is the out-of-line panic path for
// mustBeAssignable, reporting the most specific failure reason.
func (f flag) mustBeAssignableSlow() {
	if f == 0 {
		panic(&ValueError{methodNameSkip(), Invalid})
	}
	// Assignable if addressable and not read-only.
	if f&flagRO != 0 {
		panic("reflect: " + methodNameSkip() + " using value obtained using unexported field")
	}
	if f&flagAddr == 0 {
		panic("reflect: " + methodNameSkip() + " using unaddressable value")
	}
}
265
266
267
268
269
270
// Addr returns a pointer value representing the address of v.
// It panics if CanAddr() returns false.
// Addr is typically used to obtain a pointer to a struct field
// or slice element in order to call a method that requires a
// pointer receiver.
func (v Value) Addr() Value {
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.Addr of unaddressable value")
	}
	// Preserve flagRO instead of using v.flag.ro() so that
	// v.Addr().Elem() is equivalent to v (#32772)
	fl := v.flag & flagRO
	return Value{v.typ.ptrTo(), v.ptr, fl | flag(Pointer)}
}
280
281
282
// Bool returns v's underlying value.
// It panics if v's kind is not Bool.
func (v Value) Bool() bool {
	v.mustBe(Bool)
	// Bool is never pointer-shaped, so v.ptr points at the data.
	return *(*bool)(v.ptr)
}
287
288
289
// Bytes returns v's underlying value.
// It panics if v's underlying value is not a slice of bytes.
func (v Value) Bytes() []byte {
	v.mustBe(Slice)
	if v.typ.Elem().Kind() != Uint8 {
		panic("reflect.Value.Bytes of non-byte slice")
	}
	// Slice is always bigger than a word; assume flagIndir.
	return *(*[]byte)(v.ptr)
}
298
299
300
301 func (v Value) runes() []rune {
302 v.mustBe(Slice)
303 if v.typ.Elem().Kind() != Int32 {
304 panic("reflect.Value.Bytes of non-rune slice")
305 }
306
307 return *(*[]rune)(v.ptr)
308 }
309
310
311
312
313
314
315 func (v Value) CanAddr() bool {
316 return v.flag&flagAddr != 0
317 }
318
319
320
321
322
323
324 func (v Value) CanSet() bool {
325 return v.flag&(flagAddr|flagRO) == flagAddr
326 }
327
328
329
330
331
332
333
334
335
// Call calls the function v with the input arguments in.
// As in Go, each input argument must be assignable to the
// type of the function's corresponding input parameter.
// Call panics if v's Kind is not Func.
// As in Go, it panics if v is an unexported method or
// a method obtained by the use of unexported struct fields.
// It returns the output results as Values.
// If v is a variadic function, Call itself creates the variadic
// slice parameter from the corresponding arguments.
func (v Value) Call(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("Call", in)
}
341
342
343
344
345
346
347
348
// CallSlice calls the variadic function v with the input arguments in,
// assigning the slice in[len(in)-1] directly to v's final variadic
// argument (no implicit slice is created, unlike Call).
// CallSlice panics if v's Kind is not Func or if v is not variadic.
// It returns the output results as Values.
func (v Value) CallSlice(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("CallSlice", in)
}
354
// callGC, when true, forces garbage collections around reflect calls
// below — presumably toggled only by tests to stress the collector;
// confirm against the test files before relying on this.
var callGC bool

// debugReflectCall enables verbose dumps of the computed call ABI.
const debugReflectCall = false
358
// call implements Call and CallSlice: it validates the arguments
// against the function's type, lays them out into a stack frame and/or
// argument registers according to the precomputed ABI, invokes the
// runtime's reflectcall, and repackages the results as Values.
// op is "Call" or "CallSlice" and selects the variadic handling and
// the text used in panic messages.
func (v Value) call(op string, in []Value) []Value {
	// Get function pointer, type.
	t := (*funcType)(unsafe.Pointer(v.typ))
	var (
		fn unsafe.Pointer
		rcvr Value
		rcvrtype *rtype
	)
	if v.flag&flagMethod != 0 {
		// Method value: resolve the real receiver/func from the
		// method index stored in the upper flag bits.
		rcvr = v
		rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
	} else if v.flag&flagIndir != 0 {
		fn = *(*unsafe.Pointer)(v.ptr)
	} else {
		fn = v.ptr
	}

	if fn == nil {
		panic("reflect.Value.Call: call of nil function")
	}

	isSlice := op == "CallSlice"
	n := t.NumIn()
	isVariadic := t.IsVariadic()
	// Check argument counts: CallSlice must match exactly; Call may
	// supply any number >= n-1 for a variadic function.
	if isSlice {
		if !isVariadic {
			panic("reflect: CallSlice of non-variadic function")
		}
		if len(in) < n {
			panic("reflect: CallSlice with too few input arguments")
		}
		if len(in) > n {
			panic("reflect: CallSlice with too many input arguments")
		}
	} else {
		if isVariadic {
			n--
		}
		if len(in) < n {
			panic("reflect: Call with too few input arguments")
		}
		if !isVariadic && len(in) > n {
			panic("reflect: Call with too many input arguments")
		}
	}
	for _, x := range in {
		if x.Kind() == Invalid {
			panic("reflect: " + op + " using zero Value argument")
		}
	}
	for i := 0; i < n; i++ {
		if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(targ) {
			panic("reflect: " + op + " using " + xt.String() + " as type " + targ.String())
		}
	}
	if !isSlice && isVariadic {
		// Prepare slice for remaining variadic values.
		m := len(in) - n
		slice := MakeSlice(t.In(n), m, m)
		elem := t.In(n).Elem()
		for i := 0; i < m; i++ {
			x := in[n+i]
			if xt := x.Type(); !xt.AssignableTo(elem) {
				panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
			}
			slice.Index(i).Set(x)
		}
		origIn := in
		in = make([]Value, n+1)
		copy(in[:n], origIn)
		in[n] = slice
	}

	nin := len(in)
	if nin != t.NumIn() {
		panic("reflect.Value.Call: wrong argument count")
	}
	nout := t.NumOut()

	// Register argument space.
	var regArgs abi.RegArgs

	// Compute frame type. NOTE: abi here shadows the imported
	// internal/abi package for the rest of this function.
	frametype, framePool, abi := funcLayout(t, rcvrtype)

	// Allocate a chunk of memory for frame if needed.
	var stackArgs unsafe.Pointer
	if frametype.size != 0 {
		if nout == 0 {
			stackArgs = framePool.Get().(unsafe.Pointer)
		} else {
			// Can't use pool if the function has return values.
			// We will leak pointer to args in ret, so its lifetime is not scoped.
			stackArgs = unsafe_New(frametype)
		}
	}
	frameSize := frametype.size

	if debugReflectCall {
		println("reflect.call", t.String())
		abi.dump()
	}

	// Copy inputs into args.

	// Handle receiver.
	inStart := 0
	if rcvrtype != nil {
		// Guaranteed to only be one word in size,
		// so it will only take up exactly 1 abiStep (either
		// in a register or on the stack).
		switch st := abi.call.steps[0]; st.kind {
		case abiStepStack:
			storeRcvr(rcvr, stackArgs)
		case abiStepIntReg, abiStepPointer:
			// Even pointers can go into the uintptr slot because
			// they'll be kept alive by the Values referenced by
			// this frame. Reflection forces these to be heap-allocated,
			// so we don't need to worry about stack copying.
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
		case abiStepFloatReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
		default:
			panic("unknown ABI parameter kind")
		}
		inStart = 1
	}

	// Handle arguments.
	for i, v := range in {
		v.mustBeExported()
		targ := t.In(i).(*rtype)
		// TODO(mknyszek): Figure out if it's possible to get some
		// scratch space for this assignment check. Previously, it
		// was possible to use space in the argument frame.
		v = v.assignTo("reflect.Value.Call", targ, nil)
	stepsLoop:
		for _, st := range abi.call.stepsForValue(i + inStart) {
			switch st.kind {
			case abiStepStack:
				// Copy values to the "stack."
				addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
				if v.flag&flagIndir != 0 {
					typedmemmove(targ, addr, v.ptr)
				} else {
					*(*unsafe.Pointer)(addr) = v.ptr
				}
				// There's only one step for a stack-allocated value.
				break stepsLoop
			case abiStepIntReg, abiStepPointer:
				// Copy values to "integer registers."
				if v.flag&flagIndir != 0 {
					offset := add(v.ptr, st.offset, "precomputed value offset")
					if st.kind == abiStepPointer {
						// Duplicate this pointer in the pointer area of the
						// register space. Otherwise, there's the potential for
						// this to be the last reference to v.ptr.
						regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
					}
					intToReg(&regArgs, st.ireg, st.size, offset)
				} else {
					if st.kind == abiStepPointer {
						// See the comment in abiStepPointer case above.
						regArgs.Ptrs[st.ireg] = v.ptr
					}
					regArgs.Ints[st.ireg] = uintptr(v.ptr)
				}
			case abiStepFloatReg:
				// Copy values to "float registers."
				if v.flag&flagIndir == 0 {
					panic("attempted to copy pointer to FP register")
				}
				offset := add(v.ptr, st.offset, "precomputed value offset")
				floatToReg(&regArgs, st.freg, st.size, offset)
			default:
				panic("unknown ABI part kind")
			}
		}
	}
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	frameSize = align(frameSize, goarch.PtrSize)
	frameSize += abi.spill

	// Mark pointers in registers for the return path.
	regArgs.ReturnIsPtr = abi.outRegPtrs

	if debugReflectCall {
		regArgs.Dump()
	}

	// For testing; see TestCallArgLive.
	if callGC {
		runtime.GC()
	}

	// Call.
	call(frametype, fn, stackArgs, uint32(frametype.size), uint32(abi.retOffset), uint32(frameSize), &regArgs)

	// For testing; see TestCallMethodJump.
	if callGC {
		runtime.GC()
	}

	var ret []Value
	if nout == 0 {
		if stackArgs != nil {
			typedmemclr(frametype, stackArgs)
			framePool.Put(stackArgs)
		}
	} else {
		if stackArgs != nil {
			// Zero the now unused input area of args,
			// because the Values returned by this function contain pointers to the args object,
			// and will thus keep the args object alive indefinitely.
			typedmemclrpartial(frametype, stackArgs, 0, abi.retOffset)
		}

		// Wrap Values around return values in args.
		ret = make([]Value, nout)
		for i := 0; i < nout; i++ {
			tv := t.Out(i)
			if tv.Size() == 0 {
				// For zero-sized return value, args+off may point to the next object.
				// In this case, return the zero value instead.
				ret[i] = Zero(tv)
				continue
			}
			steps := abi.ret.stepsForValue(i)
			if st := steps[0]; st.kind == abiStepStack {
				// This value is on the stack. If part of a value is stack
				// allocated, the entire value is according to the ABI. So
				// just make an indirection into the allocated frame.
				fl := flagIndir | flag(tv.Kind())
				ret[i] = Value{tv.common(), add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
				// Note: this does introduce false sharing between results -
				// if any result is live, they are all live.
				// (And the space for the args is live as well, but as we've
				// cleared that space it isn't as big a deal.)
				continue
			}

			// Handle pointers passed in registers.
			if !ifaceIndir(tv.common()) {
				// Pointer-valued data gets put directly
				// into v.ptr.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", tv.String(), "\n")
					panic("mismatch between ABI description and types")
				}
				ret[i] = Value{tv.common(), regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
				continue
			}

			// All that's left is values passed in registers that we need to
			// create space for and copy values back into.
			//
			// TODO(mknyszek): We make a new allocation for each register-allocated
			// value, but previously we could always point into the heap-allocated
			// stack frame. This is a regression that could be fixed by adding
			// additional space to the allocated stack frame and storing the
			// register-allocated return values into the allocated stack frame and
			// referring there in the resulting Value.
			s := unsafe_New(tv.common())
			for _, st := range steps {
				switch st.kind {
				case abiStepIntReg:
					offset := add(s, st.offset, "precomputed value offset")
					intFromReg(&regArgs, st.ireg, st.size, offset)
				case abiStepPointer:
					s := add(s, st.offset, "precomputed value offset")
					*((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
				case abiStepFloatReg:
					offset := add(s, st.offset, "precomputed value offset")
					floatFromReg(&regArgs, st.freg, st.size, offset)
				case abiStepStack:
					panic("register-based return value has stack component")
				default:
					panic("unknown ABI part kind")
				}
			}
			ret[i] = Value{tv.common(), s, flagIndir | flag(tv.Kind())}
		}
	}

	return ret
}
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
// callReflect is the call implementation used by a function
// returned by MakeFunc. In many ways it is the opposite of the
// method Value.call above: it unpacks the incoming call frame and
// registers into a []Value, calls the user's Go closure, and packs
// the returned []Value back into the frame/registers for the caller.
// *retValid is set once the results are fully written so the runtime
// knows when the result section of the frame becomes scannable.
func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	if callGC {
		// Call GC upon entry during testing.
		// Getting our stack scanned here is the biggest hazard, because
		// our caller (makeFuncStub) could have failed to place the last
		// pointer to a value in regs' pointer space, in which case it
		// won't be visible to the GC.
		runtime.GC()
	}
	ftyp := ctxt.ftyp
	f := ctxt.fn

	_, _, abi := funcLayout(ftyp, nil)

	// Copy arguments into Values.
	ptr := frame
	in := make([]Value, 0, int(ftyp.inCount))
	for i, typ := range ftyp.in() {
		if typ.Size() == 0 {
			in = append(in, Zero(typ))
			continue
		}
		v := Value{typ, nil, flag(typ.Kind())}
		steps := abi.call.stepsForValue(i)
		if st := steps[0]; st.kind == abiStepStack {
			if ifaceIndir(typ) {
				// value cannot be inlined in interface data.
				// Must make a copy, because f might keep a reference to it,
				// and we cannot let f keep a reference to the stack frame
				// after this function returns, not even a read-only reference.
				v.ptr = unsafe_New(typ)
				if typ.size > 0 {
					typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
				}
				v.flag |= flagIndir
			} else {
				v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
			}
		} else {
			if ifaceIndir(typ) {
				// All that's left is values passed in registers that we need to
				// create space for the values.
				v.flag |= flagIndir
				v.ptr = unsafe_New(typ)
				for _, st := range steps {
					switch st.kind {
					case abiStepIntReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intFromReg(regs, st.ireg, st.size, offset)
					case abiStepPointer:
						s := add(v.ptr, st.offset, "precomputed value offset")
						*((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
					case abiStepFloatReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						floatFromReg(regs, st.freg, st.size, offset)
					case abiStepStack:
						panic("register-based return value has stack component")
					default:
						panic("unknown ABI part kind")
					}
				}
			} else {
				// Pointer-valued data gets put directly
				// into v.ptr.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", typ.String(), "\n")
					panic("mismatch between ABI description and types")
				}
				v.ptr = regs.Ptrs[steps[0].ireg]
			}
		}
		in = append(in, v)
	}

	// Call underlying function.
	out := f(in)
	numOut := ftyp.NumOut()
	if len(out) != numOut {
		panic("reflect: wrong return count from function created by MakeFunc")
	}

	// Copy results back into argument frame and register space.
	if numOut > 0 {
		for i, typ := range ftyp.out() {
			v := out[i]
			if v.typ == nil {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned zero Value")
			}
			if v.flag&flagRO != 0 {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned value obtained from unexported field")
			}
			if typ.size == 0 {
				continue
			}

			// Convert v to type typ if v is assignable to a variable
			// of type t in the language spec.
			// See issue 28761.
			//
			// TODO(mknyszek): In the switch to the register ABI we lost
			// the scratch space here for the register cases (and
			// temporarily for all the cases).
			//
			// If/when this happens, take note of the following:
			//
			// We must clear the destination before calling assignTo,
			// in case assignTo writes (with memory barriers) to the
			// target location used as scratch space. See issue 39541.
			v = v.assignTo("reflect.MakeFunc", typ, nil)
		stepsLoop:
			for _, st := range abi.ret.stepsForValue(i) {
				switch st.kind {
				case abiStepStack:
					// Copy values to the "stack."
					addr := add(ptr, st.stkOff, "precomputed stack arg offset")
					// Do not use write barriers. The stack space used
					// for this call is not adequately zeroed, and we
					// are careful to keep the arguments alive until we
					// return to makeFuncStub's caller.
					if v.flag&flagIndir != 0 {
						memmove(addr, v.ptr, st.size)
					} else {
						// This case must be a pointer type.
						*(*uintptr)(addr) = uintptr(v.ptr)
					}
					// There's only one step for a stack-allocated value.
					break stepsLoop
				case abiStepIntReg, abiStepPointer:
					// Copy values to "integer registers."
					if v.flag&flagIndir != 0 {
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intToReg(regs, st.ireg, st.size, offset)
					} else {
						// Only populate the Ints space on the return path.
						// This is safe because out is kept alive until the
						// end of this function, and the return path through
						// makeFuncStub has no preemption, so these pointers
						// are always visible to the GC.
						regs.Ints[st.ireg] = uintptr(v.ptr)
					}
				case abiStepFloatReg:
					// Copy values to "float registers."
					if v.flag&flagIndir == 0 {
						panic("attempted to copy pointer to FP register")
					}
					offset := add(v.ptr, st.offset, "precomputed value offset")
					floatToReg(regs, st.freg, st.size, offset)
				default:
					panic("unknown ABI part kind")
				}
			}
		}
	}

	// Announce that the return values are valid.
	// After this point the runtime can depend on the return values being valid.
	*retValid = true

	// We have to make sure that the out slice lives at least until
	// the runtime knows the return values are valid. Otherwise, the
	// return values might not be scanned by anyone during a GC.
	// (out would be dead, and the return slots not yet alive.)
	runtime.KeepAlive(out)

	// runtime.getArgInfo expects to be able to find ctxt on the
	// stack when it finds our caller, makeFuncStub. Make sure it
	// doesn't get garbage collected.
	runtime.KeepAlive(ctxt)
}
839
840
841
842
843
844
845
846
// methodReceiver returns information about the receiver
// described by v. The Value v may or may not have the
// flagMethod bit set, so the kind cached in v.flag should
// not be used.
// The return value rcvrtype gives the method's actual receiver type.
// The return value t gives the method type signature (without the receiver).
// The return value fn is a pointer to the method code.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *rtype, t *funcType, fn unsafe.Pointer) {
	i := methodIndex
	if v.typ.Kind() == Interface {
		// Interface receiver: look the method up in the itab.
		tt := (*interfaceType)(unsafe.Pointer(v.typ))
		if uint(i) >= uint(len(tt.methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.methods[i]
		if !tt.nameOff(m.name).isExported() {
			panic("reflect: " + op + " of unexported method")
		}
		iface := (*nonEmptyInterface)(v.ptr)
		if iface.itab == nil {
			panic("reflect: " + op + " of method on nil interface value")
		}
		rcvrtype = iface.itab.typ
		fn = unsafe.Pointer(&iface.itab.fun[i])
		t = (*funcType)(unsafe.Pointer(tt.typeOff(m.typ)))
	} else {
		// Concrete receiver: index into the type's exported method set.
		rcvrtype = v.typ
		ms := v.typ.exportedMethods()
		if uint(i) >= uint(len(ms)) {
			panic("reflect: internal error: invalid method index")
		}
		m := ms[i]
		if !v.typ.nameOff(m.name).isExported() {
			panic("reflect: " + op + " of unexported method")
		}
		ifn := v.typ.textOff(m.ifn)
		fn = unsafe.Pointer(&ifn)
		t = (*funcType)(unsafe.Pointer(v.typ.typeOff(m.mtyp)))
	}
	return
}
881
882
883
884
885
// storeRcvr stores the receiver v in the word pointed to by p,
// in the single-word form expected by the called method's ABI.
func storeRcvr(v Value, p unsafe.Pointer) {
	t := v.typ
	if t.Kind() == Interface {
		// the interface data word becomes the receiver word
		iface := (*nonEmptyInterface)(v.ptr)
		*(*unsafe.Pointer)(p) = iface.word
	} else if v.flag&flagIndir != 0 && !ifaceIndir(t) {
		// Pointer-shaped value stored indirectly: load it first.
		*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
	} else {
		*(*unsafe.Pointer)(p) = v.ptr
	}
}
898
899
900
// align rounds x up to the next multiple of n.
// n must be a power of two for the bit trick below to be valid.
func align(x, n uintptr) uintptr {
	mask := n - 1
	return (x + mask) &^ mask
}
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
// callMethod is the call implementation used by a function returned
// by makeMethodValue (used by v.Method(i).Interface()).
// It is a streamlined version of the usual reflect call: the caller has
// already laid out the argument frame for a call of the method's type
// (without the receiver); callMethod translates that frame/register set
// into one for the actual method (with the receiver inserted at
// position 0), makes the call, and copies the results back.
func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	rcvr := ctxt.rcvr
	rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)

	// There are two ABIs at play here.
	//
	// methodValueCall was invoked with the ABI assuming there was no
	// receiver ("value ABI") and that's what frame and regs hold.
	//
	// Meanwhile, we need to actually call the method with a receiver,
	// which has its own ABI ("method ABI"). Everything that follows is a
	// translation between the two.
	_, _, valueABI := funcLayout(valueFuncType, nil)
	valueFrame, valueRegs := frame, regs
	methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)

	// Make a new frame that is one word bigger so we can store the receiver.
	// This space is used for both arguments and return values.
	methodFrame := methodFramePool.Get().(unsafe.Pointer)
	var methodRegs abi.RegArgs

	// Deposit receiver in the frame or registers, per the method ABI.
	if st := methodABI.call.steps[0]; st.kind == abiStepStack {
		// Method argument frame is big enough to hold
		// our receiver; store it at the front.
		storeRcvr(rcvr, methodFrame)
	} else {
		// Receiver goes into a register.
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints))
	}

	// Translate the rest of the arguments: argument i of the value ABI
	// is argument i+1 of the method ABI (shifted by the receiver).
	for i, t := range valueFuncType.in() {
		valueSteps := valueABI.call.stepsForValue(i)
		methodSteps := methodABI.call.stepsForValue(i + 1)

		// Zero-sized types are trivial: nothing to do.
		if len(valueSteps) == 0 {
			if len(methodSteps) != 0 {
				panic("method ABI and value ABI do not align")
			}
			continue
		}

		// There are four cases to handle in translating each
		// argument:
		// 1. Stack -> stack translation.
		// 2. Stack -> registers translation.
		// 3. Registers -> stack translation.
		// 4. Registers -> registers translation.

		// If the value ABI passes the value on the stack,
		// then the method ABI does too, unless it has more
		// registers available, so it may pass it in registers.
		if vStep := valueSteps[0]; vStep.kind == abiStepStack {
			mStep := methodSteps[0]
			// Case 1: stack -> stack.
			if mStep.kind == abiStepStack {
				if vStep.size != mStep.size {
					panic("method ABI and value ABI do not align")
				}
				typedmemmove(t,
					add(methodFrame, mStep.stkOff, "precomputed stack offset"),
					add(valueFrame, vStep.stkOff, "precomputed stack offset"))
				continue
			}
			// Case 2: stack -> registers.
			for _, mStep := range methodSteps {
				from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
				switch mStep.kind {
				case abiStepPointer:
					// Keep the pointer visible to the GC, then also
					// copy the raw word (fallthrough).
					methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
					fallthrough
				case abiStepIntReg:
					intToReg(&methodRegs, mStep.ireg, mStep.size, from)
				case abiStepFloatReg:
					floatToReg(&methodRegs, mStep.freg, mStep.size, from)
				default:
					panic("unexpected method step")
				}
			}
			continue
		}
		// Case 3: registers -> stack.
		if mStep := methodSteps[0]; mStep.kind == abiStepStack {
			for _, vStep := range valueSteps {
				to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
				switch vStep.kind {
				case abiStepPointer:
					*(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
				case abiStepIntReg:
					intFromReg(valueRegs, vStep.ireg, vStep.size, to)
				case abiStepFloatReg:
					floatFromReg(valueRegs, vStep.freg, vStep.size, to)
				default:
					panic("unexpected value step")
				}
			}
			continue
		}
		// Case 4: registers -> registers.
		if len(valueSteps) != len(methodSteps) {
			// Because it's the same type for the value, and it's assigned
			// to registers both times, it should always take up the same
			// number of registers for each ABI.
			panic("method ABI and value ABI don't align")
		}
		for i, vStep := range valueSteps {
			mStep := methodSteps[i]
			if mStep.kind != vStep.kind {
				panic("method ABI and value ABI don't align")
			}
			switch vStep.kind {
			case abiStepPointer:
				// Copy the GC-visible pointer too (fallthrough copies the word).
				methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
				fallthrough
			case abiStepIntReg:
				methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
			case abiStepFloatReg:
				methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
			default:
				panic("unexpected value step")
			}
		}
	}

	methodFrameSize := methodFrameType.size
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	methodFrameSize = align(methodFrameSize, goarch.PtrSize)
	methodFrameSize += methodABI.spill

	// Mark pointers in registers for the return path.
	methodRegs.ReturnIsPtr = methodABI.outRegPtrs

	// Call.
	// Call copies the arguments from scratch to the stack, calls fn,
	// and then copies the results back into scratch.
	call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.size), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)

	// Copy the return values out: registers first (the two ABIs assign
	// results identically, so a wholesale register copy is valid), then
	// the stack-resident results.
	if valueRegs != nil {
		*valueRegs = methodRegs
	}
	if retSize := methodFrameType.size - methodABI.retOffset; retSize > 0 {
		valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
		methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
		// This copies back only the results; arguments are left alone.
		memmove(valueRet, methodRet, retSize)
	}

	// Tell the runtime it can now depend on the return values
	// being properly initialized.
	*retValid = true

	// Clear the scratch space and put it back in the pool.
	// This must happen after the statement above, so that the return
	// values will always be scanned by someone.
	typedmemclr(methodFrameType, methodFrame)
	methodFramePool.Put(methodFrame)

	// See the comment in callReflect.
	runtime.KeepAlive(ctxt)

	// Keep valueRegs alive because it may hold live pointer results.
	// The caller (methodValueCall) has it as a stack object, which is only
	// scanned when there is a reference to it.
	runtime.KeepAlive(valueRegs)
}
1104
1105
// funcName returns the name of f, for use in error messages.
// It reads the code pointer straight out of the func value header.
func funcName(f func([]Value) []Value) string {
	pc := *(*uintptr)(unsafe.Pointer(&f))
	rf := runtime.FuncForPC(pc)
	if rf != nil {
		return rf.Name()
	}
	return "closure"
}
1114
1115
1116
// Cap returns v's capacity.
// It panics if v's Kind is not Array, Chan, or Slice.
func (v Value) Cap() int {
	k := v.kind()
	switch k {
	case Array:
		return v.typ.Len()
	case Chan:
		return chancap(v.pointer())
	case Slice:
		// Slice is always bigger than a word; assume flagIndir.
		return (*unsafeheader.Slice)(v.ptr).Cap
	}
	panic(&ValueError{"reflect.Value.Cap", v.kind()})
}
1130
1131
1132
// Close closes the channel v.
// It panics if v's Kind is not Chan.
func (v Value) Close() {
	v.mustBe(Chan)
	v.mustBeExported()
	chanclose(v.pointer())
}
1138
1139
1140 func (v Value) CanComplex() bool {
1141 switch v.kind() {
1142 case Complex64, Complex128:
1143 return true
1144 default:
1145 return false
1146 }
1147 }
1148
1149
1150
// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not Complex64 or Complex128.
func (v Value) Complex() complex128 {
	k := v.kind()
	switch k {
	case Complex64:
		return complex128(*(*complex64)(v.ptr))
	case Complex128:
		return *(*complex128)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Complex", v.kind()})
}
1161
1162
1163
1164
1165
// Elem returns the value that the interface v contains
// or that the pointer v points to.
// It panics if v's Kind is not Interface or Pointer.
// It returns the zero Value if v is nil.
func (v Value) Elem() Value {
	k := v.kind()
	switch k {
	case Interface:
		var eface any
		if v.typ.NumMethod() == 0 {
			eface = *(*any)(v.ptr)
		} else {
			// Reinterpret the iface header as an eface by going
			// through a one-method interface type.
			eface = (any)(*(*interface {
				M()
			})(v.ptr))
		}
		x := unpackEface(eface)
		if x.flag != 0 {
			// Propagate read-only status to the extracted value.
			x.flag |= v.flag.ro()
		}
		return x
	case Pointer:
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			if ifaceIndir(v.typ) {
				// This is a pointer to a not-in-heap object. ptr points to a uintptr
				// in the heap. That uintptr is the address of a not-in-heap object.
				// In general, pointers to not-in-heap objects can be total junk.
				// But Elem() is asking to dereference it, so the user has asserted
				// that at least it is a valid pointer (not just an integer stored in
				// a pointer slot). So let's check, to make sure that it isn't a pointer
				// that the runtime will crash on if it sees it during GC or write barriers.
				// Since it is a not-in-heap pointer, all pointers to the heap are
				// forbidden! That makes the test pretty easy.
				if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
					panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
				}
			}
			ptr = *(*unsafe.Pointer)(ptr)
		}
		// The returned value's address is v's value.
		if ptr == nil {
			return Value{}
		}
		tt := (*ptrType)(unsafe.Pointer(v.typ))
		typ := tt.elem
		// The pointee is addressable (flagAddr) and always indirect.
		fl := v.flag&flagRO | flagIndir | flagAddr
		fl |= flag(typ.Kind())
		return Value{typ, ptr, fl}
	}
	panic(&ValueError{"reflect.Value.Elem", v.kind()})
}
1215
1216
1217
// Field returns the i'th field of the struct v.
// It panics if v's Kind is not Struct or i is out of range.
func (v Value) Field(i int) Value {
	if v.kind() != Struct {
		panic(&ValueError{"reflect.Value.Field", v.kind()})
	}
	tt := (*structType)(unsafe.Pointer(v.typ))
	if uint(i) >= uint(len(tt.fields)) {
		panic("reflect: Field index out of range")
	}
	field := &tt.fields[i]
	typ := field.typ

	// Inherit permission bits from v, but clear flagEmbedRO.
	fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
	// Using an unexported field forces flagRO.
	if !field.name.isExported() {
		if field.embedded() {
			fl |= flagEmbedRO
		} else {
			fl |= flagStickyRO
		}
	}
	// Either flagIndir is set and v.ptr points at struct,
	// or flagIndir is not set and v.ptr is the actual struct data.
	// In the former case, we want v.ptr + offset.
	// In the latter case, we must have field.offset = 0,
	// so v.ptr + field.offset is still the correct address.
	ptr := add(v.ptr, field.offset(), "same as non-reflect &v.field")
	return Value{typ, ptr, fl}
}
1247
1248
1249
1250
// FieldByIndex returns the nested field corresponding to index,
// following pointers to embedded structs along the way.
// It panics if evaluation requires stepping through a nil
// pointer or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
	if len(index) == 1 {
		// Fast path: single-field index needs no indirection handling.
		return v.Field(index[0])
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Dereference intermediate pointers to embedded structs.
			if v.Kind() == Pointer && v.typ.Elem().Kind() == Struct {
				if v.IsNil() {
					panic("reflect: indirection through nil pointer to embedded struct")
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v
}
1269
1270
1271
1272
1273
1274 func (v Value) FieldByIndexErr(index []int) (Value, error) {
1275 if len(index) == 1 {
1276 return v.Field(index[0]), nil
1277 }
1278 v.mustBe(Struct)
1279 for i, x := range index {
1280 if i > 0 {
1281 if v.Kind() == Ptr && v.typ.Elem().Kind() == Struct {
1282 if v.IsNil() {
1283 return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + v.typ.Elem().Name())
1284 }
1285 v = v.Elem()
1286 }
1287 }
1288 v = v.Field(x)
1289 }
1290 return v, nil
1291 }
1292
1293
1294
1295
1296 func (v Value) FieldByName(name string) Value {
1297 v.mustBe(Struct)
1298 if f, ok := v.typ.FieldByName(name); ok {
1299 return v.FieldByIndex(f.Index)
1300 }
1301 return Value{}
1302 }
1303
1304
1305
1306
1307
1308 func (v Value) FieldByNameFunc(match func(string) bool) Value {
1309 if f, ok := v.typ.FieldByNameFunc(match); ok {
1310 return v.FieldByIndex(f.Index)
1311 }
1312 return Value{}
1313 }
1314
1315
1316 func (v Value) CanFloat() bool {
1317 switch v.kind() {
1318 case Float32, Float64:
1319 return true
1320 default:
1321 return false
1322 }
1323 }
1324
1325
1326
// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not Float32 or Float64.
func (v Value) Float() float64 {
	k := v.kind()
	switch k {
	case Float32:
		return float64(*(*float32)(v.ptr))
	case Float64:
		return *(*float64)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Float", v.kind()})
}
1337
// uint8Type is the reflect type of uint8, used by Index below to
// describe the byte element returned when indexing a string.
var uint8Type = TypeOf(uint8(0)).(*rtype)
1339
1340
1341
// Index returns v's i'th element.
// It panics if v's Kind is not Array, Slice, or String or i is out of range.
func (v Value) Index(i int) Value {
	switch v.kind() {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		if uint(i) >= uint(tt.len) {
			panic("reflect: array index out of range")
		}
		typ := tt.elem
		offset := uintptr(i) * typ.size

		// Either flagIndir is set and v.ptr points at array,
		// or flagIndir is not set and v.ptr is the actual array data.
		// In the former case, we want v.ptr + offset.
		// In the latter case, we must be doing Index(0), so offset = 0,
		// so v.ptr + offset is still the correct address.
		val := add(v.ptr, offset, "same as &v[i], i < tt.len")
		// bits same as overall array
		fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case Slice:
		// Element flag same as Elem of Pointer.
		// Addressable, indirect, possibly read-only.
		s := (*unsafeheader.Slice)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: slice index out of range")
		}
		tt := (*sliceType)(unsafe.Pointer(v.typ))
		typ := tt.elem
		val := arrayAt(s.Data, i, typ.size, "i < s.Len")
		fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: string index out of range")
		}
		// The byte element of a string is not addressable.
		p := arrayAt(s.Data, i, 1, "i < s.Len")
		fl := v.flag.ro() | flag(Uint8) | flagIndir
		return Value{uint8Type, p, fl}
	}
	panic(&ValueError{"reflect.Value.Index", v.kind()})
}
1385
1386
1387 func (v Value) CanInt() bool {
1388 switch v.kind() {
1389 case Int, Int8, Int16, Int32, Int64:
1390 return true
1391 default:
1392 return false
1393 }
1394 }
1395
1396
1397
// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) Int() int64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Int:
		return int64(*(*int)(p))
	case Int8:
		return int64(*(*int8)(p))
	case Int16:
		return int64(*(*int16)(p))
	case Int32:
		return int64(*(*int32)(p))
	case Int64:
		return *(*int64)(p)
	}
	panic(&ValueError{"reflect.Value.Int", v.kind()})
}
1415
1416
// CanInterface reports whether Interface can be used without panicking.
func (v Value) CanInterface() bool {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.CanInterface", Invalid})
	}
	// Values reached via unexported fields cannot be exported.
	return v.flag&flagRO == 0
}
1423
1424
1425
1426
1427
1428
// Interface returns v's current value as an interface{}.
// It is equivalent to:
//
//	var i interface{} = (v's underlying value)
//
// It panics if the Value was obtained by accessing
// unexported struct fields.
func (v Value) Interface() (i any) {
	return valueInterface(v, true)
}
1432
// valueInterface implements Interface. When safe is true it refuses
// values obtained via unexported fields (the exported behavior);
// internal callers may pass safe=false to skip that check.
func valueInterface(v Value, safe bool) any {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Interface", Invalid})
	}
	if safe && v.flag&flagRO != 0 {
		// Do not allow access to unexported values via Interface,
		// because they might be pointers that should not be
		// writable or methods or function that should not be callable.
		panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
	}
	if v.flag&flagMethod != 0 {
		// Materialize a method value into a real func value first.
		v = makeMethodValue("Interface", v)
	}

	if v.kind() == Interface {
		// Special case: return the element inside the interface.
		// Empty interface has one layout, all interfaces with
		// methods have a second layout.
		if v.NumMethod() == 0 {
			return *(*any)(v.ptr)
		}
		return *(*interface {
			M()
		})(v.ptr)
	}

	// TODO: pass safe to packEface so we don't need to copy if safe==true?
	return packEface(v)
}
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
// InterfaceData returns a pair of unspecified uintptr values.
// It panics if v's Kind is not Interface.
//
// In earlier versions of Go, this function returned the interface's
// value as a uintptr pair. As of Go 1.4, the implementation of
// interface values precludes any defined use of InterfaceData.
//
// Deprecated: The memory representation of interface values is not
// compatible with InterfaceData.
func (v Value) InterfaceData() [2]uintptr {
	v.mustBe(Interface)
	// The compiler loses track as it converts to uintptr. Windows of
	// opportunity for the garbage collector are introduced here, but
	// the consequences are on the callers of this deprecated API.
	return *(*[2]uintptr)(v.ptr)
}
1481
1482
1483
1484
1485
1486
1487
1488
// IsNil reports whether its argument v is nil. The argument must be
// a chan, func, interface, map, pointer, or slice value; if it is
// not, IsNil panics. Note that IsNil is not always equivalent to a
// regular comparison with nil in Go. For example, if v was created
// by calling ValueOf with an uninitialized interface variable i,
// i==nil will be true but v.IsNil will panic as v will be the zero
// Value.
func (v Value) IsNil() bool {
	k := v.kind()
	switch k {
	case Chan, Func, Map, Pointer, UnsafePointer:
		if v.flag&flagMethod != 0 {
			// A method value is a bound closure; it is never nil.
			return false
		}
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		return ptr == nil
	case Interface, Slice:
		// Both interface and slice are nil if first word is 0.
		// Both are always bigger than a word; assume flagIndir.
		return *(*unsafe.Pointer)(v.ptr) == nil
	}
	panic(&ValueError{"reflect.Value.IsNil", v.kind()})
}
1508
1509
1510
1511
1512
1513
1514 func (v Value) IsValid() bool {
1515 return v.flag != 0
1516 }
1517
1518
1519
// IsZero reports whether v is the zero value for its type.
// It panics if the Value is invalid.
func (v Value) IsZero() bool {
	switch v.kind() {
	case Bool:
		return !v.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == 0
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == 0
	case Float32, Float64:
		// Compare bit patterns so that negative zero is NOT zero.
		return math.Float64bits(v.Float()) == 0
	case Complex64, Complex128:
		c := v.Complex()
		return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
	case Array:
		// An array is zero iff every element is zero.
		for i := 0; i < v.Len(); i++ {
			if !v.Index(i).IsZero() {
				return false
			}
		}
		return true
	case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
		return v.IsNil()
	case String:
		return v.Len() == 0
	case Struct:
		// A struct is zero iff every field is zero.
		for i := 0; i < v.NumField(); i++ {
			if !v.Field(i).IsZero() {
				return false
			}
		}
		return true
	default:
		// This should never happen, but will act as a safeguard for
		// later, as a default value doesn't makes sense here.
		panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
	}
}
1557
1558
1559
// Kind returns v's Kind.
// If v is the zero Value (IsValid returns false), Kind returns Invalid.
func (v Value) Kind() Kind {
	return v.kind()
}
1563
1564
1565
// Len returns v's length.
// It panics if v's Kind is not Array, Chan, Map, Slice, or String.
func (v Value) Len() int {
	k := v.kind()
	switch k {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		return int(tt.len)
	case Chan:
		return chanlen(v.pointer())
	case Map:
		return maplen(v.pointer())
	case Slice:
		// Slice is bigger than a word; assume flagIndir.
		return (*unsafeheader.Slice)(v.ptr).Len
	case String:
		// String is bigger than a word; assume flagIndir.
		return (*unsafeheader.String)(v.ptr).Len
	}
	panic(&ValueError{"reflect.Value.Len", v.kind()})
}
1585
// stringType is the reflect type of string, used by MapIndex's
// string-key fast path below.
var stringType = TypeOf("").(*rtype)
1587
1588
1589
1590
1591
// MapIndex returns the value associated with key in the map v.
// It panics if v's Kind is not Map.
// It returns the zero Value if key is not found in the map or if v represents a nil map.
// As in Go, the key's value must be assignable to the map's key type.
func (v Value) MapIndex(key Value) Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ))

	// Do not require key to be exported, so that DeepEqual
	// and other programs can use all the keys returned by
	// MapKeys as arguments to MapIndex. If either the map
	// or the key is unexported, though, the result will be
	// considered unexported. This is consistent with the
	// behavior for structs, which allow read but not write
	// of unexported fields.

	var e unsafe.Pointer
	if (tt.key == stringType || key.kind() == String) && tt.key == key.typ && tt.elem.size <= maxValSize {
		// Fast path: string keys go through the runtime's
		// specialized string map access.
		k := *(*string)(key.ptr)
		e = mapaccess_faststr(v.typ, v.pointer(), k)
	} else {
		key = key.assignTo("reflect.Value.MapIndex", tt.key, nil)
		var k unsafe.Pointer
		if key.flag&flagIndir != 0 {
			k = key.ptr
		} else {
			k = unsafe.Pointer(&key.ptr)
		}
		e = mapaccess(v.typ, v.pointer(), k)
	}
	if e == nil {
		return Value{}
	}
	typ := tt.elem
	// Result inherits read-only status from both map and key.
	fl := (v.flag | key.flag).ro()
	fl |= flag(typ.Kind())
	return copyVal(typ, fl, e)
}
1626
1627
1628
1629
1630
// MapKeys returns a slice containing all the keys present in the map,
// in unspecified order.
// It panics if v's Kind is not Map.
// It returns an empty slice if v represents a nil map.
func (v Value) MapKeys() []Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ))
	keyType := tt.key

	fl := v.flag.ro() | flag(keyType.Kind())

	m := v.pointer()
	mlen := int(0)
	if m != nil {
		mlen = maplen(m)
	}
	var it hiter
	mapiterinit(v.typ, m, &it)
	a := make([]Value, mlen)
	var i int
	for i = 0; i < len(a); i++ {
		key := mapiterkey(&it)
		if key == nil {
			// Someone deleted an entry from the map since we
			// called maplen above. It's a data race, but nothing
			// we can do about it.
			break
		}
		a[i] = copyVal(keyType, fl, key)
		mapiternext(&it)
	}
	// Trim in case the map shrank during iteration.
	return a[:i]
}
1660
1661
1662
1663
1664
// hiter's structure matches runtime.hiter's structure.
// Having a clone here allows us to embed a map iterator
// inside type MapIter so that MapIters can be re-used
// without doing any allocations.
// NOTE(review): field order and types must stay in sync with the
// runtime's map iterator — do not reorder.
type hiter struct {
	key         unsafe.Pointer
	elem        unsafe.Pointer
	t           unsafe.Pointer
	h           unsafe.Pointer
	buckets     unsafe.Pointer
	bptr        unsafe.Pointer
	overflow    *[]unsafe.Pointer
	oldoverflow *[]unsafe.Pointer
	startBucket uintptr
	offset      uint8
	wrapped     bool
	B           uint8
	i           uint8
	bucket      uintptr
	checkBucket uintptr
}

// initialized reports whether mapiterinit has been called on h:
// the t field is set on the first mapiterinit call and is never
// reset to nil while iterating.
func (h *hiter) initialized() bool {
	return h.t != nil
}
1686
1687
1688
// A MapIter is an iterator for ranging over a map.
// See Value.MapRange.
type MapIter struct {
	m     Value
	hiter hiter // embedded so iteration does not allocate
}
1693
1694
// Key returns the key of iter's current map entry.
// It panics if Next has not been called, or if the iterator is exhausted.
func (iter *MapIter) Key() Value {
	if !iter.hiter.initialized() {
		panic("MapIter.Key called before Next")
	}
	iterkey := mapiterkey(&iter.hiter)
	if iterkey == nil {
		panic("MapIter.Key called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ))
	ktype := t.key
	return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
}
1708
1709
1710
1711
// SetIterKey assigns to v the key of iter's current map entry.
// It is equivalent to v.Set(iter.Key()), but it avoids allocating a new Value.
// As in Go, the key must be assignable to v's type.
func (v Value) SetIterKey(iter *MapIter) {
	if !iter.hiter.initialized() {
		panic("reflect: Value.SetIterKey called before Next")
	}
	iterkey := mapiterkey(&iter.hiter)
	if iterkey == nil {
		panic("reflect: Value.SetIterKey called on exhausted iterator")
	}

	v.mustBeAssignable()
	var target unsafe.Pointer
	if v.kind() == Interface {
		// Let assignTo write the interface header directly into v.
		target = v.ptr
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ))
	ktype := t.key

	// Wrap the runtime's key pointer in a Value without copying it first.
	key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
	key = key.assignTo("reflect.MapIter.SetKey", v.typ, target)
	typedmemmove(v.typ, v.ptr, key.ptr)
}
1734
1735
// Value returns the value of iter's current map entry.
// It panics if Next has not been called, or if the iterator is exhausted.
func (iter *MapIter) Value() Value {
	if !iter.hiter.initialized() {
		panic("MapIter.Value called before Next")
	}
	iterelem := mapiterelem(&iter.hiter)
	if iterelem == nil {
		panic("MapIter.Value called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ))
	vtype := t.elem
	return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
}
1749
1750
1751
1752
// SetIterValue assigns to v the value of iter's current map entry.
// It is equivalent to v.Set(iter.Value()), but it avoids allocating a new Value.
// As in Go, the value must be assignable to v's type.
func (v Value) SetIterValue(iter *MapIter) {
	if !iter.hiter.initialized() {
		panic("reflect: Value.SetIterValue called before Next")
	}
	iterelem := mapiterelem(&iter.hiter)
	if iterelem == nil {
		panic("reflect: Value.SetIterValue called on exhausted iterator")
	}

	v.mustBeAssignable()
	var target unsafe.Pointer
	if v.kind() == Interface {
		// Let assignTo write the interface header directly into v.
		target = v.ptr
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ))
	vtype := t.elem

	// Wrap the runtime's elem pointer in a Value without copying it first.
	elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
	elem = elem.assignTo("reflect.MapIter.SetValue", v.typ, target)
	typedmemmove(v.typ, v.ptr, elem.ptr)
}
1775
1776
1777
1778
// Next advances the map iterator and reports whether there is another
// entry. It returns false when iter is exhausted; subsequent
// calls to Key, Value, or Next will panic.
func (iter *MapIter) Next() bool {
	if !iter.m.IsValid() {
		panic("MapIter.Next called on an iterator that does not have an associated map Value")
	}
	if !iter.hiter.initialized() {
		// First call: start the runtime iterator.
		mapiterinit(iter.m.typ, iter.m.pointer(), &iter.hiter)
	} else {
		if mapiterkey(&iter.hiter) == nil {
			panic("MapIter.Next called on exhausted iterator")
		}
		mapiternext(&iter.hiter)
	}
	return mapiterkey(&iter.hiter) != nil
}
1793
1794
1795
1796
1797
// Reset modifies iter to iterate over v.
// It panics if v's Kind is not Map and v is not the zero Value.
// Reset(Value{}) causes iter to not to refer to any map,
// which may allow the previously iterated-over map to be garbage collected.
func (iter *MapIter) Reset(v Value) {
	if v.IsValid() {
		v.mustBe(Map)
	}
	iter.m = v
	iter.hiter = hiter{}
}
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
// MapRange returns a range iterator for a map.
// It panics if v's Kind is not Map.
//
// Call Next to advance the iterator, and Key/Value to access each entry.
// Next returns false when the iterator is exhausted.
// MapRange follows the same iteration semantics as a range statement.
//
// Example:
//
//	iter := reflect.ValueOf(m).MapRange()
//	for iter.Next() {
//		k := iter.Key()
//		v := iter.Value()
//		...
//	}
func (v Value) MapRange() *MapIter {
	v.mustBe(Map)
	return &MapIter{m: v}
}
1826
1827
1828
// copyVal returns a Value containing the map key or value at ptr,
// allocating a new variable as needed.
func copyVal(typ *rtype, fl flag, ptr unsafe.Pointer) Value {
	if ifaceIndir(typ) {
		// Copy result so future changes to the map
		// won't change the underlying value.
		c := unsafe_New(typ)
		typedmemmove(typ, c, ptr)
		return Value{typ, c, fl | flagIndir}
	}
	// Pointer-shaped value: load the word directly.
	return Value{typ, *(*unsafe.Pointer)(ptr), fl}
}
1839
1840
1841
1842
1843
// Method returns a function value corresponding to v's i'th method.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func (v Value) Method(i int) Value {
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.Method", Invalid})
	}
	if v.flag&flagMethod != 0 || uint(i) >= uint(v.typ.NumMethod()) {
		panic("reflect: Method index out of range")
	}
	if v.typ.Kind() == Interface && v.IsNil() {
		panic("reflect: Method on nil interface value")
	}
	// The method value keeps the receiver's typ/ptr and records the
	// method index in the flag bits above flagMethodShift.
	fl := v.flag.ro() | (v.flag & flagIndir)
	fl |= flag(Func)
	fl |= flag(i)<<flagMethodShift | flagMethod
	return Value{v.typ, v.ptr, fl}
}
1859
1860
// NumMethod returns the number of methods in the value's method set.
// For a method value (created by Method or MethodByName) it returns 0.
func (v Value) NumMethod() int {
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.NumMethod", Invalid})
	}
	if v.flag&flagMethod != 0 {
		return 0
	}
	return v.typ.NumMethod()
}
1870
1871
1872
1873
1874
1875
// MethodByName returns a function value corresponding to the method
// of v with the given name.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// It returns the zero Value if no method was found.
func (v Value) MethodByName(name string) Value {
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.MethodByName", Invalid})
	}
	if v.flag&flagMethod != 0 {
		// A method value has no methods of its own.
		return Value{}
	}
	m, ok := v.typ.MethodByName(name)
	if !ok {
		return Value{}
	}
	return v.Method(m.Index)
}
1889
1890
1891
// NumField returns the number of fields in the struct v.
// It panics if v's Kind is not Struct.
func (v Value) NumField() int {
	v.mustBe(Struct)
	tt := (*structType)(unsafe.Pointer(v.typ))
	return len(tt.fields)
}
1897
1898
1899
1900 func (v Value) OverflowComplex(x complex128) bool {
1901 k := v.kind()
1902 switch k {
1903 case Complex64:
1904 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
1905 case Complex128:
1906 return false
1907 }
1908 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
1909 }
1910
1911
1912
1913 func (v Value) OverflowFloat(x float64) bool {
1914 k := v.kind()
1915 switch k {
1916 case Float32:
1917 return overflowFloat32(x)
1918 case Float64:
1919 return false
1920 }
1921 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
1922 }
1923
1924 func overflowFloat32(x float64) bool {
1925 if x < 0 {
1926 x = -x
1927 }
1928 return math.MaxFloat32 < x && x <= math.MaxFloat64
1929 }
1930
1931
1932
// OverflowInt reports whether the int64 x cannot be represented by v's type.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) OverflowInt(x int64) bool {
	k := v.kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := v.typ.size * 8
		// Truncate to bitSize bits and sign-extend back; if the round
		// trip changes x, it does not fit in v's type.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
}
1943
1944
1945
// OverflowUint reports whether the uint64 x cannot be represented by v's type.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64.
func (v Value) OverflowUint(x uint64) bool {
	k := v.kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		bitSize := v.typ.size * 8
		// Truncate to bitSize bits and zero-extend back; if the round
		// trip changes x, it does not fit in v's type.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
}
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978 func (v Value) Pointer() uintptr {
1979 k := v.kind()
1980 switch k {
1981 case Pointer:
1982 if v.typ.ptrdata == 0 {
1983 val := *(*uintptr)(v.ptr)
1984
1985
1986 if !verifyNotInHeapPtr(val) {
1987 panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
1988 }
1989 return val
1990 }
1991 fallthrough
1992 case Chan, Map, UnsafePointer:
1993 return uintptr(v.pointer())
1994 case Func:
1995 if v.flag&flagMethod != 0 {
1996
1997
1998
1999
2000
2001
2002 return methodValueCallCodePtr()
2003 }
2004 p := v.pointer()
2005
2006
2007 if p != nil {
2008 p = *(*unsafe.Pointer)(p)
2009 }
2010 return uintptr(p)
2011
2012 case Slice:
2013 return (*SliceHeader)(v.ptr).Data
2014 }
2015 panic(&ValueError{"reflect.Value.Pointer", v.kind()})
2016 }
2017
2018
2019
2020
2021
2022
// Recv receives and returns a value from the channel v.
// It panics if v's Kind is not Chan.
// The receive blocks until a value is ready.
// The boolean value ok is true if the value x corresponds to a send
// on the channel, false if it is a zero value received because the channel is closed.
func (v Value) Recv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(false)
}
2028
2029
2030
// internal recv, possibly non-blocking (nb).
// v is known to be a channel.
func (v Value) recv(nb bool) (val Value, ok bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ))
	if ChanDir(tt.dir)&RecvDir == 0 {
		panic("reflect: recv on send-only channel")
	}
	t := tt.elem
	val = Value{t, nil, flag(t.Kind())}
	var p unsafe.Pointer
	if ifaceIndir(t) {
		// Indirect element: receive into freshly allocated storage.
		p = unsafe_New(t)
		val.ptr = p
		val.flag |= flagIndir
	} else {
		// Pointer-shaped element: receive directly into val.ptr.
		p = unsafe.Pointer(&val.ptr)
	}
	selected, ok := chanrecv(v.pointer(), nb, p)
	if !selected {
		// Non-blocking receive that would have blocked: no value.
		val = Value{}
	}
	return
}
2052
2053
2054
2055
// Send sends x on the channel v.
// It panics if v's kind is not Chan or if x's type is not the same type as v's element type.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) Send(x Value) {
	v.mustBe(Chan)
	v.mustBeExported()
	v.send(x, false)
}
2061
2062
2063
// internal send, possibly non-blocking.
// v is known to be a channel.
func (v Value) send(x Value, nb bool) (selected bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ))
	if ChanDir(tt.dir)&SendDir == 0 {
		panic("reflect: send on recv-only channel")
	}
	x.mustBeExported()
	x = x.assignTo("reflect.Value.Send", tt.elem, nil)
	var p unsafe.Pointer
	if x.flag&flagIndir != 0 {
		p = x.ptr
	} else {
		// Pointer-shaped value: pass the address of the word.
		p = unsafe.Pointer(&x.ptr)
	}
	return chansend(v.pointer(), p, nb)
}
2079
2080
2081
2082
// Set assigns x to the value v.
// It panics if CanSet returns false.
// As in Go, x's value must be assignable to v's type.
func (v Value) Set(x Value) {
	v.mustBeAssignable()
	x.mustBeExported() // do not let unexported x leak
	var target unsafe.Pointer
	if v.kind() == Interface {
		// Let assignTo write the interface header directly into v.
		target = v.ptr
	}
	x = x.assignTo("reflect.Set", v.typ, target)
	if x.flag&flagIndir != 0 {
		if x.ptr == unsafe.Pointer(&zeroVal[0]) {
			// x came from Zero's shared zero buffer: clear rather than
			// copy, so large types don't read the whole buffer.
			typedmemclr(v.typ, v.ptr)
		} else {
			typedmemmove(v.typ, v.ptr, x.ptr)
		}
	} else {
		*(*unsafe.Pointer)(v.ptr) = x.ptr
	}
}
2101
2102
2103
// SetBool sets v's underlying value.
// It panics if v's Kind is not Bool or if CanSet() is false.
func (v Value) SetBool(x bool) {
	v.mustBeAssignable()
	v.mustBe(Bool)
	*(*bool)(v.ptr) = x
}
2109
2110
2111
// SetBytes sets v's underlying value.
// It panics if v's underlying value is not a slice of bytes.
func (v Value) SetBytes(x []byte) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ.Elem().Kind() != Uint8 {
		panic("reflect.Value.SetBytes of non-byte slice")
	}
	*(*[]byte)(v.ptr) = x
}
2120
2121
2122
// setRunes sets v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s).
func (v Value) setRunes(x []rune) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ.Elem().Kind() != Int32 {
		panic("reflect.Value.setRunes of non-rune slice")
	}
	*(*[]rune)(v.ptr) = x
}
2131
2132
2133
// SetComplex sets v's underlying value to x.
// It panics if v's Kind is not Complex64 or Complex128, or if CanSet() is false.
func (v Value) SetComplex(x complex128) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
	case Complex64:
		*(*complex64)(v.ptr) = complex64(x)
	case Complex128:
		*(*complex128)(v.ptr) = x
	}
}
2145
2146
2147
// SetFloat sets v's underlying value to x.
// It panics if v's Kind is not Float32 or Float64, or if CanSet() is false.
func (v Value) SetFloat(x float64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
	case Float32:
		*(*float32)(v.ptr) = float32(x)
	case Float64:
		*(*float64)(v.ptr) = x
	}
}
2159
2160
2161
// SetInt sets v's underlying value to x.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64, or if CanSet() is false.
func (v Value) SetInt(x int64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetInt", v.kind()})
	case Int:
		*(*int)(v.ptr) = int(x)
	case Int8:
		*(*int8)(v.ptr) = int8(x)
	case Int16:
		*(*int16)(v.ptr) = int16(x)
	case Int32:
		*(*int32)(v.ptr) = int32(x)
	case Int64:
		*(*int64)(v.ptr) = x
	}
}
2179
2180
2181
2182
// SetLen sets v's length to n.
// It panics if v's Kind is not Slice or if n is negative or
// longer than the capacity of the slice.
func (v Value) SetLen(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	// The uint conversion rejects negative n in the same comparison.
	if uint(n) > uint(s.Cap) {
		panic("reflect: slice length out of range in SetLen")
	}
	s.Len = n
}
2192
2193
2194
2195
// SetCap sets v's capacity to n.
// It panics if v's Kind is not Slice or if n is smaller than the length or
// greater than the capacity of the slice.
func (v Value) SetCap(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	if n < s.Len || n > s.Cap {
		panic("reflect: slice capacity out of range in SetCap")
	}
	s.Cap = n
}
2205
2206
2207
2208
2209
2210
2211
// SetMapIndex sets the element associated with key in the map v to elem.
// It panics if v's Kind is not Map.
// If elem is the zero Value, SetMapIndex deletes the key from the map.
// Otherwise if v holds a nil map, SetMapIndex will panic.
// As in Go, key's elem must be assignable to the map's key type,
// and elem's value must be assignable to the map's elem type.
func (v Value) SetMapIndex(key, elem Value) {
	v.mustBe(Map)
	v.mustBeExported()
	key.mustBeExported()
	tt := (*mapType)(unsafe.Pointer(v.typ))

	// Fast path: string-keyed map with small elements can use the
	// runtime's string-specialized assign/delete without copying the key.
	if (tt.key == stringType || key.kind() == String) && tt.key == key.typ && tt.elem.size <= maxValSize {
		k := *(*string)(key.ptr)
		if elem.typ == nil {
			mapdelete_faststr(v.typ, v.pointer(), k)
			return
		}
		elem.mustBeExported()
		elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
		var e unsafe.Pointer
		if elem.flag&flagIndir != 0 {
			e = elem.ptr
		} else {
			e = unsafe.Pointer(&elem.ptr)
		}
		mapassign_faststr(v.typ, v.pointer(), k, e)
		return
	}

	// General path.
	key = key.assignTo("reflect.Value.SetMapIndex", tt.key, nil)
	var k unsafe.Pointer
	if key.flag&flagIndir != 0 {
		k = key.ptr
	} else {
		k = unsafe.Pointer(&key.ptr)
	}
	if elem.typ == nil {
		mapdelete(v.typ, v.pointer(), k)
		return
	}
	elem.mustBeExported()
	elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
	var e unsafe.Pointer
	if elem.flag&flagIndir != 0 {
		e = elem.ptr
	} else {
		e = unsafe.Pointer(&elem.ptr)
	}
	mapassign(v.typ, v.pointer(), k, e)
}
2257
2258
2259
// SetUint sets v's underlying value to x.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64,
// or if CanSet() is false.
func (v Value) SetUint(x uint64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetUint", v.kind()})
	case Uint:
		*(*uint)(v.ptr) = uint(x)
	case Uint8:
		*(*uint8)(v.ptr) = uint8(x)
	case Uint16:
		*(*uint16)(v.ptr) = uint16(x)
	case Uint32:
		*(*uint32)(v.ptr) = uint32(x)
	case Uint64:
		*(*uint64)(v.ptr) = x
	case Uintptr:
		*(*uintptr)(v.ptr) = uintptr(x)
	}
}
2279
2280
2281
// SetPointer sets the unsafe.Pointer value v to x.
// It panics if v's Kind is not UnsafePointer.
func (v Value) SetPointer(x unsafe.Pointer) {
	v.mustBeAssignable()
	v.mustBe(UnsafePointer)
	*(*unsafe.Pointer)(v.ptr) = x
}
2287
2288
2289
// SetString sets v's underlying value to x.
// It panics if v's Kind is not String or if CanSet() is false.
func (v Value) SetString(x string) {
	v.mustBeAssignable()
	v.mustBe(String)
	*(*string)(v.ptr) = x
}
2295
2296
2297
2298
// Slice returns v[i:j].
// It panics if v's Kind is not Array, Slice or String, or if v is an unaddressable array,
// or if the indexes are out of bounds.
func (v Value) Slice(i, j int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		cap = int(tt.len)
		typ = (*sliceType)(unsafe.Pointer(tt.slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if i < 0 || j < i || j > s.Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
		var t unsafeheader.String
		if i < s.Len {
			t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
		}
		return Value{v.typ, unsafe.Pointer(&t), v.flag}
	}

	if i < 0 || j < i || j > cap {
		panic("reflect.Value.Slice: slice index out of bounds")
	}

	// Declare slice so that gc can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = cap - i
	if cap-i > 0 {
		s.Data = arrayAt(base, i, typ.elem.Size(), "i < cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.common(), unsafe.Pointer(&x), fl}
}
2357
2358
2359
2360
// Slice3 is the 3-index form of the slice operation: it returns v[i:j:k].
// It panics if v's Kind is not Array or Slice, or if v is an unaddressable array,
// or if the indexes are out of bounds.
func (v Value) Slice3(i, j, k int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice3", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ))
		cap = int(tt.len)
		typ = (*sliceType)(unsafe.Pointer(tt.slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap
	}

	if i < 0 || j < i || k < j || k > cap {
		panic("reflect.Value.Slice3: slice index out of bounds")
	}

	// Declare slice so that the garbage collector
	// can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = k - i
	if k-i > 0 {
		s.Data = arrayAt(base, i, typ.elem.Size(), "i < k <= cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.common(), unsafe.Pointer(&x), fl}
}
2409
2410
2411
2412
2413
2414
2415
2416 func (v Value) String() string {
2417 switch k := v.kind(); k {
2418 case Invalid:
2419 return "<invalid Value>"
2420 case String:
2421 return *(*string)(v.ptr)
2422 }
2423
2424
2425 return "<" + v.Type().String() + " Value>"
2426 }
2427
2428
2429
2430
2431
2432
// TryRecv attempts to receive a value from the channel v but will not block.
// It panics if v's Kind is not Chan.
// If the receive delivers a value, x is the transferred value and ok is true.
// If the receive cannot finish without blocking, x is the zero Value and ok is false.
// If the channel is closed, x is the zero value for the channel's element type and ok is false.
func (v Value) TryRecv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(true)
}
2438
2439
2440
2441
2442
// TrySend attempts to send x on the channel v but will not block.
// It panics if v's Kind is not Chan.
// It reports whether the value was sent.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) TrySend(x Value) bool {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.send(x, true)
}
2448
2449
// Type returns v's type.
func (v Value) Type() Type {
	f := v.flag
	if f == 0 {
		panic(&ValueError{"reflect.Value.Type", Invalid})
	}
	if f&flagMethod == 0 {
		// Easy case: not a method value.
		return v.typ
	}

	// Method value.
	// v.typ describes the receiver, not the method type.
	// The method index is stored in the flag bits above flagMethodShift.
	i := int(v.flag) >> flagMethodShift
	if v.typ.Kind() == Interface {
		// Method on interface.
		tt := (*interfaceType)(unsafe.Pointer(v.typ))
		if uint(i) >= uint(len(tt.methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.methods[i]
		return v.typ.typeOff(m.typ)
	}
	// Method on concrete type.
	ms := v.typ.exportedMethods()
	if uint(i) >= uint(len(ms)) {
		panic("reflect: internal error: invalid method index")
	}
	m := ms[i]
	return v.typ.typeOff(m.mtyp)
}
2480
2481
2482 func (v Value) CanUint() bool {
2483 switch v.kind() {
2484 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2485 return true
2486 default:
2487 return false
2488 }
2489 }
2490
2491
2492
// Uint returns v's underlying value, as a uint64.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64.
func (v Value) Uint() uint64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Uint:
		return uint64(*(*uint)(p))
	case Uint8:
		return uint64(*(*uint8)(p))
	case Uint16:
		return uint64(*(*uint16)(p))
	case Uint32:
		return uint64(*(*uint32)(p))
	case Uint64:
		return *(*uint64)(p)
	case Uintptr:
		return uint64(*(*uintptr)(p))
	}
	panic(&ValueError{"reflect.Value.Uint", v.kind()})
}
2512
2513
2514
2515
2516
2517
2518
2519
2520
2521
2522
// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It panics if v is not addressable.
func (v Value) UnsafeAddr() uintptr {
	if v.typ == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
	return uintptr(v.ptr)
}
2532
2533
2534
2535
2536
2537
2538
2539
2540
2541
2542
2543
// UnsafePointer returns v's value as an unsafe.Pointer.
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice, or UnsafePointer.
//
// If v's Kind is Func, the returned pointer is an underlying
// code pointer, but not necessarily enough to identify a
// single function uniquely. The only guarantee is that the
// result is zero if and only if v is a nil func Value.
//
// If v's Kind is Slice, the returned pointer is to the first
// element of the slice. If the slice is nil the returned value
// is nil. If the slice is empty but non-nil the return value is non-nil.
func (v Value) UnsafePointer() unsafe.Pointer {
	k := v.kind()
	switch k {
	case Pointer:
		if v.typ.ptrdata == 0 {
			// Since it is a not-in-heap pointer, all pointers to the heap are
			// forbidden! See comment in Value.Elem and issue #48399.
			if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
				panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
			}
			return *(*unsafe.Pointer)(v.ptr)
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return v.pointer()
	case Func:
		if v.flag&flagMethod != 0 {
			// All method values created via reflect share a single
			// underlying stub code pointer, so they compare equal here.
			code := methodValueCallCodePtr()
			return *(*unsafe.Pointer)(unsafe.Pointer(&code))
		}
		p := v.pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return p

	case Slice:
		return (*unsafeheader.Slice)(v.ptr).Data
	}
	panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
}
2583
2584
2585
2586
2587
2588
2589
// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release.
// Moreover, the Data field is not sufficient to guarantee the data
// it references will not be garbage collected, so programs must keep
// a separate, correctly typed pointer to the underlying data.
type StringHeader struct {
	Data uintptr
	Len  int
}

// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release.
// Moreover, the Data field is not sufficient to guarantee the data
// it references will not be garbage collected, so programs must keep
// a separate, correctly typed pointer to the underlying data.
type SliceHeader struct {
	Data uintptr
	Len  int
	Cap  int
}
2606
2607 func typesMustMatch(what string, t1, t2 Type) {
2608 if t1 != t2 {
2609 panic(what + ": " + t1.String() + " != " + t2.String())
2610 }
2611 }
2612
2613
2614
2615
2616
2617
2618
2619
// arrayAt returns the i'th element of p,
// an array whose elements are eltSize bytes wide.
// The array pointed at by p must have at least i+1 elements:
// it is invalid (but impossible to check here) to pass i >= len,
// because then the result will point outside the array.
// whySafe must explain why i < len. (Passing "i < len" is fine;
// the benefit is to surface this assumption at the call site.)
func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
	return add(p, uintptr(i)*eltSize, "i < len")
}
2623
2624
2625
// grow grows the slice s so that it can hold extra more values, allocating
// more capacity if needed. It also returns the old and new slice lengths.
func grow(s Value, extra int) (Value, int, int) {
	i0 := s.Len()
	i1 := i0 + extra
	if i1 < i0 {
		panic("reflect.Append: slice overflow")
	}
	m := s.Cap()
	if i1 <= m {
		// Fits in existing capacity: just reslice.
		return s.Slice(0, i1), i0, i1
	}
	if m == 0 {
		m = extra
	} else {
		// Grow capacity: double below threshold, ~1.25x above it,
		// until it can hold i1 elements.
		const threshold = 256
		for m < i1 {
			if i0 < threshold {
				m += m
			} else {
				m += (m + 3*threshold) / 4
			}
		}
	}
	t := MakeSlice(s.Type(), i1, m)
	Copy(t, s)
	return t, i0, i1
}
2652
2653
2654
2655 func Append(s Value, x ...Value) Value {
2656 s.mustBe(Slice)
2657 s, i0, i1 := grow(s, len(x))
2658 for i, j := i0, 0; i < i1; i, j = i+1, j+1 {
2659 s.Index(i).Set(x[j])
2660 }
2661 return s
2662 }
2663
2664
2665
// AppendSlice appends a slice t to a slice s and returns the resulting slice.
// The slices s and t must have the same element type.
func AppendSlice(s, t Value) Value {
	s.mustBe(Slice)
	t.mustBe(Slice)
	typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
	s, i0, i1 := grow(s, t.Len())
	Copy(s.Slice(i0, i1), t)
	return s
}
2674
2675
2676
2677
2678
2679
2680
2681
// Copy copies the contents of src into dst until either
// dst has been filled or src has been exhausted.
// It returns the number of elements copied.
// Dst and src each must have kind Slice or Array, and
// dst and src must have the same element type.
//
// As a special case, src can have kind String if dst's element type is kind Uint8.
func Copy(dst, src Value) int {
	dk := dst.kind()
	if dk != Array && dk != Slice {
		panic(&ValueError{"reflect.Copy", dk})
	}
	if dk == Array {
		dst.mustBeAssignable()
	}
	dst.mustBeExported()

	sk := src.kind()
	var stringCopy bool
	if sk != Array && sk != Slice {
		// Allow string -> []byte as a special case.
		stringCopy = sk == String && dst.typ.Elem().Kind() == Uint8
		if !stringCopy {
			panic(&ValueError{"reflect.Copy", sk})
		}
	}
	src.mustBeExported()

	de := dst.typ.Elem()
	if !stringCopy {
		se := src.typ.Elem()
		typesMustMatch("reflect.Copy", de, se)
	}

	// Build slice headers over dst and src so the runtime copy
	// can treat arrays, slices, and strings uniformly.
	var ds, ss unsafeheader.Slice
	if dk == Array {
		ds.Data = dst.ptr
		ds.Len = dst.Len()
		ds.Cap = ds.Len
	} else {
		ds = *(*unsafeheader.Slice)(dst.ptr)
	}
	if sk == Array {
		ss.Data = src.ptr
		ss.Len = src.Len()
		ss.Cap = ss.Len
	} else if sk == Slice {
		ss = *(*unsafeheader.Slice)(src.ptr)
	} else {
		sh := *(*unsafeheader.String)(src.ptr)
		ss.Data = sh.Data
		ss.Len = sh.Len
		ss.Cap = sh.Len
	}

	return typedslicecopy(de.common(), ds, ss)
}
2731
2732
2733
// A runtimeSelect is a single case passed to rselect.
// NOTE(review): layout presumably must match the runtime's runtimeSelect —
// confirm against runtime/select.go before changing.
type runtimeSelect struct {
	dir SelectDir      // SelectSend, SelectRecv, or SelectDefault
	typ *rtype         // channel type
	ch  unsafe.Pointer // channel
	val unsafe.Pointer // ptr to data (SendDir) or ptr to receive buffer (RecvDir)
}

// rselect runs a select. It returns the index of the chosen case.
// If the case was a receive, val is filled in with the received value.
// The conventional OK bool indicates whether the receive corresponds
// to a sent value. Implemented in the runtime.
func rselect([]runtimeSelect) (chosen int, recvOK bool)
2747
2748
// A SelectDir describes the communication direction of a select case.
type SelectDir int

// NOTE(review): these values are passed to rselect; presumably they must
// match the runtime's selectDir values — confirm before reordering.

const (
	_             SelectDir = iota
	SelectSend              // case Chan <- Send
	SelectRecv              // case <-Chan:
	SelectDefault           // default
)
2759
2760
2761
2762
2763
2764
2765
2766
2767
2768
2769
2770
2771
2772
2773
2774
2775
2776
// A SelectCase describes a single case in a select operation.
// The kind of case depends on Dir, the communication direction.
//
// If Dir is SelectDefault, the case represents a default case.
// Chan and Send must be zero Values.
//
// If Dir is SelectSend, the case represents a send operation.
// Normally Chan's underlying value must be a channel, and Send's underlying value must be
// assignable to the channel's element type. As a special case, if Chan is a zero Value,
// then the case is ignored, and the field Send will also be ignored and may be either zero
// or non-zero.
//
// If Dir is SelectRecv, the case represents a receive operation.
// Normally Chan's underlying value must be a channel and Send must be a zero Value.
// If Chan is a zero Value, then the case is ignored, but Send must still be a zero Value.
// When a receive operation is selected, the received Value is returned by Select.
type SelectCase struct {
	Dir  SelectDir // direction of case
	Chan Value     // channel to use (for send or receive)
	Send Value     // value to send (for send)
}
2782
2783
2784
2785
2786
2787
2788
2789
2790
// Select executes a select operation described by the list of cases.
// Like the Go select statement, it blocks until at least one of the cases
// can proceed, makes a uniform pseudo-random choice,
// and then executes that case. It returns the index of the chosen case
// and, if that case was a receive operation, the value received and a
// boolean indicating whether the value corresponds to a send on the channel
// (as opposed to a zero value received because the channel is closed).
// Select supports a maximum of 65536 cases.
func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
	if len(cases) > 65536 {
		panic("reflect.Select: too many cases (max 65536)")
	}
	// NOTE: Do not trust that caller is not modifying cases data underfoot.
	// The range is safe because the caller cannot modify our copy of the len
	// and each iteration makes its own copy of the value c.
	var runcases []runtimeSelect
	if len(cases) > 4 {
		// Slice is heap allocated due to runtime dependent capacity.
		runcases = make([]runtimeSelect, len(cases))
	} else {
		// Slice can be stack allocated due to constant capacity.
		runcases = make([]runtimeSelect, len(cases), 4)
	}

	haveDefault := false
	for i, c := range cases {
		rc := &runcases[i]
		rc.dir = c.Dir
		switch c.Dir {
		default:
			panic("reflect.Select: invalid Dir")

		case SelectDefault: // default
			if haveDefault {
				panic("reflect.Select: multiple default cases")
			}
			haveDefault = true
			if c.Chan.IsValid() {
				panic("reflect.Select: default case has Chan value")
			}
			if c.Send.IsValid() {
				panic("reflect.Select: default case has Send value")
			}

		case SelectSend:
			ch := c.Chan
			if !ch.IsValid() {
				// Zero Chan: case is ignored.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ))
			if ChanDir(tt.dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = &tt.rtype
			v := c.Send
			if !v.IsValid() {
				panic("reflect.Select: SendDir case missing Send value")
			}
			v.mustBeExported()
			v = v.assignTo("reflect.Select", tt.elem, nil)
			if v.flag&flagIndir != 0 {
				rc.val = v.ptr
			} else {
				rc.val = unsafe.Pointer(&v.ptr)
			}

		case SelectRecv:
			if c.Send.IsValid() {
				panic("reflect.Select: RecvDir case has Send value")
			}
			ch := c.Chan
			if !ch.IsValid() {
				// Zero Chan: case is ignored.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ))
			if ChanDir(tt.dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = &tt.rtype
			// Allocate a receive buffer the runtime will fill in.
			rc.val = unsafe_New(tt.elem)
		}
	}

	chosen, recvOK = rselect(runcases)
	if runcases[chosen].dir == SelectRecv {
		tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
		t := tt.elem
		p := runcases[chosen].val
		fl := flag(t.Kind())
		if ifaceIndir(t) {
			recv = Value{t, p, fl | flagIndir}
		} else {
			recv = Value{t, *(*unsafe.Pointer)(p), fl}
		}
	}
	return chosen, recv, recvOK
}
2886
2887
2890
2891
// implemented in package runtime
func unsafe_New(*rtype) unsafe.Pointer
func unsafe_NewArray(*rtype, int) unsafe.Pointer
2894
2895
2896
// MakeSlice creates a new zero-initialized slice value
// for the specified slice type, length, and capacity.
func MakeSlice(typ Type, len, cap int) Value {
	if typ.Kind() != Slice {
		panic("reflect.MakeSlice of non-slice type")
	}
	if len < 0 {
		panic("reflect.MakeSlice: negative len")
	}
	if cap < 0 {
		panic("reflect.MakeSlice: negative cap")
	}
	if len > cap {
		panic("reflect.MakeSlice: len > cap")
	}

	s := unsafeheader.Slice{Data: unsafe_NewArray(typ.Elem().(*rtype), cap), Len: len, Cap: cap}
	return Value{typ.(*rtype), unsafe.Pointer(&s), flagIndir | flag(Slice)}
}
2914
2915
// MakeChan creates a new channel with the specified type and buffer size.
func MakeChan(typ Type, buffer int) Value {
	if typ.Kind() != Chan {
		panic("reflect.MakeChan of non-chan type")
	}
	if buffer < 0 {
		panic("reflect.MakeChan: negative buffer size")
	}
	if typ.ChanDir() != BothDir {
		panic("reflect.MakeChan: unidirectional channel type")
	}
	t := typ.(*rtype)
	ch := makechan(t, buffer)
	return Value{t, ch, flag(Chan)}
}
2930
2931
// MakeMap creates a new map with the specified type.
func MakeMap(typ Type) Value {
	return MakeMapWithSize(typ, 0)
}
2935
2936
2937
// MakeMapWithSize creates a new map with the specified type
// and initial space for approximately n elements.
func MakeMapWithSize(typ Type, n int) Value {
	if typ.Kind() != Map {
		panic("reflect.MakeMapWithSize of non-map type")
	}
	t := typ.(*rtype)
	m := makemap(t, n)
	return Value{t, m, flag(Map)}
}
2946
2947
2948
2949
2950 func Indirect(v Value) Value {
2951 if v.Kind() != Pointer {
2952 return v
2953 }
2954 return v.Elem()
2955 }
2956
2957
2958
// ValueOf returns a new Value initialized to the concrete value
// stored in the interface i. ValueOf(nil) returns the zero Value.
func ValueOf(i any) Value {
	if i == nil {
		return Value{}
	}

	// TODO: Maybe allow contents of a Value to live on the stack.
	// For now we make the contents always escape to the heap. It
	// makes life easier in a few places (see chanrecv/mapassign
	// comment below).
	escapes(i)

	return unpackEface(i)
}
2972
2973
2974
2975
2976
2977
// Zero returns a Value representing the zero value for the specified type.
// The result is different from the zero Value, which represents no value at all.
// For example, Zero(TypeOf(42)) returns a Value with Kind Int and value 0.
// The returned value is neither addressable nor settable.
func Zero(typ Type) Value {
	if typ == nil {
		panic("reflect: Zero(nil)")
	}
	t := typ.(*rtype)
	fl := flag(t.Kind())
	if ifaceIndir(t) {
		var p unsafe.Pointer
		if t.size <= maxZero {
			// Small types share the read-only zeroVal buffer
			// instead of allocating.
			p = unsafe.Pointer(&zeroVal[0])
		} else {
			p = unsafe_New(t)
		}
		return Value{t, p, fl | flagIndir}
	}
	return Value{t, nil, fl}
}
2995
2996
// maxZero is the maximum size (in bytes) of a type whose zero value
// Zero can serve from the shared zeroVal buffer below.
const maxZero = 1024

// zeroVal is an all-zero buffer shared by Zero for types of size <= maxZero.
// Set detects pointers into it and uses typedmemclr instead of copying.
var zeroVal [maxZero]byte
3001
3002
3003
// New returns a Value representing a pointer to a new zero value
// for the specified type. That is, the returned Value's Type is PointerTo(typ).
func New(typ Type) Value {
	if typ == nil {
		panic("reflect: New(nil)")
	}
	t := typ.(*rtype)
	pt := t.ptrTo()
	if ifaceIndir(pt) {
		// This is a pointer to a not-in-heap type.
		panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
	}
	ptr := unsafe_New(t)
	fl := flag(Pointer)
	return Value{pt, ptr, fl}
}
3018
3019
3020
// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer.
func NewAt(typ Type, p unsafe.Pointer) Value {
	fl := flag(Pointer)
	t := typ.(*rtype)
	return Value{t.ptrTo(), p, fl}
}
3026
3027
3028
3029
3030
// assignTo returns a value v that can be assigned directly to dst.
// It panics if v is not assignable to dst.
// For a conversion to an interface type, target, if not nil,
// is a suggested scratch space to use.
func (v Value) assignTo(context string, dst *rtype, target unsafe.Pointer) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue(context, v)
	}

	switch {
	case directlyAssignable(dst, v.typ):
		// Overwrite type so that they match.
		// Same memory layout, so no harm done.
		fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
		fl |= flag(dst.Kind())
		return Value{dst, v.ptr, fl}

	case implements(dst, v.typ):
		if target == nil {
			target = unsafe_New(dst)
		}
		if v.Kind() == Interface && v.IsNil() {
			// A nil ReadWriter passed to nil Reader is OK,
			// but using ifaceE2I below will panic.
			// Avoid the panic by returning a nil dst (e.g., Reader) explicitly.
			return Value{dst, nil, flag(Interface)}
		}
		x := valueInterface(v, false)
		if dst.NumMethod() == 0 {
			*(*any)(target) = x
		} else {
			ifaceE2I(dst, x, target)
		}
		return Value{dst, target, flagIndir | flag(Interface)}
	}

	// Failed.
	panic(context + ": value of type " + v.typ.String() + " is not assignable to type " + dst.String())
}
3066
3067
3068
3069
// Convert returns the value v converted to type t.
// If the usual Go conversion rules do not allow conversion
// of the value v to type t, or if converting v to type t panics, Convert panics.
func (v Value) Convert(t Type) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue("Convert", v)
	}
	op := convertOp(t.common(), v.typ)
	if op == nil {
		panic("reflect.Value.Convert: value of type " + v.typ.String() + " cannot be converted to type " + t.String())
	}
	return op(v, t)
}
3080
3081
3082
3083 func (v Value) CanConvert(t Type) bool {
3084 vt := v.Type()
3085 if !vt.ConvertibleTo(t) {
3086 return false
3087 }
3088
3089
3090
3091 if vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array {
3092 n := t.Elem().Len()
3093 if n > v.Len() {
3094 return false
3095 }
3096 }
3097 return true
3098 }
3099
3100
3101
// convertOp returns the function to convert a value of type src
// to a value of type dst. If the conversion is illegal, convertOp returns nil.
func convertOp(dst, src *rtype) func(Value, Type) Value {
	switch src.Kind() {
	case Int, Int8, Int16, Int32, Int64:
		switch dst.Kind() {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtInt
		case Float32, Float64:
			return cvtIntFloat
		case String:
			return cvtIntString
		}

	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		switch dst.Kind() {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtUint
		case Float32, Float64:
			return cvtUintFloat
		case String:
			return cvtUintString
		}

	case Float32, Float64:
		switch dst.Kind() {
		case Int, Int8, Int16, Int32, Int64:
			return cvtFloatInt
		case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtFloatUint
		case Float32, Float64:
			return cvtFloat
		}

	case Complex64, Complex128:
		switch dst.Kind() {
		case Complex64, Complex128:
			return cvtComplex
		}

	case String:
		// string -> []byte / []rune, but only for unnamed element types
		// (PkgPath() == "" rejects defined byte/rune types).
		if dst.Kind() == Slice && dst.Elem().PkgPath() == "" {
			switch dst.Elem().Kind() {
			case Uint8:
				return cvtStringBytes
			case Int32:
				return cvtStringRunes
			}
		}

	case Slice:
		// []byte / []rune -> string, with the same unnamed-element restriction.
		if dst.Kind() == String && src.Elem().PkgPath() == "" {
			switch src.Elem().Kind() {
			case Uint8:
				return cvtBytesString
			case Int32:
				return cvtRunesString
			}
		}
		// "x is a slice, T is a pointer-to-array type,
		// and the slice and array types have identical element types."
		if dst.Kind() == Pointer && dst.Elem().Kind() == Array && src.Elem() == dst.Elem().Elem() {
			return cvtSliceArrayPtr
		}

	case Chan:
		if dst.Kind() == Chan && specialChannelAssignability(dst, src) {
			return cvtDirect
		}
	}

	// dst and src have same underlying type.
	if haveIdenticalUnderlyingType(dst, src, false) {
		return cvtDirect
	}

	// dst and src are non-defined pointer types with same underlying base type.
	if dst.Kind() == Pointer && dst.Name() == "" &&
		src.Kind() == Pointer && src.Name() == "" &&
		haveIdenticalUnderlyingType(dst.Elem().common(), src.Elem().common(), false) {
		return cvtDirect
	}

	if implements(dst, src) {
		if src.Kind() == Interface {
			return cvtI2I
		}
		return cvtT2I
	}

	return nil
}
3192
3193
3194
3195 func makeInt(f flag, bits uint64, t Type) Value {
3196 typ := t.common()
3197 ptr := unsafe_New(typ)
3198 switch typ.size {
3199 case 1:
3200 *(*uint8)(ptr) = uint8(bits)
3201 case 2:
3202 *(*uint16)(ptr) = uint16(bits)
3203 case 4:
3204 *(*uint32)(ptr) = uint32(bits)
3205 case 8:
3206 *(*uint64)(ptr) = bits
3207 }
3208 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3209 }
3210
3211
3212
3213 func makeFloat(f flag, v float64, t Type) Value {
3214 typ := t.common()
3215 ptr := unsafe_New(typ)
3216 switch typ.size {
3217 case 4:
3218 *(*float32)(ptr) = float32(v)
3219 case 8:
3220 *(*float64)(ptr) = v
3221 }
3222 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3223 }
3224
3225
3226 func makeFloat32(f flag, v float32, t Type) Value {
3227 typ := t.common()
3228 ptr := unsafe_New(typ)
3229 *(*float32)(ptr) = v
3230 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3231 }
3232
3233
3234
3235 func makeComplex(f flag, v complex128, t Type) Value {
3236 typ := t.common()
3237 ptr := unsafe_New(typ)
3238 switch typ.size {
3239 case 8:
3240 *(*complex64)(ptr) = complex64(v)
3241 case 16:
3242 *(*complex128)(ptr) = v
3243 }
3244 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3245 }
3246
3247 func makeString(f flag, v string, t Type) Value {
3248 ret := New(t).Elem()
3249 ret.SetString(v)
3250 ret.flag = ret.flag&^flagAddr | f
3251 return ret
3252 }
3253
3254 func makeBytes(f flag, v []byte, t Type) Value {
3255 ret := New(t).Elem()
3256 ret.SetBytes(v)
3257 ret.flag = ret.flag&^flagAddr | f
3258 return ret
3259 }
3260
3261 func makeRunes(f flag, v []rune, t Type) Value {
3262 ret := New(t).Elem()
3263 ret.setRunes(v)
3264 ret.flag = ret.flag&^flagAddr | f
3265 return ret
3266 }
3267
3268
3269
3270
3271
3272
3273
3274 func cvtInt(v Value, t Type) Value {
3275 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3276 }
3277
3278
3279 func cvtUint(v Value, t Type) Value {
3280 return makeInt(v.flag.ro(), v.Uint(), t)
3281 }
3282
3283
3284 func cvtFloatInt(v Value, t Type) Value {
3285 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3286 }
3287
3288
3289 func cvtFloatUint(v Value, t Type) Value {
3290 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3291 }
3292
3293
3294 func cvtIntFloat(v Value, t Type) Value {
3295 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3296 }
3297
3298
3299 func cvtUintFloat(v Value, t Type) Value {
3300 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3301 }
3302
3303
// convertOp: floatN -> floatN
func cvtFloat(v Value, t Type) Value {
	if v.Type().Kind() == Float32 && t.Kind() == Float32 {
		// Don't do any conversion if both types have underlying type
		// float32: copy the raw bits directly instead. Converting to
		// float64 and back would turn a signaling NaN into a quiet NaN,
		// changing the bit pattern.
		return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
	}
	return makeFloat(v.flag.ro(), v.Float(), t)
}
3313
3314
3315 func cvtComplex(v Value, t Type) Value {
3316 return makeComplex(v.flag.ro(), v.Complex(), t)
3317 }
3318
3319
3320 func cvtIntString(v Value, t Type) Value {
3321 s := "\uFFFD"
3322 if x := v.Int(); int64(rune(x)) == x {
3323 s = string(rune(x))
3324 }
3325 return makeString(v.flag.ro(), s, t)
3326 }
3327
3328
3329 func cvtUintString(v Value, t Type) Value {
3330 s := "\uFFFD"
3331 if x := v.Uint(); uint64(rune(x)) == x {
3332 s = string(rune(x))
3333 }
3334 return makeString(v.flag.ro(), s, t)
3335 }
3336
3337
3338 func cvtBytesString(v Value, t Type) Value {
3339 return makeString(v.flag.ro(), string(v.Bytes()), t)
3340 }
3341
3342
3343 func cvtStringBytes(v Value, t Type) Value {
3344 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3345 }
3346
3347
3348 func cvtRunesString(v Value, t Type) Value {
3349 return makeString(v.flag.ro(), string(v.runes()), t)
3350 }
3351
3352
3353 func cvtStringRunes(v Value, t Type) Value {
3354 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3355 }
3356
3357
3358 func cvtSliceArrayPtr(v Value, t Type) Value {
3359 n := t.Elem().Len()
3360 if n > v.Len() {
3361 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to pointer to array with length " + itoa.Itoa(n))
3362 }
3363 h := (*unsafeheader.Slice)(v.ptr)
3364 return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
3365 }
3366
3367
3368 func cvtDirect(v Value, typ Type) Value {
3369 f := v.flag
3370 t := typ.common()
3371 ptr := v.ptr
3372 if f&flagAddr != 0 {
3373
3374 c := unsafe_New(t)
3375 typedmemmove(t, c, ptr)
3376 ptr = c
3377 f &^= flagAddr
3378 }
3379 return Value{t, ptr, v.flag.ro() | f}
3380 }
3381
3382
3383 func cvtT2I(v Value, typ Type) Value {
3384 target := unsafe_New(typ.common())
3385 x := valueInterface(v, false)
3386 if typ.NumMethod() == 0 {
3387 *(*any)(target) = x
3388 } else {
3389 ifaceE2I(typ.(*rtype), x, target)
3390 }
3391 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3392 }
3393
3394
3395 func cvtI2I(v Value, typ Type) Value {
3396 if v.IsNil() {
3397 ret := Zero(typ)
3398 ret.flag |= v.flag.ro()
3399 return ret
3400 }
3401 return cvtT2I(v.Elem(), typ)
3402 }
3403
3404
// The declarations below have no Go body: they are implemented in the
// runtime package and linked here by the linker.

// chancap returns the capacity of the channel ch.
func chancap(ch unsafe.Pointer) int

// chanclose closes the channel ch.
func chanclose(ch unsafe.Pointer)

// chanlen returns the number of elements queued in the channel ch.
func chanlen(ch unsafe.Pointer) int

// chanrecv receives from channel ch into val; nb selects non-blocking mode.
// NOTE(review): result semantics are assumed to match runtime.chanrecv
// (selected = a case fired, received = a real value rather than a
// closed-channel zero) — confirm against the runtime.
func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)

// chansend sends val on channel ch; nb selects non-blocking mode.
func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool

// makechan creates a channel of type typ with buffer size size.
func makechan(typ *rtype, size int) (ch unsafe.Pointer)

// makemap creates a map of type t with initial size hint cap.
func makemap(t *rtype, cap int) (m unsafe.Pointer)
3425
3426
// mapaccess returns a pointer to the value for key in map m,
// or nil if the key is absent. Implemented in the runtime.
func mapaccess(t *rtype, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)

// mapaccess_faststr is mapaccess specialized for string keys.
func mapaccess_faststr(t *rtype, m unsafe.Pointer, key string) (val unsafe.Pointer)

// mapassign stores val under key in map m. Implemented in the runtime.
func mapassign(t *rtype, m unsafe.Pointer, key, val unsafe.Pointer)

// mapassign_faststr is mapassign specialized for string keys.
func mapassign_faststr(t *rtype, m unsafe.Pointer, key string, val unsafe.Pointer)

// mapdelete removes key from map m. Implemented in the runtime.
func mapdelete(t *rtype, m unsafe.Pointer, key unsafe.Pointer)

// mapdelete_faststr is mapdelete specialized for string keys.
func mapdelete_faststr(t *rtype, m unsafe.Pointer, key string)

// mapiterinit initializes it for iterating over map m.
func mapiterinit(t *rtype, m unsafe.Pointer, it *hiter)

// mapiterkey returns a pointer to the key of the iterator's current entry.
func mapiterkey(it *hiter) (key unsafe.Pointer)

// mapiterelem returns a pointer to the value of the iterator's current entry.
func mapiterelem(it *hiter) (elem unsafe.Pointer)

// mapiternext advances the iterator it to the next entry.
func mapiternext(it *hiter)

// maplen returns the number of entries in map m.
func maplen(m unsafe.Pointer) int
3458
3459
3460
3461
3462
3463
3464
3465
3466
3467
3468
3469
3470
3471
3472
3473
3474
3475
3476
3477
3478
3479
3480
3481
3482
3483
// call invokes the function f with the stack argument frame described by
// stackArgsType and located at stackArgs (stackArgsSize bytes of
// arguments, results starting at stackRetOffset within a frame of
// frameSize bytes), plus register-passed arguments in regArgs.
// Implemented in the runtime.
func call(stackArgsType *rtype, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)

// ifaceE2I converts the empty-interface value src to the interface type t,
// writing the result to dst. Implemented in the runtime.
func ifaceE2I(t *rtype, src any, dst unsafe.Pointer)

// memmove copies size bytes from src to dst. Implemented in the runtime.
func memmove(dst, src unsafe.Pointer, size uintptr)

// typedmemmove copies a value of type t from src to dst.
func typedmemmove(t *rtype, dst, src unsafe.Pointer)

// typedmemmovepartial is like typedmemmove but copies only size bytes
// starting at byte offset off within the value.
func typedmemmovepartial(t *rtype, dst, src unsafe.Pointer, off, size uintptr)

// typedmemclr zeroes the value of type t at ptr.
func typedmemclr(t *rtype, ptr unsafe.Pointer)

// typedmemclrpartial is like typedmemclr but clears only size bytes
// starting at byte offset off within the value.
func typedmemclrpartial(t *rtype, ptr unsafe.Pointer, off, size uintptr)

// typedslicecopy copies a slice of elemType values from src to dst,
// returning the number of elements copied.
func typedslicecopy(elemType *rtype, dst, src unsafeheader.Slice) int

// typehash computes the hash of the value of type t at p, seeded with h.
func typehash(t *rtype, p unsafe.Pointer, h uintptr) uintptr

// verifyNotInHeapPtr checks the pointer p for a runtime not-in-heap
// constraint. NOTE(review): exact semantics live in the runtime — confirm.
func verifyNotInHeapPtr(p uintptr) bool
3519
3520
3521
3522
// escapes makes sure the compiler's escape analysis treats x as escaping
// to the heap. The conditional store never runs in practice (dummy.b is
// never set in this file), but the compiler must assume it can, so x is
// forced to escape. Do not simplify this body: its shape is the point.
func escapes(x any) {
	if dummy.b {
		dummy.x = x
	}
}
3528
// dummy is the sink used by escapes; it exists only so the compiler
// cannot prove the store in escapes is dead. Its fields are never
// written in this file.
var dummy struct {
	b bool
	x any
}
3533