src/runtime/mbitmap.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Garbage collector: type and heap bitmaps.
//
// Stack, data, and bss bitmaps
//
// Stack frames and global variables in the data and bss sections are
// described by bitmaps with 1 bit per pointer-sized word. A "1" bit
// means the word is a live pointer to be visited by the GC (referred to
// as "pointer"). A "0" bit means the word should be ignored by GC
// (referred to as "scalar", though it could be a dead pointer value).
//
// Heap bitmap
//
// The heap bitmap comprises 2 bits for each pointer-sized word in the heap,
// stored in the heapArena metadata backing each heap arena.
// That is, if ha is the heapArena for the arena starting at start,
// then ha.bitmap[0] holds the 2-bit entries for the four words start
// through start+3*ptrSize, ha.bitmap[1] holds the entries for
// start+4*ptrSize through start+7*ptrSize, and so on.
//
// In each 2-bit entry, the lower bit is a pointer/scalar bit, just
// like in the stack/data bitmaps described above. The upper bit
// indicates scan/dead: a "1" value ("scan") indicates that there may
// be pointers in later words of the allocation, while a "0" value
// ("dead") indicates there are no more pointers in the allocation. If
// the upper bit is 0, the lower bit must also be 0, and this
// indicates that scanning can ignore the rest of the allocation.
//
// The 2-bit entries are split when written into the byte, so that the top
// half of the byte contains 4 high (scan) bits and the bottom half contains
// 4 low (pointer) bits. This form allows a copy from the 1-bit to the 4-bit
// form to keep the pointer bits contiguous, instead of having to space them
// out.
//
// The code makes use of the fact that the zero value for a heap
// bitmap means scalar/dead. This property must be preserved when
// modifying the encoding.
//
// The bitmap for noscan spans is not maintained. Code must ensure
// that an object is scannable before consulting its bitmap by
// checking either the noscan bit in the span or by consulting its
// type's information.

package runtime

import (
	"internal/goarch"
	"runtime/internal/atomic"
	"runtime/internal/sys"
	"unsafe"
)

const (
	bitPointer = 1 << 0
	bitScan    = 1 << 4

	heapBitsShift      = 1     // shift offset between successive bitPointer or bitScan entries
	wordsPerBitmapByte = 8 / 2 // heap words described by one bitmap byte

	// all scan/pointer bits in a byte
	bitScanAll    = bitScan | bitScan<<heapBitsShift | bitScan<<(2*heapBitsShift) | bitScan<<(3*heapBitsShift)
	bitPointerAll = bitPointer | bitPointer<<heapBitsShift | bitPointer<<(2*heapBitsShift) | bitPointer<<(3*heapBitsShift)
)
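
// Illustrative sketch (not part of the original source): given the constants
// above, one bitmap byte describes four heap words, with the four pointer
// bits in the low nibble and the four scan bits in the high nibble. A
// hypothetical helper packing that layout:
//
//	func packHeapByte(ptr, scan [4]bool) (b uint8) {
//		for i := uint(0); i < 4; i++ {
//			if ptr[i] {
//				b |= bitPointer << (i * heapBitsShift)
//			}
//			if scan[i] {
//				b |= bitScan << (i * heapBitsShift)
//			}
//		}
//		return b
//	}
//
// For example, a word sequence pointer/scan, scalar/scan, scalar/dead,
// scalar/dead packs to 0x31: pointer bits 0001, scan bits 0011.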

// addb returns the byte pointer p+n.
//go:nowritebarrier
//go:nosplit
func addb(p *byte, n uintptr) *byte {
	// Note: wrote out full expression instead of calling add(p, n)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + n))
}

// subtractb returns the byte pointer p-n.
//go:nowritebarrier
//go:nosplit
func subtractb(p *byte, n uintptr) *byte {
	// Note: wrote out full expression instead of calling add(p, -n)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - n))
}

// add1 returns the byte pointer p+1.
//go:nowritebarrier
//go:nosplit
func add1(p *byte) *byte {
	// Note: wrote out full expression instead of calling addb(p, 1)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + 1))
}

// subtract1 returns the byte pointer p-1.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nowritebarrier
//go:nosplit
func subtract1(p *byte) *byte {
	// Note: wrote out full expression instead of calling subtractb(p, 1)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - 1))
}

// heapBits provides access to the bitmap bits for a single heap word.
// The methods on heapBits take value receivers so that the compiler
// can more easily inline calls to those methods and registerize the
// struct fields independently.
type heapBits struct {
	bitp  *uint8
	shift uint32
	arena uint32 // Index of heap arena containing bitp
	last  *uint8 // Last byte arena's bitmap
}

// Make the compiler check that heapBits.arena is large enough to hold
// the maximum arena frame number.
var _ = heapBits{arena: (1<<heapAddrBits)/heapArenaBytes - 1}

// markBits provides access to the mark bit for an object in the heap.
// bytep points to the byte holding the mark bit.
// mask is a byte with a single bit set that can be &ed with *bytep
// to see if the bit has been set.
// *m.byte&m.mask != 0 indicates the mark bit is set.
// index can be used along with span information to generate
// the free object in the span.
type markBits struct {
	bytep *uint8
	mask  uint8
	index uintptr
}

//go:nosplit
func (s *mspan) allocBitsForIndex(allocBitIndex uintptr) markBits {
	bytep, mask := s.allocBits.bitp(allocBitIndex)
	return markBits{bytep, mask, allocBitIndex}
}

// refillAllocCache takes 8 bytes s.allocBits starting at whichByte
// and negates them so that ctz (count trailing zeros) instructions
// can be used. It then places these 8 bytes into the cached 64 bit
// s.allocCache.
func (s *mspan) refillAllocCache(whichByte uintptr) {
	bytes := (*[8]uint8)(unsafe.Pointer(s.allocBits.bytep(whichByte)))
	aCache := uint64(0)
	aCache |= uint64(bytes[0])
	aCache |= uint64(bytes[1]) << (1 * 8)
	aCache |= uint64(bytes[2]) << (2 * 8)
	aCache |= uint64(bytes[3]) << (3 * 8)
	aCache |= uint64(bytes[4]) << (4 * 8)
	aCache |= uint64(bytes[5]) << (5 * 8)
	aCache |= uint64(bytes[6]) << (6 * 8)
	aCache |= uint64(bytes[7]) << (7 * 8)
	s.allocCache = ^aCache
}
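
// Illustrative sketch (not part of the original source): because
// refillAllocCache stores the *negated* alloc bits, a set bit in
// s.allocCache means "free slot", so counting trailing zeros finds the next
// free object directly. A self-contained model of the trick, using math/bits
// in place of the runtime-internal sys.Ctz64:
//
//	package main
//
//	import (
//		"fmt"
//		"math/bits"
//	)
//
//	func main() {
//		allocBits := uint64(0b1011_0111) // 1 = allocated, 0 = free
//		cache := ^allocBits              // 1 now means free
//		fmt.Println(bits.TrailingZeros64(cache)) // 3: first free slot
//	}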

// nextFreeIndex returns the index of the next free object in the span at
// or after s.freeindex.
// There are hardware instructions that can be used to make this
// faster if profiling warrants it.
func (s *mspan) nextFreeIndex() uintptr {
	sfreeindex := s.freeindex
	snelems := s.nelems
	if sfreeindex == snelems {
		return sfreeindex
	}
	if sfreeindex > snelems {
		throw("s.freeindex > s.nelems")
	}

	aCache := s.allocCache

	bitIndex := sys.Ctz64(aCache)
	for bitIndex == 64 {
		// Move index to start of next cached bits.
		sfreeindex = (sfreeindex + 64) &^ (64 - 1)
		if sfreeindex >= snelems {
			s.freeindex = snelems
			return snelems
		}
		whichByte := sfreeindex / 8
		// Refill s.allocCache with the next 64 alloc bits.
		s.refillAllocCache(whichByte)
		aCache = s.allocCache
		bitIndex = sys.Ctz64(aCache)
		// nothing available in cached bits
		// grab the next 8 bytes and try again.
	}
	result := sfreeindex + uintptr(bitIndex)
	if result >= snelems {
		s.freeindex = snelems
		return snelems
	}

	s.allocCache >>= uint(bitIndex + 1)
	sfreeindex = result + 1

	if sfreeindex%64 == 0 && sfreeindex != snelems {
		// We just incremented s.freeindex so it isn't 0.
		// As each 1 in s.allocCache was encountered and used for allocation
		// it was shifted away. At this point s.allocCache contains all 0s.
		// Refill s.allocCache so that it corresponds
		// to the bits at s.allocBits starting at s.freeindex.
		whichByte := sfreeindex / 8
		s.refillAllocCache(whichByte)
	}
	s.freeindex = sfreeindex
	return result
}
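
// Illustrative sketch (not part of the original source): the shape of
// nextFreeIndex, reduced to a standalone allocator over one 64-object
// cache, with math/bits standing in for sys.Ctz64. The cache is consumed
// left to right by shifting past each slot handed out, exactly as above:
//
//	type miniSpan struct {
//		allocCache uint64 // 1 = free, as in the runtime
//		freeindex  uint64
//	}
//
//	// nextFree returns the next free slot index, or 64 if none remain cached.
//	func (s *miniSpan) nextFree() uint64 {
//		bit := uint64(bits.TrailingZeros64(s.allocCache))
//		if bit == 64 {
//			return 64 // cache exhausted; the runtime refills from allocBits here
//		}
//		idx := s.freeindex + bit
//		s.allocCache >>= bit + 1 // consume the allocated prefix and this slot
//		s.freeindex = idx + 1
//		return idx
//	}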

// isFree reports whether the index'th object in s is unallocated.
//
// The caller must ensure s.state is mSpanInUse, and there must have
// been no preemption points since ensuring this (which could allow a
// GC transition, which would allow the state to change).
func (s *mspan) isFree(index uintptr) bool {
	if index < s.freeindex {
		return false
	}
	bytep, mask := s.allocBits.bitp(index)
	return *bytep&mask == 0
}

// divideByElemSize returns n/s.elemsize.
// n must be within [0, s.npages*_PageSize),
// or may be exactly s.npages*_PageSize
// if s.elemsize is from sizeclasses.go.
func (s *mspan) divideByElemSize(n uintptr) uintptr {
	const doubleCheck = false

	// See explanation in mksizeclasses.go's computeDivisionConstants.
	q := uintptr((uint64(n) * uint64(s.divMul)) >> 32)

	if doubleCheck && q != n/s.elemsize {
		println(n, "/", s.elemsize, "should be", n/s.elemsize, "but got", q)
		throw("bad magic division")
	}
	return q
}
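
// Illustrative sketch (not part of the original source): the magic multiply
// above replaces a division by elemsize with a 32.32 fixed-point
// multiplication. A divMul that works for bounded offsets can be computed as
// ceil(2^32 / elemsize); this standalone model (hypothetical names) mirrors
// the doubleCheck branch:
//
//	func divMagic(elemsize uint64) uint64 {
//		return (1<<32 + elemsize - 1) / elemsize // ceil(2^32 / elemsize)
//	}
//
//	func divide(n, elemsize uint64) uint64 {
//		return (n * divMagic(elemsize)) >> 32
//	}
//
// e.g. divide(144, 48) == 3. The real constants are generated by
// mksizeclasses.go, which also verifies the result is exact for every offset
// that can occur in a span of that size class.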

func (s *mspan) objIndex(p uintptr) uintptr {
	return s.divideByElemSize(p - s.base())
}

func markBitsForAddr(p uintptr) markBits {
	s := spanOf(p)
	objIndex := s.objIndex(p)
	return s.markBitsForIndex(objIndex)
}

func (s *mspan) markBitsForIndex(objIndex uintptr) markBits {
	bytep, mask := s.gcmarkBits.bitp(objIndex)
	return markBits{bytep, mask, objIndex}
}

func (s *mspan) markBitsForBase() markBits {
	return markBits{(*uint8)(s.gcmarkBits), uint8(1), 0}
}

// isMarked reports whether mark bit m is set.
func (m markBits) isMarked() bool {
	return *m.bytep&m.mask != 0
}

// setMarked sets the marked bit in the markbits, atomically.
func (m markBits) setMarked() {
	// Might be racing with other updates, so use atomic update always.
	// We used to be clever here and use a non-atomic update in certain
	// cases, but it's not worth the risk.
	atomic.Or8(m.bytep, m.mask)
}

// setMarkedNonAtomic sets the marked bit in the markbits, non-atomically.
func (m markBits) setMarkedNonAtomic() {
	*m.bytep |= m.mask
}

// clearMarked clears the marked bit in the markbits, atomically.
func (m markBits) clearMarked() {
	// Might be racing with other updates, so use atomic update always.
	// We used to be clever here and use a non-atomic update in certain
	// cases, but it's not worth the risk.
	atomic.And8(m.bytep, ^m.mask)
}

// markBitsForSpan returns the markBits for the span base address base.
func markBitsForSpan(base uintptr) (mbits markBits) {
	mbits = markBitsForAddr(base)
	if mbits.mask != 1 {
		throw("markBitsForSpan: unaligned start")
	}
	return mbits
}

// advance advances the markBits to the next object in the span.
func (m *markBits) advance() {
	if m.mask == 1<<7 {
		m.bytep = (*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(m.bytep)) + 1))
		m.mask = 1
	} else {
		m.mask = m.mask << 1
	}
	m.index++
}

// heapBitsForAddr returns the heapBits for the address addr.
// The caller must ensure addr is in an allocated span.
// In particular, be careful not to point past the end of an object.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func heapBitsForAddr(addr uintptr) (h heapBits) {
	// 2 bits per word, 4 pairs per byte, and a mask is hard coded.
	arena := arenaIndex(addr)
	ha := mheap_.arenas[arena.l1()][arena.l2()]
	// The compiler uses a load for nil checking ha, but in this
	// case we'll almost never hit that cache line again, so it
	// makes more sense to do a value check.
	if ha == nil {
		// addr is not in the heap. Return nil heapBits, which
		// we expect to crash in the caller.
		return
	}
	h.bitp = &ha.bitmap[(addr/(goarch.PtrSize*4))%heapArenaBitmapBytes]
	h.shift = uint32((addr / goarch.PtrSize) & 3)
	h.arena = uint32(arena)
	h.last = &ha.bitmap[len(ha.bitmap)-1]
	return
}
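
// Illustrative sketch (not part of the original source): the index
// arithmetic above, pulled out into a standalone function. On a 64-bit
// machine (8-byte words), each bitmap byte covers 32 bytes of arena, so an
// address maps to a (byte offset, shift) pair within its arena:
//
//	const ptrSize = 8 // goarch.PtrSize on 64-bit
//
//	func bitmapCoords(addr, arenaBase, arenaBitmapBytes uintptr) (byteIdx uintptr, shift uint32) {
//		off := addr - arenaBase
//		byteIdx = (off / (ptrSize * 4)) % arenaBitmapBytes
//		shift = uint32((off / ptrSize) & 3)
//		return
//	}
//
// Word 0 of an object at arenaBase+0x40 lands at byteIdx 2, shift 0; the
// next word is byteIdx 2, shift 1, matching what heapBits.next computes.
// (The real code uses the absolute address directly, relying on arenas being
// aligned to heapArenaBytes.)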

// clobberdeadPtr is a special value that is used by the compiler to
// clobber dead stack slots, when -clobberdead flag is set.
const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32))

// badPointer throws bad pointer in heap panic.
func badPointer(s *mspan, p, refBase, refOff uintptr) {
	// Typically this indicates an incorrect use
	// of unsafe or cgo to store a bad pointer in
	// the Go heap. It may also indicate a runtime
	// bug.
	//
	// TODO(austin): We could be more aggressive
	// and detect pointers to unallocated objects
	// in unused spans.
	printlock()
	print("runtime: pointer ", hex(p))
	if s != nil {
		state := s.state.get()
		if state != mSpanInUse {
			print(" to unallocated span")
		} else {
			print(" to unused region of span")
		}
		print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
	}
	print("\n")
	if refBase != 0 {
		print("runtime: found in object at *(", hex(refBase), "+", hex(refOff), ")\n")
		gcDumpObject("object", refBase, refOff)
	}
	getg().m.traceback = 2
	throw("found bad pointer in Go heap (incorrect use of unsafe or cgo?)")
}

// findObject returns the base address for the heap object containing
// the address p, the object's span, and the index of the object in s.
// If p does not point into a heap object, it returns base == 0.
//
// If p points to an invalid heap pointer and debug.invalidptr != 0,
// findObject panics.
//
// refBase and refOff optionally give the base address of the object
// in which the pointer p was found and the byte offset at which it
// was found. These are used for error reporting.
//
// It is nosplit so it is safe for p to be a pointer to the current
// goroutine's stack. Since p is a uintptr and carries no debug
// information, the caller is responsible for keeping its referent alive.
//go:nosplit
func findObject(p, refBase, refOff uintptr) (base uintptr, s *mspan, objIndex uintptr) {
	s = spanOf(p)
	// If s is nil, the virtual address has never been part of the heap.
	// This pointer may be to some mmap'd region, so we allow it.
	if s == nil {
		if (GOARCH == "amd64" || GOARCH == "arm64") && p == clobberdeadPtr && debug.invalidptr != 0 {
			// Crash if clobberdeadPtr is seen. Only on AMD64 and ARM64 for now,
			// as they are the only platforms where the compiler's clobberdead
			// mode is implemented. On these platforms clobberdeadPtr cannot
			// be a valid address.
			badPointer(s, p, refBase, refOff)
		}
		return
	}
	// If p is a bad pointer, it may not be in s's bounds.
	//
	// Check s.state to synchronize with span initialization
	// before checking other fields. See also spanOfHeap.
	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		// Pointers into stacks are also ok, the runtime manages these explicitly.
		if state == mSpanManual {
			return
		}
		// The following ensures that we are rigorous about what data
		// structures hold valid pointers.
		if debug.invalidptr != 0 {
			badPointer(s, p, refBase, refOff)
		}
		return
	}

	objIndex = s.objIndex(p)
	base = s.base() + objIndex*s.elemsize
	return
}

// verifyNotInHeapPtr reports whether converting the not-in-heap pointer into a unsafe.Pointer is ok.
//go:linkname reflect_verifyNotInHeapPtr reflect.verifyNotInHeapPtr
func reflect_verifyNotInHeapPtr(p uintptr) bool {
	// Conversion to a pointer is ok as long as findObject above does not call badPointer.
	// Since we're already promised that p doesn't point into the heap, just disallow heap
	// pointers and the special clobbered pointer.
	return spanOf(p) == nil && p != clobberdeadPtr
}

// next returns the heapBits describing the next pointer-sized word in memory.
// That is, if h describes address p, h.next() describes p+ptrSize.
// Note that next does not modify h. The caller must record the result.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func (h heapBits) next() heapBits {
	if h.shift < 3*heapBitsShift {
		h.shift += heapBitsShift
	} else if h.bitp != h.last {
		h.bitp, h.shift = add1(h.bitp), 0
	} else {
		// Move to the next arena.
		return h.nextArena()
	}
	return h
}

// nextArena advances h to the beginning of the next heap arena.
//
// This is a slow-path helper to next. gc's inliner knows that
// heapBits.next can be inlined even though it calls this. This is
// marked noinline so it doesn't get inlined into next and cause next
// to be too big to inline.
//
//go:nosplit
//go:noinline
func (h heapBits) nextArena() heapBits {
	h.arena++
	ai := arenaIdx(h.arena)
	l2 := mheap_.arenas[ai.l1()]
	if l2 == nil {
		// We just passed the end of the object, which
		// was also the end of the heap. Poison h. It
		// will fault if used.
		return heapBits{}
	}
	ha := l2[ai.l2()]
	if ha == nil {
		return heapBits{}
	}
	h.bitp, h.shift = &ha.bitmap[0], 0
	h.last = &ha.bitmap[len(ha.bitmap)-1]
	return h
}

// forward returns the heapBits describing n pointer-sized words ahead of h in memory.
// That is, if h describes address p, h.forward(n) describes p+n*ptrSize.
// h.forward(1) is equivalent to h.next(), just slower.
// Note that forward does not modify h. The caller must record the result.
//
//go:nosplit
func (h heapBits) forward(n uintptr) heapBits {
	n += uintptr(h.shift) / heapBitsShift
	nbitp := uintptr(unsafe.Pointer(h.bitp)) + n/4
	h.shift = uint32(n%4) * heapBitsShift
	if nbitp <= uintptr(unsafe.Pointer(h.last)) {
		h.bitp = (*uint8)(unsafe.Pointer(nbitp))
		return h
	}

	// We're in a new heap arena.
	past := nbitp - (uintptr(unsafe.Pointer(h.last)) + 1)
	h.arena += 1 + uint32(past/heapArenaBitmapBytes)
	ai := arenaIdx(h.arena)
	if l2 := mheap_.arenas[ai.l1()]; l2 != nil && l2[ai.l2()] != nil {
		a := l2[ai.l2()]
		h.bitp = &a.bitmap[past%heapArenaBitmapBytes]
		h.last = &a.bitmap[len(a.bitmap)-1]
	} else {
		h.bitp, h.last = nil, nil
	}
	return h
}

// forwardOrBoundary is like forward, but stops at boundaries between
// contiguous sections of the bitmap. It returns the number of words
// advanced over, which will be <= n.
func (h heapBits) forwardOrBoundary(n uintptr) (heapBits, uintptr) {
	maxn := 4 * ((uintptr(unsafe.Pointer(h.last)) + 1) - uintptr(unsafe.Pointer(h.bitp)))
	if n > maxn {
		n = maxn
	}
	return h.forward(n), n
}

// bits returns the heap bits for the current word.
// The caller can test morePointers and isPointer by &-ing with bitScan and bitPointer.
// The result includes in its higher bits the bits for subsequent words
// described by the same bitmap byte.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func (h heapBits) bits() uint32 {
	// The (shift & 31) eliminates a test and conditional branch
	// from the generated code.
	return uint32(*h.bitp) >> (h.shift & 31)
}

// morePointers reports whether the scan bit is set for this word: this
// word or a later word in the object may still contain pointers, so
// scanning of the object should continue.
func (h heapBits) morePointers() bool {
	return h.bits()&bitScan != 0
}

// isPointer reports whether the heap bits describe a pointer word.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func (h heapBits) isPointer() bool {
	return h.bits()&bitPointer != 0
}
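
// Illustrative sketch (not part of the original source): decoding one bitmap
// byte into the four (pointer, scan) pairs that bits, isPointer, and
// morePointers consume. shift selects the word within the byte:
//
//	func decodeHeapByte(b uint8) (ptr, scan [4]bool) {
//		for shift := uint(0); shift < 4; shift++ {
//			w := uint32(b) >> shift // as in heapBits.bits
//			ptr[shift] = w&bitPointer != 0
//			scan[shift] = w&bitScan != 0
//		}
//		return
//	}
//
// For 0x31 (the byte packed in the earlier sketch), this yields
// ptr = {true, false, false, false} and scan = {true, true, false, false}.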

// bulkBarrierPreWrite executes a write barrier
// for every pointer slot in the memory range [src, src+size),
// using pointer/scalar information from [dst, dst+size).
// This executes the write barriers necessary before a memmove.
// src, dst, and size must be pointer-aligned.
// The range [dst, dst+size) must lie within a single object.
// It does not perform the actual writes.
//
// As a special case, src == 0 indicates that this is being used for a
// memclr. bulkBarrierPreWrite will pass 0 as the src of each write
// barrier.
//
// Callers should call bulkBarrierPreWrite immediately before
// calling memmove(dst, src, size). This function is marked nosplit
// to avoid being preempted; the GC must not stop the goroutine
// between the memmove and the execution of the barriers.
// The caller is also responsible for cgo pointer checks if this
// may be writing Go pointers into non-Go memory.
//
// The pointer bitmap is not maintained for allocations containing
// no pointers at all; any caller of bulkBarrierPreWrite must first
// make sure the underlying allocation contains pointers, usually
// by checking typ.ptrdata.
//
// Callers must perform cgo checks if writeBarrier.cgo.
//
//go:nosplit
func bulkBarrierPreWrite(dst, src, size uintptr) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.needed {
		return
	}
	if s := spanOf(dst); s == nil {
		// If dst is a global, use the data or BSS bitmaps to
		// execute write barriers.
		for _, datap := range activeModules() {
			if datap.data <= dst && dst < datap.edata {
				bulkBarrierBitmap(dst, src, size, dst-datap.data, datap.gcdatamask.bytedata)
				return
			}
		}
		for _, datap := range activeModules() {
			if datap.bss <= dst && dst < datap.ebss {
				bulkBarrierBitmap(dst, src, size, dst-datap.bss, datap.gcbssmask.bytedata)
				return
			}
		}
		return
	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		// dst was heap memory at some point, but isn't now.
		// It can't be a global. It must be either our stack,
		// or in the case of direct channel sends, it could be
		// another stack. Either way, no new pointers are
		// exposed, so there's no need for barriers.
		return
	}

	buf := &getg().m.p.ptr().wbBuf
	h := heapBitsForAddr(dst)
	if src == 0 {
		for i := uintptr(0); i < size; i += goarch.PtrSize {
			if h.isPointer() {
				dstx := (*uintptr)(unsafe.Pointer(dst + i))
				if !buf.putFast(*dstx, 0) {
					wbBufFlush(nil, 0)
				}
			}
			h = h.next()
		}
	} else {
		for i := uintptr(0); i < size; i += goarch.PtrSize {
			if h.isPointer() {
				dstx := (*uintptr)(unsafe.Pointer(dst + i))
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				if !buf.putFast(*dstx, *srcx) {
					wbBufFlush(nil, 0)
				}
			}
			h = h.next()
		}
	}
}

// bulkBarrierPreWriteSrcOnly is like bulkBarrierPreWrite but
// does not execute write barriers for [dst, dst+size).
//
// In addition to the requirements of bulkBarrierPreWrite
// callers need to ensure [dst, dst+size) is zeroed.
//
// This is used for special cases where e.g. dst was just
// created and zeroed with malloc.
//go:nosplit
func bulkBarrierPreWriteSrcOnly(dst, src, size uintptr) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.needed {
		return
	}
	buf := &getg().m.p.ptr().wbBuf
	h := heapBitsForAddr(dst)
	for i := uintptr(0); i < size; i += goarch.PtrSize {
		if h.isPointer() {
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			if !buf.putFast(0, *srcx) {
				wbBufFlush(nil, 0)
			}
		}
		h = h.next()
	}
}

// bulkBarrierBitmap executes write barriers for copying from [src,
// src+size) to [dst, dst+size) using a 1-bit pointer bitmap. src is
// assumed to start maskOffset bytes into the data covered by the
// bitmap in bits (which may not be a multiple of 8).
//
// This is used by bulkBarrierPreWrite for writes to data and BSS.
//
//go:nosplit
func bulkBarrierBitmap(dst, src, size, maskOffset uintptr, bits *uint8) {
	word := maskOffset / goarch.PtrSize
	bits = addb(bits, word/8)
	mask := uint8(1) << (word % 8)

	buf := &getg().m.p.ptr().wbBuf
	for i := uintptr(0); i < size; i += goarch.PtrSize {
		if mask == 0 {
			bits = addb(bits, 1)
			if *bits == 0 {
				// Skip 8 words.
				i += 7 * goarch.PtrSize
				continue
			}
			mask = 1
		}
		if *bits&mask != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			if src == 0 {
				if !buf.putFast(*dstx, 0) {
					wbBufFlush(nil, 0)
				}
			} else {
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				if !buf.putFast(*dstx, *srcx) {
					wbBufFlush(nil, 0)
				}
			}
		}
		mask <<= 1
	}
}
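
// Illustrative sketch (not part of the original source): the byte-and-mask
// walk used by bulkBarrierBitmap, reduced to a pure function that reports
// which pointer-sized words of a region are pointers according to a 1-bit
// bitmap:
//
//	func pointerWords(bitmap []byte, nwords uint) (idx []uint) {
//		mask := byte(1)
//		b := 0
//		for w := uint(0); w < nwords; w++ {
//			if mask == 0 { // wrapped past bit 7: move to the next byte
//				mask = 1
//				b++
//			}
//			if bitmap[b]&mask != 0 {
//				idx = append(idx, w)
//			}
//			mask <<= 1
//		}
//		return idx
//	}
//
// pointerWords([]byte{0b00000101}, 8) returns [0 2].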

// typeBitsBulkBarrier executes a write barrier for every
// pointer that would be copied from [src, src+size) to [dst,
// dst+size) by a memmove using the type bitmap to locate those
// pointer slots.
//
// The type typ must correspond exactly to [src, src+size) and [dst, dst+size).
// dst, src, and size must be pointer-aligned.
// The type typ must have a plain bitmap, not a GC program.
// The only use of this function is in channel sends, and the
// 64 kB channel element limit takes care of this for us.
//
// Must not be preempted because it typically runs right before memmove,
// and the GC must observe them as an atomic action.
//
// Callers must perform cgo checks if writeBarrier.cgo.
//
//go:nosplit
func typeBitsBulkBarrier(typ *_type, dst, src, size uintptr) {
	if typ == nil {
		throw("runtime: typeBitsBulkBarrier without type")
	}
	if typ.size != size {
		println("runtime: typeBitsBulkBarrier with type ", typ.string(), " of size ", typ.size, " but memory size", size)
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if typ.kind&kindGCProg != 0 {
		println("runtime: typeBitsBulkBarrier with type ", typ.string(), " with GC prog")
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if !writeBarrier.needed {
		return
	}
	ptrmask := typ.gcdata
	buf := &getg().m.p.ptr().wbBuf
	var bits uint32
	for i := uintptr(0); i < typ.ptrdata; i += goarch.PtrSize {
		if i&(goarch.PtrSize*8-1) == 0 {
			bits = uint32(*ptrmask)
			ptrmask = addb(ptrmask, 1)
		} else {
			bits = bits >> 1
		}
		if bits&1 != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			if !buf.putFast(*dstx, *srcx) {
				wbBufFlush(nil, 0)
			}
		}
	}
}

// The methods operating on spans all require that h has been returned
// by heapBitsForAddr and that size, n, total are the span layout description
// returned by the mspan's layout method.
// If total > size*n, it means that there is extra leftover memory in the span,
// usually due to rounding.
//
// TODO(rsc): Perhaps introduce a different heapBitsSpan type.

// initSpan initializes the heap bitmap for a span.
// If this is a span of single pointer allocations, it initializes all
// words to pointer/scan.
// Otherwise, it initializes all words to scalar/dead.
func (h heapBits) initSpan(s *mspan) {
	// Clear the bitmap.
	nw := (s.npages << _PageShift) / goarch.PtrSize
	if nw%wordsPerBitmapByte != 0 {
		throw("initSpan: unaligned length")
	}
	if h.shift != 0 {
		throw("initSpan: unaligned base")
	}
	isPtrs := goarch.PtrSize == 8 && s.elemsize == goarch.PtrSize
	for nw > 0 {
		hNext, anw := h.forwardOrBoundary(nw)
		nbyte := anw / wordsPerBitmapByte
		if isPtrs {
			bitp := h.bitp
			for i := uintptr(0); i < nbyte; i++ {
				*bitp = bitPointerAll | bitScanAll
				bitp = add1(bitp)
			}
		} else {
			memclrNoHeapPointers(unsafe.Pointer(h.bitp), nbyte)
		}
		h = hNext
		nw -= anw
	}
}

// countAlloc returns the number of objects allocated in span s by
// scanning the mark bitmap.
func (s *mspan) countAlloc() int {
	count := 0
	bytes := divRoundUp(s.nelems, 8)
	// Iterate over each 8-byte chunk and count allocations
	// with an intrinsic. Note that newMarkBits guarantees that
	// gcmarkBits will be 8-byte aligned, so we don't have to
	// worry about edge cases, irrelevant bits will simply be zero.
	for i := uintptr(0); i < bytes; i += 8 {
		// Extract 64 bits from the byte pointer and get a OnesCount.
		// Note that the unsafe cast here doesn't preserve endianness,
		// but that's OK. We only care about how many bits are 1, not
		// about the order we discover them in.
		mrkBits := *(*uint64)(unsafe.Pointer(s.gcmarkBits.bytep(i)))
		count += sys.OnesCount64(mrkBits)
	}
	return count
}
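
// Illustrative sketch (not part of the original source): the same
// count-by-popcount idea over a plain byte slice, with math/bits standing in
// for the runtime-internal sys.OnesCount64:
//
//	func countSetBits(bitmap []byte) int {
//		n := 0
//		for _, b := range bitmap {
//			n += bits.OnesCount8(b)
//		}
//		return n
//	}
//
// The runtime version loads 8 bytes at a time instead, which is safe there
// because newMarkBits allocates the bitmap 8-byte aligned and rounded up,
// and any slack bits are guaranteed to be zero.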

// heapBitsSetType records that the new allocation [x, x+size)
// holds in [x, x+dataSize) one or more values of type typ.
// (The number of values is given by dataSize / typ.size.)
// If dataSize < size, the fragment [x+dataSize, x+size) is
// recorded as non-pointer data.
// It is known that the type has pointers somewhere;
// malloc does not call heapBitsSetType when there are no pointers,
// because all free objects are marked as noscan during
// heapBitsSweepSpan.
//
// There can only be one allocation from a given span active at a time,
// and the bitmap for a span always falls on byte boundaries,
// so there are no write-write races for access to the heap bitmap.
// Hence, heapBitsSetType can access the bitmap without atomics.
//
// There can be read-write races between heapBitsSetType and things
// that read the heap bitmap like scanobject. However, since
// heapBitsSetType is only used for objects that have not yet been
// made reachable, readers will ignore bits being modified on this
// address range. (In general, this is OK only because the bits
// being written are the same as the bits they already have.)
func heapBitsSetType(x, size, dataSize uintptr, typ *_type) {
	const doubleCheck = false // slow but helpful; enable to test modifications to this code

	const (
		mask1 = bitPointer | bitScan                        // 00010001
		mask2 = bitPointer | bitScan | mask1<<heapBitsShift // 00110011
		mask3 = bitPointer | bitScan | mask2<<heapBitsShift // 01110111
	)

	// dataSize is always size unless the caller knows that the allocation
	// holds dataSize/typ.size values of type typ followed by padding.
	//
	// The cases below handle 1-, 2-, and 3-word objects specially, so the
	// general unrolling code that follows can assume objects are at least
	// 4 words long and that their bitmaps start either at the beginning
	// of a bitmap byte or exactly halfway in (h.shift of 0 or 2).
	if goarch.PtrSize == 8 && size == goarch.PtrSize {
		// It's one word and it has pointers, it must be a pointer.
		// Since all allocated one-word objects are pointers
		// (non-pointers are aggregated into tinySize allocations),
		// initSpan sets the pointer bits for us. Nothing to do here.
		if doubleCheck {
			h := heapBitsForAddr(x)
			if !h.isPointer() {
				throw("heapBitsSetType: pointer bit missing")
			}
			if !h.morePointers() {
				throw("heapBitsSetType: scan bit missing")
			}
		}
		return
	}

	h := heapBitsForAddr(x)
	ptrmask := typ.gcdata // start of 1-bit pointer mask (or GC program, handled below)

	// 2-word objects only have 4 bitmap bits and 3-word objects only have 6 bitmap bits.
	// Therefore, these objects share a heap bitmap byte with the objects next to them.
	// These are called out as a special case primarily so the code below can assume all
	// objects are at least 4 words long and that their bitmaps start either at the
	// beginning of a bitmap byte, or half-way in (h.shift of 0 and 2 respectively).
	if size == 2*goarch.PtrSize {
		if typ.size == goarch.PtrSize {
			// We're allocating a block big enough to hold two pointers.
			// On 64-bit, that means the actual object must be two pointers,
			// or else we'd have used the one-pointer-sized block.
			// On 32-bit, however, this is the 8-byte block, the smallest one.
			// So it could be that we're allocating one pointer and this was
			// just the smallest block available. Distinguish by checking dataSize.
			// (In general the number of instances of typ being allocated is
			// dataSize/typ.size.)
			if goarch.PtrSize == 4 && dataSize == goarch.PtrSize {
				// 1 pointer object. On 32-bit machines clear the bit for the
				// unused second word.
				*h.bitp &^= (bitPointer | bitScan | (bitPointer|bitScan)<<heapBitsShift) << h.shift
				*h.bitp |= (bitPointer | bitScan) << h.shift
			} else {
				// 2-element array of pointer.
				*h.bitp |= (bitPointer | bitScan | (bitPointer|bitScan)<<heapBitsShift) << h.shift
			}
			return
		}
		// Otherwise typ.size must be 2*goarch.PtrSize,
		// and typ.kind&kindGCProg == 0.
		if doubleCheck {
			if typ.size != 2*goarch.PtrSize || typ.kind&kindGCProg != 0 {
				print("runtime: heapBitsSetType size=", size, " but typ.size=", typ.size, " gcprog=", typ.kind&kindGCProg != 0, "\n")
				throw("heapBitsSetType")
			}
		}
		b := uint32(*ptrmask)
		hb := b & 3
		hb |= bitScanAll & ((bitScan << (typ.ptrdata / goarch.PtrSize)) - 1)
		// Clear the bits for this object so we can set the
		// appropriate ones.
		*h.bitp &^= (bitPointer | bitScan | ((bitPointer | bitScan) << heapBitsShift)) << h.shift
		*h.bitp |= uint8(hb << h.shift)
		return
	} else if size == 3*goarch.PtrSize {
		b := uint8(*ptrmask)
		if doubleCheck {
			if b == 0 {
				println("runtime: invalid type ", typ.string())
				throw("heapBitsSetType: called with non-pointer type")
			}
			if goarch.PtrSize != 8 {
				throw("heapBitsSetType: unexpected 3 pointer wide size class on 32 bit")
			}
			if typ.kind&kindGCProg != 0 {
				throw("heapBitsSetType: unexpected GC prog for 3 pointer wide size class")
			}
			if typ.size == 2*goarch.PtrSize {
				print("runtime: heapBitsSetType size=", size, " but typ.size=", typ.size, "\n")
				throw("heapBitsSetType: inconsistent object sizes")
			}
		}
		if typ.size == goarch.PtrSize {
			// The type contains a pointer otherwise heapBitsSetType wouldn't have been called.
			// Since the type is only 1 pointer wide and contains a pointer, its gcdata must be exactly 1.
			if doubleCheck && *typ.gcdata != 1 {
				print("runtime: heapBitsSetType size=", size, " typ.size=", typ.size, "but *typ.gcdata", *typ.gcdata, "\n")
				throw("heapBitsSetType: unexpected gcdata for 1 pointer wide type size in 3 pointer wide size class")
			}
			// 3 element array of pointers. Unrolling ptrmask yields 00000111.
			b = 7
		}

		hb := b & 7
		// Set bitScan bits for all pointers.
		hb |= hb << wordsPerBitmapByte
		// First bitScan bit is always set since the type contains pointers.
		hb |= bitScan
		// Second bitScan bit needs to also be set if the third bitScan bit is set.
		hb |= hb & (bitScan << (2 * heapBitsShift)) >> 1

		// For h.shift > 1 heap bits cross a byte boundary and need to be written part
		// to h.bitp and part to the next h.bitp.
		switch h.shift {
		case 0:
			*h.bitp &^= mask3 << 0
			*h.bitp |= hb << 0
		case 1:
			*h.bitp &^= mask3 << 1
			*h.bitp |= hb << 1
		case 2:
			*h.bitp &^= mask2 << 2
			*h.bitp |= (hb & mask2) << 2
			// Two words written to the first byte.
			// Advance two words to get to the next byte.
			h = h.next().next()
			*h.bitp &^= mask1
			*h.bitp |= (hb >> 2) & mask1
		case 3:
			*h.bitp &^= mask1 << 3
			*h.bitp |= (hb & mask1) << 3
			// One word written to the first byte.
			// Advance one word to get to the next byte.
			h = h.next()
			*h.bitp &^= mask2
			*h.bitp |= (hb >> 1) & mask2
		}
		return
	}

	// Copy from 1-bit ptrmask into 2-bit bitmap.
	// The basic approach is to use a single uintptr as a bit buffer,
	// alternating between reloading the buffer and writing bitmap bytes.
	// In general, one load can supply two bitmap byte writes.
	// This is a lot of lines of code, but it compiles into relatively few
	// machine instructions.

	outOfPlace := false
	if arenaIndex(x+size-1) != arenaIdx(h.arena) || (doubleCheck && fastrandn(2) == 0) {
		// This object spans heap arenas, so the bitmap may be
		// discontiguous. Unroll it into the object instead
		// and then copy it out.
		//
		// In doubleCheck mode, we randomly do this anyway to
		// stress test the bitmap copying path.
		outOfPlace = true
		h.bitp = (*uint8)(unsafe.Pointer(x))
		h.last = nil
	}

	var (
		// Ptrmask input.
		p     *byte   // last ptrmask byte read
		b     uintptr // ptrmask bits already loaded
		nb    uintptr // number of bits in b at next read
		endp  *byte   // final ptrmask byte to read (then repeat)
		endnb uintptr // number of valid bits in *endp
		pbits uintptr // alternate source of bits

		// Heap bitmap output.
		w     uintptr // words processed
		nw    uintptr // number of words to process
		hbitp *byte   // next heap bitmap byte to write
		hb    uintptr // bits being prepared for *hbitp
	)

	hbitp = h.bitp

	// Handle GC program. Delayed until this part of the code
	// so that we can use the same double-check mechanism as the 1-bit
	// case. Nothing above could have encountered GC programs: the cases
	// above all handle sizes for which there are no GC programs.
	if typ.kind&kindGCProg != 0 {
		heapBitsSetTypeGCProg(h, typ.ptrdata, typ.size, dataSize, size, addb(typ.gcdata, 4))
		if doubleCheck {
			// Double-check the heap bits written by GC program
			// by running the GC program to create a 1-bit pointer mask
			// and then jumping to the double-check code below.
			// This doesn't catch bugs shared between the 1-bit and 4-bit
			// GC program execution, but it does catch mistakes specific
			// to just one of those and bugs in heapBitsSetTypeGCProg's
			// implementation of arrays.
			lock(&debugPtrmask.lock)
			if debugPtrmask.data == nil {
				debugPtrmask.data = (*byte)(persistentalloc(1<<20, 1, &memstats.other_sys))
			}
			ptrmask = debugPtrmask.data
			runGCProg(addb(typ.gcdata, 4), nil, ptrmask, 1)
		}
		goto Phase4
	}

	// Phase 1: Unroll the ptrmask into a bit buffer.
	//
	// typ.gcdata is a 1-bit pointer mask covering typ.ptrdata words.
	// If the object holds several values of type typ (dataSize > typ.size),
	// the mask must conceptually repeat every typ.size/PtrSize words, with
	// typ.size - typ.ptrdata trailing scalar words in each repetition.
	//
	// For short masks, the whole repeated pattern is precomputed into pbits
	// and replayed from a register (signaled by p == nil below). For longer
	// masks, bytes are reloaded from the mask itself, wrapping from endp
	// back to ptrmask at each repetition.
	p = ptrmask
	if typ.size < dataSize {
		// Filling in bits for an array of typ.
		// Set up for repetition of ptrmask during main loop.
		const maxBits = goarch.PtrSize*8 - 7
		if typ.ptrdata/goarch.PtrSize <= maxBits {
			// Entire ptrmask fits in uintptr with room for a byte fragment.
			// Load into pbits and never read from ptrmask again.
			// This is especially important when the ptrmask has
			// fewer than 8 bits in it; otherwise the reload in the middle
			// of the Phase 2 main loop would itself need to loop
			// to gather at least 8 bits.

			// Accumulate ptrmask into b.
			// ptrmask is sized to describe only typ.ptrdata, but we record
			// it as describing typ.size bytes, since all the high bits are zero.
			nb = typ.ptrdata / goarch.PtrSize
			for i := uintptr(0); i < nb; i += 8 {
				b |= uintptr(*p) << i
				p = add1(p)
			}
			nb = typ.size / goarch.PtrSize

			// Replicate ptrmask to fill entire pbits uintptr.
			// Doubling is fewer steps than iterating by nb each time.
			// (nb could be 1.) Since we loaded typ.ptrdata/goarch.PtrSize
			// bits but are pretending to have typ.size/goarch.PtrSize,
			// there might be no replication necessary/possible.
			pbits = b
			endnb = nb
			if nb+nb <= maxBits {
				for endnb <= goarch.PtrSize*8 {
					pbits |= pbits << endnb
					endnb += endnb
				}
				// Truncate to a multiple of original ptrmask.
				// Because nb+nb <= maxBits, nb fits in a byte.
				// Byte division is cheaper than uintptr division.
				endnb = uintptr(maxBits/byte(nb)) * nb
				pbits &= 1<<endnb - 1
				b = pbits
				nb = endnb
			}

			// Clear p and endp as sentinel for using pbits.
			// Checked during Phase 2 main loop.
			p = nil
			endp = nil
		} else {
			// Ptrmask is larger. Read it multiple times.
			n := (typ.ptrdata/goarch.PtrSize+7)/8 - 1
			endp = addb(ptrmask, n)
			endnb = typ.size/goarch.PtrSize - n*8
		}
	}
	if p != nil {
		b = uintptr(*p)
		p = add1(p)
		nb = 8
	}

	if typ.size == dataSize {
		// Single entry: can stop once we reach the non-pointer data.
		nw = typ.ptrdata / goarch.PtrSize
	} else {
		// Repeated instances of typ in an array.
		// Have to process first N-1 entries in full, but can stop
		// once we reach the non-pointer data in the final entry.
		nw = ((dataSize/typ.size-1)*typ.size + typ.ptrdata) / goarch.PtrSize
	}
	if nw == 0 {
		// No pointers! Caller was supposed to check.
		println("runtime: invalid type ", typ.string())
		throw("heapBitsSetType: called with non-pointer type")
		return
	}

	// Special case for the leading byte (shift==0) or half-byte (shift==2).
	// The leading half-byte is special because the bitmap byte is shared
	// with another object, so the bits already there must be preserved.
	switch {
	default:
		throw("heapBitsSetType: unexpected shift")

	case h.shift == 0:
		// Ptrmask and heap bitmap are aligned.
		//
		// This is a fast path for small objects.
		//
		// The first byte we write out covers the first four
		// words of the object. The scan/dead bit on the first
		// word must be set to scan since there are pointers
		// somewhere in the object.
		// In all following words, we set the scan/dead
		// appropriately to indicate that the object continues
		// to the next 2-bit entry in the bitmap.
		//
		// We set four bits at a time here, but if the object
		// is fewer than four words, phase 3 will clear
		// unnecessary bits.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			goto Phase3
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4
		nb -= 4

	case h.shift == 2:
		// Ptrmask and heap bitmap are misaligned.
		//
		// On 32 bit architectures only the 6-word object that corresponds
		// to a 24 bytes size class can start with h.shift of 2 here since
		// all other non 16 byte aligned size classes have been handled by
		// special code paths at the beginning of heapBitsSetType on 32 bit.
		//
		// Many size classes are only 16 byte aligned. On 64 bit architectures
		// this results in a heap bitmap position starting with a h.shift of 2.
		//
		// The bits for the first two words are in a byte shared
		// with another object, so we must be careful with the bits
		// already there.
		//
		// We take two steps here: first we write the first two words,
		// then we enter the main loop for the remaining words.
		hb = (b & (bitPointer | bitPointer<<heapBitsShift)) << (2 * heapBitsShift)
		hb |= bitScan << (2 * heapBitsShift)
		if nw > 1 {
			hb |= bitScan << (3 * heapBitsShift)
		}
		b >>= 2
		nb -= 2
		*hbitp &^= uint8((bitPointer | bitScan | ((bitPointer | bitScan) << heapBitsShift)) << (2 * heapBitsShift))
		*hbitp |= uint8(hb)
		hbitp = add1(hbitp)
		if w += 2; w >= nw {
			// We know that there is more data, because we handled 2-word
			// and 3-word objects above, so this must be at least a 6-word
			// object. Write a zero byte in Phase 3 to mark the rest dead.
			hb = 0
			w += 4
			goto Phase3
		}
	}

	// Phase 2: Full bytes in bitmap, up to but not including write to last byte (full or partial) in bitmap.
	// The loop computes the bits for that last write but does not execute the write;
	// it leaves the bits in hb for processing by phase 3.
	// To avoid repeated adjustment of nb, we subtract out the 4 bits we're going to
	// use in the first half of the loop right now, and then we only adjust nb explicitly
	// if the 8 bits used by each iteration isn't balanced by 8 bits loaded mid-loop.
	nb -= 4
	for {
		// Emit bitmap byte.
		// b has at least nb+4 bits, with one exception:
		// if w+4 >= nw, then b has only nw-w bits,
		// but we'll stop at the break and then truncate
		// appropriately in Phase 3.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			break
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4

		// Load more bits. b has nb right now.
		if p != endp {
			// Fast path: keep reading from ptrmask.
			// nb unmodified: we just loaded 8 bits,
			// and the next iteration will consume 8 bits,
			// leaving us with the same nb the next time we're here.
			if nb < 8 {
				b |= uintptr(*p) << nb
				p = add1(p)
			} else {
				// b still has at least 8 bits in it, so skip the
				// reload and instead account for the 8 bits this
				// iteration consumes by reducing the number of
				// bits recorded in b.
				nb -= 8
			}
		} else if p == nil {
			// Almost as fast path: track bit count and refill from pbits.
			// For short repetitions.
			if nb < 8 {
				b |= pbits << nb
				nb += endnb
			}
			nb -= 8
		} else {
			// Slow path: reached end of ptrmask.
			// Process final partial byte and rewind to start.
			b |= uintptr(*p) << nb
			nb += endnb
			if nb < 8 {
				b |= uintptr(*ptrmask) << nb
				p = add1(ptrmask)
			} else {
				nb -= 8
				p = ptrmask
			}
		}

		// Emit bitmap byte.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			break
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4
	}

Phase3:
	// Phase 3: Write last byte or partial byte and zero the rest of the bitmap entries.
	if w > nw {
		// Counting the 4 entries in hb not yet written to memory,
		// there are more entries than possible pointer slots.
		// Discard the excess entries (can't be more than 3).
		mask := uintptr(1)<<(4-(w-nw)) - 1
		hb &= mask | mask<<4 // apply mask to both pointer bits and scan bits
	}

	// Change nw from counting possibly-pointer words to total words in allocation.
	nw = size / goarch.PtrSize

	// Write whole bitmap bytes.
	// The first is hb, the rest are zero.
	if w <= nw {
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		hb = 0 // for possible final half-byte below
		for w += 4; w <= nw; w += 4 {
			*hbitp = 0
			hbitp = add1(hbitp)
		}
	}

	// Write final partial bitmap byte if any.
	// We know w > nw, or else we'd still be in the loop above.
	// It can be bigger only due to the 4 entries in hb that it counts.
	// If w == nw+4 then there's nothing left to do: we wrote all nw entries
	// and can discard the 4 sitting in hb.
	// But if w == nw+2, we need to write first two in hb.
	// The byte is shared with the next object, so be careful with
	// existing bits.
	if w == nw+2 {
		*hbitp = *hbitp&^(bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift) | uint8(hb)
	}

Phase4:
	// Phase 4: Copy unrolled bitmap to per-arena bitmaps, if necessary.
	if outOfPlace {
		// TODO: We could probably make this faster by
		// handling [x+dataSize, x+size) specially.
		h := heapBitsForAddr(x)

		// cnw is the number of heap words, or bit pairs,
		// remaining (like nw above).
		cnw := size / goarch.PtrSize
		src := (*uint8)(unsafe.Pointer(x))
		// The first and last bitmap bytes may be shared with
		// neighboring objects (the object need not start or end
		// on a bitmap byte boundary), so those two bytes must be
		// merged rather than copied wholesale.
		//
		// Handle the first byte specially if it's shared. See
		// Phase 1 for why this is the only special case we need.
		if doubleCheck {
			if !(h.shift == 0 || h.shift == 2) {
				print("x=", x, " size=", size, " cnw=", h.shift, "\n")
				throw("bad start shift")
			}
		}
		if h.shift == 2 {
			*h.bitp = *h.bitp&^((bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift)<<(2*heapBitsShift)) | *src
			h = h.next().next()
			cnw -= 2
			src = addb(src, 1)
		}
		// We're now byte aligned. Copy out to per-arena
		// bitmaps until the last byte (which may again be
		// partial).
		for cnw >= 4 {
			// This loop processes four words at a time,
			// so round cnw down accordingly.
			hNext, words := h.forwardOrBoundary(cnw / 4 * 4)

			// n is the number of bitmap bytes to copy.
			n := words / 4
			memmove(unsafe.Pointer(h.bitp), unsafe.Pointer(src), n)
			cnw -= words
			h = hNext
			src = addb(src, n)
		}
		if doubleCheck && h.shift != 0 {
			print("cnw=", cnw, " h.shift=", h.shift, "\n")
			throw("bad shift after block copy")
		}
		// Handle the last byte if it's shared.
		if cnw == 2 {
			*h.bitp = *h.bitp&^(bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift) | *src
			src = addb(src, 1)
			h = h.next().next()
		}
		if doubleCheck {
			if uintptr(unsafe.Pointer(src)) > x+size {
				throw("copy exceeded object size")
			}
			if !(cnw == 0 || cnw == 2) {
				print("x=", x, " size=", size, " cnw=", cnw, "\n")
				throw("bad number of remaining words")
			}
			// Update state for the double-check below.
			hbitp = h.bitp
		}
		// Zero the object where we wrote the bitmap.
		memclrNoHeapPointers(unsafe.Pointer(x), uintptr(unsafe.Pointer(src))-x)
	}

	// Double check the whole bitmap.
	if doubleCheck {
		// x+size may not point to the heap, so back up one
		// word and then advance it the way we do above.
		end := heapBitsForAddr(x + size - goarch.PtrSize)
		if outOfPlace {
			// In out-of-place copying, we just advance
			// using next.
			end = end.next()
		} else {
			// Don't use next because that may advance to
			// the next arena and the in-place unrolling
			// code assumes the bitmap is contiguous.
			end.shift += heapBitsShift
			if end.shift == 4*heapBitsShift {
				end.bitp, end.shift = add1(end.bitp), 0
			}
		}
		if typ.kind&kindGCProg == 0 && (hbitp != end.bitp || (w == nw+2) != (end.shift == 2)) {
			println("ended at wrong bitmap byte for", typ.string(), "x", dataSize/typ.size)
			print("typ.size=", typ.size, " typ.ptrdata=", typ.ptrdata, " dataSize=", dataSize, " size=", size, "\n")
			print("w=", w, " nw=", nw, " b=", hex(b), " nb=", nb, " hb=", hex(hb), "\n")
			h0 := heapBitsForAddr(x)
			print("initial bits h0.bitp=", h0.bitp, " h0.shift=", h0.shift, "\n")
			print("ended at hbitp=", hbitp, " but next starts at bitp=", end.bitp, " shift=", end.shift, "\n")
			throw("bad heapBitsSetType")
		}

		// Double-check that bits to be written were written correctly.
		// Does not check that other bits were not written, unfortunately.
		h := heapBitsForAddr(x)
		nptr := typ.ptrdata / goarch.PtrSize
		ndata := typ.size / goarch.PtrSize
		count := dataSize / typ.size
		totalptr := ((count-1)*typ.size + typ.ptrdata) / goarch.PtrSize
		for i := uintptr(0); i < size/goarch.PtrSize; i++ {
			j := i % ndata
			var have, want uint8
			have = (*h.bitp >> h.shift) & (bitPointer | bitScan)
			if i >= totalptr {
				if typ.kind&kindGCProg != 0 && i < (totalptr+3)/4*4 {
					// heapBitsSetTypeGCProg always fills
					// in full nibbles of bitScan.
					want = bitScan
				}
			} else {
				if j < nptr && (*addb(ptrmask, j/8)>>(j%8))&1 != 0 {
					want |= bitPointer
				}
				want |= bitScan
			}
			if have != want {
				println("mismatch writing bits for", typ.string(), "x", dataSize/typ.size)
				print("typ.size=", typ.size, " typ.ptrdata=", typ.ptrdata, " dataSize=", dataSize, " size=", size, "\n")
				print("kindGCProg=", typ.kind&kindGCProg != 0, " outOfPlace=", outOfPlace, "\n")
				print("w=", w, " nw=", nw, " b=", hex(b), " nb=", nb, " hb=", hex(hb), "\n")
				h0 := heapBitsForAddr(x)
				print("initial bits h0.bitp=", h0.bitp, " h0.shift=", h0.shift, "\n")
				print("current bits h.bitp=", h.bitp, " h.shift=", h.shift, " *h.bitp=", hex(*h.bitp), "\n")
				print("ptrmask=", ptrmask, " p=", p, " endp=", endp, " endnb=", endnb, " pbits=", hex(pbits), " b=", hex(b), " nb=", nb, "\n")
				println("at word", i, "offset", i*goarch.PtrSize, "have", hex(have), "want", hex(want))
				if typ.kind&kindGCProg != 0 {
					println("GC program:")
					dumpGCProg(addb(typ.gcdata, 4))
				}
				throw("bad heapBitsSetType")
			}
			h = h.next()
		}
		if ptrmask == debugPtrmask.data {
			unlock(&debugPtrmask.lock)
		}
	}
}

var debugPtrmask struct {
	lock mutex
	data *byte
}

// heapBitsSetTypeGCProg implements heapBitsSetType using a GC program.
// progSize is the size of the memory described by the program.
// elemSize is the size of the element that the GC program describes (a prefix of).
// dataSize is the total size of the intended data, a multiple of elemSize.
// allocSize is the total size of the allocated memory.
//
// GC programs are only used for large allocations.
// heapBitsSetType requires that allocSize is a multiple of 4 words,
// so that the relevant bitmap bytes are not shared with surrounding
// objects.
func heapBitsSetTypeGCProg(h heapBits, progSize, elemSize, dataSize, allocSize uintptr, prog *byte) {
	if goarch.PtrSize == 8 && allocSize%(4*goarch.PtrSize) != 0 {
		// Alignment will be wrong.
		throw("heapBitsSetTypeGCProg: small allocation")
	}
	var totalBits uintptr
	if elemSize == dataSize {
		totalBits = runGCProg(prog, nil, h.bitp, 2)
		if totalBits*goarch.PtrSize != progSize {
			println("runtime: heapBitsSetTypeGCProg: total bits", totalBits, "but progSize", progSize)
			throw("heapBitsSetTypeGCProg: unexpected bit count")
		}
	} else {
		count := dataSize / elemSize

		// Piece together program trailer to run after prog that does:
		//	literal(0)
		//	repeat(1, elemSize-progSize-1) // zeros to fill element size
		//	repeat(elemSize, count-1) // repeat that element for count
		// This zero pads the data remaining in the first element and then
		// repeats that first element blown up to elemSize, count-1 times.
		var trailer [40]byte // 3 varints (max 10 each) + some bytes
		i := 0
		if n := elemSize/goarch.PtrSize - progSize/goarch.PtrSize; n > 0 {
			// literal(0)
			trailer[i] = 0x01
			i++
			trailer[i] = 0
			i++
			if n > 1 {
				// repeat(1, n-1)
				trailer[i] = 0x81
				i++
				n--
				for ; n >= 0x80; n >>= 7 {
					trailer[i] = byte(n | 0x80)
					i++
				}
				trailer[i] = byte(n)
				i++
			}
		}
		// repeat(elemSize/ptrSize, count-1)
		trailer[i] = 0x80
		i++
		n := elemSize / goarch.PtrSize
		for ; n >= 0x80; n >>= 7 {
			trailer[i] = byte(n | 0x80)
			i++
		}
		trailer[i] = byte(n)
		i++
		n = count - 1
		for ; n >= 0x80; n >>= 7 {
			trailer[i] = byte(n | 0x80)
			i++
		}
		trailer[i] = byte(n)
		i++
		trailer[i] = 0
		i++

		runGCProg(prog, &trailer[0], h.bitp, 2)

		// Even though we filled in the full array just now,
		// record that we only filled in up to the ptrdata of the
		// last element. This will cause the code below to
		// memclr the dead section of the final array element,
		// so that scanobject can stop early in the final element.
		totalBits = (elemSize*(count-1) + progSize) / goarch.PtrSize
	}
	endProg := unsafe.Pointer(addb(h.bitp, (totalBits+3)/4))
	endAlloc := unsafe.Pointer(addb(h.bitp, allocSize/goarch.PtrSize/wordsPerBitmapByte))
	memclrNoHeapPointers(endProg, uintptr(endAlloc)-uintptr(endProg))
}
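
// Illustrative sketch (not part of the original source): the trailer above
// encodes counts as little-endian base-128 varints, high bit set on every
// byte except the last. A standalone encoder for that format:
//
//	func appendVarint(p []byte, n uintptr) []byte {
//		for ; n >= 0x80; n >>= 7 {
//			p = append(p, byte(n|0x80))
//		}
//		return append(p, byte(n))
//	}
//
// appendVarint(nil, 1000) yields [0xe8 0x07]: 1000 = 0b111_1101000,
// emitted as the low 7 bits first (0x68|0x80 = 0xe8), then the remaining
// bits (0x07).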

// progToPointerMask returns the 1-bit pointer mask output by the GC program prog.
// size is the size of the region described by prog, in bytes.
// The resulting bitvector will have no more than size/goarch.PtrSize bits.
func progToPointerMask(prog *byte, size uintptr) bitvector {
	n := (size/goarch.PtrSize + 7) / 8
	x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
	x[len(x)-1] = 0xa1 // overflow check sentinel
	n = runGCProg(prog, nil, &x[0], 1)
	if x[len(x)-1] != 0xa1 {
		throw("progToPointerMask: overflow")
	}
	return bitvector{int32(n), &x[0]}
}

// Packed GC pointer bitmaps, aka GC programs.
//
// For large types containing arrays, the type information has a
// natural repetition that can be encoded to save space in the
// binary and in the memory representation of the type information.
//
// The encoding is a simple Lempel-Ziv style bytecode machine
// with the following instructions:
//
//	00000000: stop
//	0nnnnnnn: emit n bits copied from the next (n+7)/8 bytes
//	10000000 n c: repeat the previous n bits c times; n, c are varints
//	1nnnnnnn c: repeat the previous n bits c times; c is a varint

// runGCProg executes the GC program prog, and then trailer if non-nil,
// writing to dst with entries of the given size.
//
// If size == 1, dst is a 1-bit pointer mask laid out moving forward from dst.
//
// If size == 2, dst is the 2-bit heap bitmap, and entries are written in
// the split pointer/scan nibble form, moving forward from dst. In this
// case, the caller guarantees that only whole bytes in dst need to be
// written.
//
// runGCProg returns the number of 1- or 2-bit entries written to dst.
func runGCProg(prog, trailer, dst *byte, size int) uintptr {
	dstStart := dst

	// Bits waiting to be written to memory.
	var bits uintptr
	var nbits uintptr

	p := prog
Run:
	for {
		// Flush accumulated full bytes.
		// The rest of the loop assumes that nbits <= 7.
		for ; nbits >= 8; nbits -= 8 {
			if size == 1 {
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			} else {
				v := bits&bitPointerAll | bitScanAll
				*dst = uint8(v)
				dst = add1(dst)
				bits >>= 4
				v = bits&bitPointerAll | bitScanAll
				*dst = uint8(v)
				dst = add1(dst)
				bits >>= 4
			}
		}

		// Process one instruction.
		inst := uintptr(*p)
		p = add1(p)
		n := inst & 0x7F
		if inst&0x80 == 0 {
			// Literal bits; n == 0 means end of program.
			if n == 0 {
				// Program is over; continue in trailer if present.
				if trailer != nil {
					p = trailer
					trailer = nil
					continue
				}
				break Run
			}
			nbyte := n / 8
			for i := uintptr(0); i < nbyte; i++ {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				if size == 1 {
					*dst = uint8(bits)
					dst = add1(dst)
					bits >>= 8
				} else {
					v := bits&0xf | bitScanAll
					*dst = uint8(v)
					dst = add1(dst)
					bits >>= 4
					v = bits&0xf | bitScanAll
					*dst = uint8(v)
					dst = add1(dst)
					bits >>= 4
				}
			}
			if n %= 8; n > 0 {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				nbits += n
			}
			continue Run
		}

		// Repeat. If n == 0, it is encoded in a varint in the next bytes.
		if n == 0 {
			for off := uint(0); ; off += 7 {
				x := uintptr(*p)
				p = add1(p)
				n |= (x & 0x7F) << off
				if x&0x80 == 0 {
					break
				}
			}
		}

		// Count is encoded in a varint in the next bytes.
		c := uintptr(0)
		for off := uint(0); ; off += 7 {
			x := uintptr(*p)
			p = add1(p)
			c |= (x & 0x7F) << off
			if x&0x80 == 0 {
				break
			}
		}
		c *= n // now total number of bits to copy

		// If the number of bits being repeated is small, load them
		// into a register and use that register for the possibly many
		// repetitions of that pattern. If the number of bits is large,
		// the pattern is instead streamed from the already-written
		// part of dst.
		src := dst
		const maxBits = goarch.PtrSize*8 - 7
		if n <= maxBits {
			// Start with bits in output buffer.
			pattern := bits
			npattern := nbits

			// If we need more bits, fetch them from the output,
			// moving backward from dst through the bytes already written.
			if size == 1 {
				src = subtract1(src)
				for npattern < n {
					pattern <<= 8
					pattern |= uintptr(*src)
					src = subtract1(src)
					npattern += 8
				}
			} else {
				src = subtract1(src)
				for npattern < n {
					pattern <<= 4
					pattern |= uintptr(*src) & 0xf
					src = subtract1(src)
					npattern += 4
				}
			}

			// We started with the whole bit output buffer,
			// and then we loaded bits from whole bytes.
			// Either way, we might now have too many instead of too few.
			// Discard the extra.
			if npattern > n {
				pattern >>= npattern - n
				npattern = n
			}

			// Replicate pattern to at most maxBits.
			if npattern == 1 {
				// One bit being repeated.
				// If the bit is 1, make the pattern all 1s.
				// If the bit is 0, the pattern is already all 0s,
				// but we can claim that the number of bits
				// in the word is equal to the number we need (c),
				// because right shift of bits will zero fill.
				if pattern == 1 {
					pattern = 1<<maxBits - 1
					npattern = maxBits
				} else {
					npattern = c
				}
			} else {
				b := pattern
				nb := npattern
				if nb+nb <= maxBits {
					// Double pattern until the whole uintptr is filled.
					for nb <= goarch.PtrSize*8 {
						b |= b << nb
						nb += nb
					}
					// Trim away incomplete copy of original pattern in high bits.
					// TODO(rsc): Replace with table lookup or loop on systems without division?
					nb = maxBits / npattern * npattern
					b &= 1<<nb - 1
					pattern = b
					npattern = nb
				}
			}

			// Add pattern to bit buffer and flush bit buffer, c/npattern times.
			// Since pattern contains >8 bits, there will be full bytes to flush
			// on each iteration.
			for ; c >= npattern; c -= npattern {
				bits |= pattern << nbits
				nbits += npattern
				if size == 1 {
					for nbits >= 8 {
						*dst = uint8(bits)
						dst = add1(dst)
						bits >>= 8
						nbits -= 8
					}
				} else {
					for nbits >= 4 {
						*dst = uint8(bits&0xf | bitScanAll)
						dst = add1(dst)
						bits >>= 4
						nbits -= 4
					}
				}
			}

			// Add final fragment to bit buffer.
			if c > 0 {
				pattern &= 1<<c - 1
				bits |= pattern << nbits
				nbits += c
			}
			continue Run
		}

		// Repeat; n too large to fit in a register.
		// Since nbits <= 7, we know the first few bytes of repeated data
		// are already written to memory.
		off := n - nbits // n > nbits because n > maxBits and nbits <= 7
		if size == 1 {
			// Leading src fragment.
			src = subtractb(src, (off+7)/8)
			if frag := off & 7; frag != 0 {
				bits |= uintptr(*src) >> (8 - frag) << nbits
				src = add1(src)
				nbits += frag
				c -= frag
			}
			// Main loop: load one byte, write another.
			// The bits are rotating through the bit buffer.
			for i := c / 8; i > 0; i-- {
				bits |= uintptr(*src) << nbits
				src = add1(src)
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			}
			// Final src fragment.
			if c %= 8; c > 0 {
				bits |= (uintptr(*src) & (1<<c - 1)) << nbits
				nbits += c
			}
		} else {
			// Leading src fragment.
			src = subtractb(src, (off+3)/4)
			if frag := off & 3; frag != 0 {
				bits |= (uintptr(*src) & 0xf) >> (4 - frag) << nbits
				src = add1(src)
				nbits += frag
				c -= frag
			}
			// Main loop: load one byte, write another.
			// The bits are rotating through the bit buffer.
			for i := c / 4; i > 0; i-- {
				bits |= (uintptr(*src) & 0xf) << nbits
				src = add1(src)
				*dst = uint8(bits&0xf | bitScanAll)
				dst = add1(dst)
				bits >>= 4
			}
			// Final src fragment.
			if c %= 4; c > 0 {
				bits |= (uintptr(*src) & (1<<c - 1)) << nbits
				nbits += c
			}
		}
	}

	// Write any final bits out, using full-byte writes, even for the final byte.
	var totalBits uintptr
	if size == 1 {
		totalBits = (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*8 + nbits
		nbits += -nbits & 7
		for ; nbits > 0; nbits -= 8 {
			*dst = uint8(bits)
			dst = add1(dst)
			bits >>= 8
		}
	} else {
		totalBits = (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*4 + nbits
		nbits += -nbits & 3
		for ; nbits > 0; nbits -= 4 {
			v := bits&0xf | bitScanAll
			*dst = uint8(v)
			dst = add1(dst)
			bits >>= 4
		}
	}
	return totalBits
}
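
// Illustrative sketch (not part of the original source): a worked GC
// program, using the instruction encoding described above. Consider a type
// laid out as one pointer word followed by three scalar words, repeated 4
// times (16 words total). One valid program is:
//
//	prog := []byte{
//		0x04, 0x01, // emit 4 literal bits: 1000 (pointer, scalar, scalar, scalar)
//		0x84, 0x03, // repeat the previous 4 bits 3 more times
//		0x00,       // stop
//	}
//
// Run with size == 1, this writes the 16-bit pointer mask
// 1000 1000 1000 1000 (bytes 0x11, 0x11, low bit first); run with size == 2,
// the same program yields heap bitmap bytes with those pointer bits in the
// low nibble and scan bits in the high nibble.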

// materializeGCProg allocates space for the (1-bit) pointer bitmask
// for an object of size ptrdata. Then it fills that space with the
// pointer bitmask specified by the program prog.
// The bitmask starts at s.startAddr.
// The result must be deallocated with dematerializeGCProg.
func materializeGCProg(ptrdata uintptr, prog *byte) *mspan {
	// Each word of ptrdata needs one bit in the bitmap.
	bitmapBytes := divRoundUp(ptrdata, 8*goarch.PtrSize)
	// Compute the number of pages needed for bitmapBytes.
	pages := divRoundUp(bitmapBytes, pageSize)
	s := mheap_.allocManual(pages, spanAllocPtrScalarBits)
	runGCProg(addb(prog, 4), nil, (*byte)(unsafe.Pointer(s.startAddr)), 1)
	return s
}
func dematerializeGCProg(s *mspan) {
	mheap_.freeManual(s, spanAllocPtrScalarBits)
}

func dumpGCProg(p *byte) {
	nptr := 0
	for {
		x := *p
		p = add1(p)
		if x == 0 {
			print("\t", nptr, " end\n")
			break
		}
		if x&0x80 == 0 {
			print("\t", nptr, " lit ", x, ":")
			n := int(x+7) / 8
			for i := 0; i < n; i++ {
				print(" ", hex(*p))
				p = add1(p)
			}
			print("\n")
			nptr += int(x)
		} else {
			nbit := int(x &^ 0x80)
			if nbit == 0 {
				for nb := uint(0); ; nb += 7 {
					x := *p
					p = add1(p)
					nbit |= int(x&0x7f) << nb
					if x&0x80 == 0 {
						break
					}
				}
			}
			count := 0
			for nb := uint(0); ; nb += 7 {
				x := *p
				p = add1(p)
				count |= int(x&0x7f) << nb
				if x&0x80 == 0 {
					break
				}
			}
			print("\t", nptr, " repeat ", nbit, " × ", count, "\n")
			nptr += nbit * count
		}
	}
}
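
// Illustrative sketch (not part of the original source): for the example
// program given after runGCProg above, dumpGCProg prints one line per
// instruction, tracking the running bit offset:
//
//	0 lit 4: 0x1
//	4 repeat 4 × 3
//	16 end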

// Testing.

func getgcmaskcb(frame *stkframe, ctxt unsafe.Pointer) bool {
	target := (*stkframe)(ctxt)
	if frame.sp <= target.sp && target.sp < frame.varp {
		*target = *frame
		return false
	}
	return true
}

// gcbits returns the GC type info for x, for testing.
// The result is the bitmap entries (0 or 1), one entry per byte.
//go:linkname reflect_gcbits reflect.gcbits
func reflect_gcbits(x any) []byte {
	ret := getgcmask(x)
	typ := (*ptrtype)(unsafe.Pointer(efaceOf(&x)._type)).elem
	nptr := typ.ptrdata / goarch.PtrSize
	for uintptr(len(ret)) > nptr && ret[len(ret)-1] == 0 {
		ret = ret[:len(ret)-1]
	}
	return ret
}

// getgcmask returns the GC type info for the pointer stored in ep, for testing.
// If ep points to the stack, only static live information will be returned
// (i.e. not for objects which are only dynamically live stack objects).
func getgcmask(ep any) (mask []byte) {
	e := *efaceOf(&ep)
	p := e.data
	t := e._type

	for _, datap := range activeModules() {
		// data
		if datap.data <= uintptr(p) && uintptr(p) < datap.edata {
			bitmap := datap.gcdatamask.bytedata
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - datap.data) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}

		// bss
		if datap.bss <= uintptr(p) && uintptr(p) < datap.ebss {
			bitmap := datap.gcbssmask.bytedata
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - datap.bss) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}
	}

	// heap
	if base, s, _ := findObject(uintptr(p), 0, 0); base != 0 {
		hbits := heapBitsForAddr(base)
		n := s.elemsize
		mask = make([]byte, n/goarch.PtrSize)
		for i := uintptr(0); i < n; i += goarch.PtrSize {
			if hbits.isPointer() {
				mask[i/goarch.PtrSize] = 1
			}
			if !hbits.morePointers() {
				mask = mask[:i/goarch.PtrSize]
				break
			}
			hbits = hbits.next()
		}
		return
	}

	// stack
	if _g_ := getg(); _g_.m.curg.stack.lo <= uintptr(p) && uintptr(p) < _g_.m.curg.stack.hi {
		var frame stkframe
		frame.sp = uintptr(p)
		_g_ := getg()
		gentraceback(_g_.m.curg.sched.pc, _g_.m.curg.sched.sp, 0, _g_.m.curg, 0, nil, 1000, getgcmaskcb, noescape(unsafe.Pointer(&frame)), 0)
		if frame.fn.valid() {
			locals, _, _ := getStackMap(&frame, nil, false)
			if locals.n == 0 {
				return
			}
			size := uintptr(locals.n) * goarch.PtrSize
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - frame.varp + size) / goarch.PtrSize
				mask[i/goarch.PtrSize] = locals.ptrbit(off)
			}
		}
		return
	}

	// otherwise, not something the GC knows about.
	// possibly read-only data, like malloc(0).
	// must not have pointers
	return
}