Source file
src/runtime/mbitmap.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Garbage collector: type and heap bitmaps.
//
// Stack, data, and bss bitmaps
//
// Stack frames and global variables in the data and bss sections are
// described by 1-bit bitmaps in which 0 means uninteresting and 1 means
// live pointer to be visited during GC. The bits of each byte are
// consumed starting with the low bit: 1<<0, 1<<1, and so on.
//
// Heap bitmap
//
// The heap bitmap comprises 2 bits for each pointer-sized word in the
// heap, stored in the heapArena metadata for the arena that contains
// the word. The two bits record whether the word holds a pointer
// (bitPointer) and whether the object containing the word should keep
// being scanned (bitScan). Each bitmap byte therefore describes four
// heap words: the pointer bits for the four words occupy the low
// nibble of the byte, and the scan bits the high nibble.

package runtime

import (
	"runtime/internal/atomic"
	"runtime/internal/sys"
	"unsafe"
)

const (
	bitPointer = 1 << 0
	bitScan    = 1 << 4

	heapBitsShift      = 1     // shift offset between successive bitPointer or bitScan entries
	wordsPerBitmapByte = 8 / 2 // heap words described by one bitmap byte

	// all scan/pointer bits in a byte
	bitScanAll    = bitScan | bitScan<<heapBitsShift | bitScan<<(2*heapBitsShift) | bitScan<<(3*heapBitsShift)
	bitPointerAll = bitPointer | bitPointer<<heapBitsShift | bitPointer<<(2*heapBitsShift) | bitPointer<<(3*heapBitsShift)
)
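
// Illustration (not part of the original source): with this layout, one
// bitmap byte describes four heap words. A hypothetical decoder for word i
// (0 <= i < 4) of a bitmap byte b would be:
//
//	func wordBits(b uint8, i uint) (isPtr, scan bool) {
//		return b&(bitPointer<<(i*heapBitsShift)) != 0,
//			b&(bitScan<<(i*heapBitsShift)) != 0
//	}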

// addb returns the byte pointer p+n.
//go:nowritebarrier
//go:nosplit
func addb(p *byte, n uintptr) *byte {
	// Note: wrote out full expression instead of calling add(p, n)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + n))
}

// subtractb returns the byte pointer p-n.
//go:nowritebarrier
//go:nosplit
func subtractb(p *byte, n uintptr) *byte {
	// Note: wrote out full expression instead of calling add(p, -n)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - n))
}

// add1 returns the byte pointer p+1.
//go:nowritebarrier
//go:nosplit
func add1(p *byte) *byte {
	// Note: wrote out full expression instead of calling addb(p, 1)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + 1))
}

// subtract1 returns the byte pointer p-1.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nowritebarrier
//go:nosplit
func subtract1(p *byte) *byte {
	// Note: wrote out full expression instead of calling subtractb(p, 1)
	// to reduce the number of temporaries generated by the
	// compiler for this trivial expression during inlining.
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - 1))
}

// heapBits provides access to the bitmap bits for a single heap word.
// The methods on heapBits take value receivers so that the compiler
// can more easily inline calls to those methods and registerize the
// struct fields independently.
type heapBits struct {
	bitp  *uint8
	shift uint32
	arena uint32 // Index of heap arena containing bitp
	last  *uint8 // Last byte arena's bitmap
}

// Make the compiler check that heapBits.arena is large enough to hold
// the maximum arena frame number.
var _ = heapBits{arena: (1<<heapAddrBits)/heapArenaBytes - 1}

// markBits provides access to the mark bit for an object in the heap.
// bytep points to the byte holding the mark bit.
// mask is a byte with a single bit set that can be &ed with *bytep
// to see if the bit has been set.
// *m.byte&m.mask != 0 indicates the mark bit is set.
// index can be used along with span information to generate
// this span's pointers.
type markBits struct {
	bytep *uint8
	mask  uint8
	index uintptr
}

//go:nosplit
func (s *mspan) allocBitsForIndex(allocBitIndex uintptr) markBits {
	bytep, mask := s.allocBits.bitp(allocBitIndex)
	return markBits{bytep, mask, allocBitIndex}
}

// refillAllocCache takes 8 bytes of s.allocBits starting at whichByte
// and negates them so that ctz (count trailing zeros) instructions
// can be used. It then places these 8 bytes into the cached 64 bit
// s.allocCache.
func (s *mspan) refillAllocCache(whichByte uintptr) {
	bytes := (*[8]uint8)(unsafe.Pointer(s.allocBits.bytep(whichByte)))
	aCache := uint64(0)
	aCache |= uint64(bytes[0])
	aCache |= uint64(bytes[1]) << (1 * 8)
	aCache |= uint64(bytes[2]) << (2 * 8)
	aCache |= uint64(bytes[3]) << (3 * 8)
	aCache |= uint64(bytes[4]) << (4 * 8)
	aCache |= uint64(bytes[5]) << (5 * 8)
	aCache |= uint64(bytes[6]) << (6 * 8)
	aCache |= uint64(bytes[7]) << (7 * 8)
	s.allocCache = ^aCache
}
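
// Illustration (not part of the original source): inverting the allocation
// bits means a single count-trailing-zeros instruction finds the first free
// object. A hypothetical stand-alone version of the lookup, using math/bits
// in place of the runtime's sys.Ctz64:
//
//	// firstFree returns the index of the first free slot recorded in
//	// alloc (1 bits mark allocated slots), or 64 if all are taken.
//	func firstFree(alloc uint64) int {
//		return bits.TrailingZeros64(^alloc)
//	}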

// nextFreeIndex returns the index of the next free object in s at
// or after s.freeindex.
// There are hardware instructions that can be used to make this
// faster if profiling warrants it.
func (s *mspan) nextFreeIndex() uintptr {
	sfreeindex := s.freeindex
	snelems := s.nelems
	if sfreeindex == snelems {
		return sfreeindex
	}
	if sfreeindex > snelems {
		throw("s.freeindex > s.nelems")
	}

	aCache := s.allocCache

	bitIndex := sys.Ctz64(aCache)
	for bitIndex == 64 {
		// Move index to start of next cached bits.
		sfreeindex = (sfreeindex + 64) &^ (64 - 1)
		if sfreeindex >= snelems {
			s.freeindex = snelems
			return snelems
		}
		whichByte := sfreeindex / 8
		// Refill s.allocCache with the next 64 alloc bits.
		s.refillAllocCache(whichByte)
		aCache = s.allocCache
		bitIndex = sys.Ctz64(aCache)
		// Nothing was available in the cached bits;
		// loop to grab the next 8 bytes and try again.
	}
	result := sfreeindex + uintptr(bitIndex)
	if result >= snelems {
		s.freeindex = snelems
		return snelems
	}

	s.allocCache >>= uint(bitIndex + 1)
	sfreeindex = result + 1

	if sfreeindex%64 == 0 && sfreeindex != snelems {
		// We just incremented s.freeindex so it isn't 0.
		// As each 1 in s.allocCache was encountered and used for allocation
		// it was shifted away. At this point s.allocCache contains all 0s.
		// Refill s.allocCache so that it corresponds
		// to the bits at s.allocBits starting at s.freeindex.
		whichByte := sfreeindex / 8
		s.refillAllocCache(whichByte)
	}
	s.freeindex = sfreeindex
	return result
}

// isFree reports whether the index'th object in s is unallocated.
//
// The caller must ensure s.state is mSpanInUse, and there must have
// been no preemption points since ensuring this (which could allow a
// GC transition, which would allow the state to change).
func (s *mspan) isFree(index uintptr) bool {
	if index < s.freeindex {
		return false
	}
	bytep, mask := s.allocBits.bitp(index)
	return *bytep&mask == 0
}

// divideByElemSize returns n/s.elemsize.
// n must be within [0, s.npages*_PageSize),
// or may be exactly s.npages*_PageSize
// if s.elemsize is from sizeclasses.go.
func (s *mspan) divideByElemSize(n uintptr) uintptr {
	const doubleCheck = false

	// See explanation in mksizeclasses.go's computeDivMagic.
	q := uintptr((uint64(n) * uint64(s.divMul)) >> 32)

	if doubleCheck && q != n/s.elemsize {
		println(n, "/", s.elemsize, "should be", n/s.elemsize, "but got", q)
		throw("bad magic division")
	}
	return q
}
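
// Illustration (not part of the original source): the magic-number division
// above trades a hardware divide for a multiply and a shift. For a divisor
// d, a multiplier of ceil(2^32/d) gives exact quotients over the small range
// of offsets that occur within a span. A hypothetical stand-alone version
// for d = 48:
//
//	// divideBy48 computes n/48 without a division instruction,
//	// valid for the offsets that occur within a span.
//	func divideBy48(n uintptr) uintptr {
//		const divMul = (1<<32 + 47) / 48 // ceil(2^32 / 48)
//		return uintptr(uint64(n) * divMul >> 32)
//	}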

func (s *mspan) objIndex(p uintptr) uintptr {
	return s.divideByElemSize(p - s.base())
}

func markBitsForAddr(p uintptr) markBits {
	s := spanOf(p)
	objIndex := s.objIndex(p)
	return s.markBitsForIndex(objIndex)
}

func (s *mspan) markBitsForIndex(objIndex uintptr) markBits {
	bytep, mask := s.gcmarkBits.bitp(objIndex)
	return markBits{bytep, mask, objIndex}
}

func (s *mspan) markBitsForBase() markBits {
	return markBits{(*uint8)(s.gcmarkBits), uint8(1), 0}
}

// isMarked reports whether mark bit m is set.
func (m markBits) isMarked() bool {
	return *m.bytep&m.mask != 0
}

// setMarked sets the marked bit in the markbits, atomically.
func (m markBits) setMarked() {
	// Might be racing with other updates, so use atomic update always.
	// We used to be clever here and use a non-atomic update in certain
	// cases, but it's not worth the risk.
	atomic.Or8(m.bytep, m.mask)
}

// setMarkedNonAtomic sets the marked bit in the markbits, non-atomically.
func (m markBits) setMarkedNonAtomic() {
	*m.bytep |= m.mask
}

// clearMarked clears the marked bit in the markbits, atomically.
func (m markBits) clearMarked() {
	// Might be racing with other updates, so use atomic update always.
	// We used to be clever here and use a non-atomic update in certain
	// cases, but it's not worth the risk.
	atomic.And8(m.bytep, ^m.mask)
}

// markBitsForSpan returns the markBits for the span base address base.
func markBitsForSpan(base uintptr) (mbits markBits) {
	mbits = markBitsForAddr(base)
	if mbits.mask != 1 {
		throw("markBitsForSpan: unaligned start")
	}
	return mbits
}

// advance advances the markBits to the next object in the span.
func (m *markBits) advance() {
	if m.mask == 1<<7 {
		m.bytep = (*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(m.bytep)) + 1))
		m.mask = 1
	} else {
		m.mask = m.mask << 1
	}
	m.index++
}

// heapBitsForAddr returns the heapBits for the address addr.
// The caller must ensure addr is in an allocated span.
// In particular, be careful not to point past the end of an object.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func heapBitsForAddr(addr uintptr) (h heapBits) {
	// 2 bits per word, 4 pairs per byte.
	arena := arenaIndex(addr)
	ha := mheap_.arenas[arena.l1()][arena.l2()]
	// The compiler uses a load for nil checking ha, but in this
	// case we'll almost never hit that cache line again, so it
	// makes more sense to do a value check.
	if ha == nil {
		// addr is not in the heap. Return nil heapBits, which
		// we expect to crash in the caller.
		return
	}
	h.bitp = &ha.bitmap[(addr/(sys.PtrSize*4))%heapArenaBitmapBytes]
	h.shift = uint32((addr / sys.PtrSize) & 3)
	h.arena = uint32(arena)
	h.last = &ha.bitmap[len(ha.bitmap)-1]
	return
}
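
// Illustration (not part of the original source): the indexing above maps
// an address to its bitmap byte and 2-bit slot within that byte. A
// hypothetical stand-alone version of the arithmetic, on 64-bit where
// sys.PtrSize is 8:
//
//	func bitmapPos(addr uintptr) (byteIndex uintptr, shift uint32) {
//		byteIndex = (addr / (8 * 4)) % heapArenaBitmapBytes // 4 words per bitmap byte
//		shift = uint32((addr / 8) & 3)                      // word within the byte
//		return
//	}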

// clobberdeadPtr is a special value that is used by the compiler to
// clobber dead stack slots, when -clobberdead flag is set.
const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32))

// badPointer throws bad pointer in heap panic.
func badPointer(s *mspan, p, refBase, refOff uintptr) {
	// Typically this indicates an incorrect use
	// of unsafe or cgo to store a bad pointer in
	// the Go heap. It may also indicate a runtime
	// bug, or garbage or uninitialized memory that
	// isn't really a pointer at all.
	printlock()
	print("runtime: pointer ", hex(p))
	if s != nil {
		state := s.state.get()
		if state != mSpanInUse {
			print(" to unallocated span")
		} else {
			print(" to unused region of span")
		}
		print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
	}
	print("\n")
	if refBase != 0 {
		print("runtime: found in object at *(", hex(refBase), "+", hex(refOff), ")\n")
		gcDumpObject("object", refBase, refOff)
	}
	getg().m.traceback = 2
	throw("found bad pointer in Go heap (incorrect use of unsafe or cgo?)")
}

// findObject returns the base address for the heap object containing
// the address p, the object's span, and the index of the object in s.
// If p does not point into a heap object, it returns base == 0.
//
// If p points to an invalid heap pointer and debug.invalidptr != 0,
// findObject panics.
//
// refBase and refOff optionally give the base address of the object
// in which the pointer p was found and the byte offset at which it
// was found. These are used for error reporting.
//
// It is nosplit so it is safe for p to be a pointer to the current
// goroutine's stack. Since p is a uintptr, it would not be adjusted
// if the stack were to move.
//go:nosplit
func findObject(p, refBase, refOff uintptr) (base uintptr, s *mspan, objIndex uintptr) {
	s = spanOf(p)
	// If s is nil, the virtual address has never been part of the heap.
	// This pointer may be to some mmap'd region, so we allow it.
	if s == nil {
		if GOARCH == "amd64" && p == clobberdeadPtr && debug.invalidptr != 0 {
			// Crash if clobberdeadPtr is seen. Only on AMD64 for now,
			// as it is the only arch where the compiler's -clobberdead
			// mode is implemented.
			badPointer(s, p, refBase, refOff)
		}
		return
	}
	// If p is a bad pointer, it may not be in s's bounds.
	//
	// Check s.state to synchronize with span initialization
	// before checking other fields. See also spanOfHeap.
	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		// Pointers into stacks are also ok, the runtime manages these explicitly.
		if state == mSpanManual {
			return
		}
		// The following ensures that we are rigorous about what data
		// structures hold valid pointers.
		if debug.invalidptr != 0 {
			badPointer(s, p, refBase, refOff)
		}
		return
	}

	objIndex = s.objIndex(p)
	base = s.base() + objIndex*s.elemsize
	return
}

// next returns the heapBits describing the next pointer-sized word in memory.
// That is, if h describes address p, h.next() describes p+ptrSize.
// Note that next does not modify h. The caller must record the result.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func (h heapBits) next() heapBits {
	if h.shift < 3*heapBitsShift {
		h.shift += heapBitsShift
	} else if h.bitp != h.last {
		h.bitp, h.shift = add1(h.bitp), 0
	} else {
		// Move to the next arena.
		return h.nextArena()
	}
	return h
}

// nextArena advances h to the beginning of the next heap arena.
//
// This is a slow-path helper to next. gc's inliner knows that
// heapBits.next can be inlined even though it calls this. This is
// marked noinline so it doesn't get inlined into next and cause next
// to be too big to inline.
//go:nosplit
//go:noinline
func (h heapBits) nextArena() heapBits {
	h.arena++
	ai := arenaIdx(h.arena)
	l2 := mheap_.arenas[ai.l1()]
	if l2 == nil {
		// We just passed the end of the object, which
		// was also the end of the heap. Poison h. It
		// should never be dereferenced at this point.
		return heapBits{}
	}
	ha := l2[ai.l2()]
	if ha == nil {
		return heapBits{}
	}
	h.bitp, h.shift = &ha.bitmap[0], 0
	h.last = &ha.bitmap[len(ha.bitmap)-1]
	return h
}

// forward returns the heapBits describing n pointer-sized words ahead of h in memory.
// That is, if h describes address p, h.forward(n) describes p+n*ptrSize.
// h.forward(1) is equivalent to h.next(), just slower.
// Note that forward does not modify h. The caller must record the result.
func (h heapBits) forward(n uintptr) heapBits {
	n += uintptr(h.shift) / heapBitsShift
	nbitp := uintptr(unsafe.Pointer(h.bitp)) + n/4
	h.shift = uint32(n%4) * heapBitsShift
	if nbitp <= uintptr(unsafe.Pointer(h.last)) {
		h.bitp = (*uint8)(unsafe.Pointer(nbitp))
		return h
	}

	// We're in a new heap arena.
	past := nbitp - (uintptr(unsafe.Pointer(h.last)) + 1)
	h.arena += 1 + uint32(past/heapArenaBitmapBytes)
	ai := arenaIdx(h.arena)
	if l2 := mheap_.arenas[ai.l1()]; l2 != nil && l2[ai.l2()] != nil {
		a := l2[ai.l2()]
		h.bitp = &a.bitmap[past%heapArenaBitmapBytes]
		h.last = &a.bitmap[len(a.bitmap)-1]
	} else {
		h.bitp, h.last = nil, nil
	}
	return h
}

// forwardOrBoundary is like forward, but stops at boundaries between
// contiguous sections of the bitmap. It returns the number of words
// advanced over, which will be <= n.
func (h heapBits) forwardOrBoundary(n uintptr) (heapBits, uintptr) {
	maxn := 4 * ((uintptr(unsafe.Pointer(h.last)) + 1) - uintptr(unsafe.Pointer(h.bitp)))
	if n > maxn {
		n = maxn
	}
	return h.forward(n), n
}

// The caller can test morePointers and isPointer by &-ing with bitScan and bitPointer.
// The result includes in its higher bits the bits for subsequent words
// described by the same bitmap byte.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func (h heapBits) bits() uint32 {
	// The (shift & 31) eliminates a test and conditional branch
	// from the generated code.
	return uint32(*h.bitp) >> (h.shift & 31)
}

// morePointers reports whether the scan bit is set for this word, that is,
// whether this word or a later word in the same object may still contain a
// pointer; scanning can stop once it returns false.
func (h heapBits) morePointers() bool {
	return h.bits()&bitScan != 0
}

// isPointer reports whether the heap bits describe a pointer word.
//
// nosplit because it is used during write barriers and must not be preempted.
//go:nosplit
func (h heapBits) isPointer() bool {
	return h.bits()&bitPointer != 0
}
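
// Illustration (not part of the original source): these accessors are
// consumed in loops like the heap case of getgcmask near the end of this
// file. A hypothetical, simplified scan over one object (visit is a
// stand-in callback):
//
//	func scanWords(base, size uintptr, visit func(addr uintptr)) {
//		h := heapBitsForAddr(base)
//		for i := uintptr(0); i < size; i += sys.PtrSize {
//			if h.isPointer() {
//				visit(base + i)
//			}
//			if !h.morePointers() {
//				break // only scalars remain in this object
//			}
//			h = h.next()
//		}
//	}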

// bulkBarrierPreWrite executes a write barrier
// for every pointer slot in the memory range [dst, dst+size),
// using pointer/scan information in [src, src+size).
// It executes the write barriers necessary before a memmove.
// src, dst, and size must be pointer-aligned.
// The range [dst, dst+size) must lie within a single object.
// It does not perform the actual writes.
//
// As a special case, src == 0 indicates that this is being used for a
// memclr. bulkBarrierPreWrite will pass 0 as the src of each write
// barrier.
//
// Callers should call bulkBarrierPreWrite immediately before
// calling memmove(dst, src, size). This function is marked nosplit
// to avoid being preempted; the GC must not stop the goroutine
// between the memmove and the execution of the barriers.
// The caller is also responsible for cgo pointer checks if this
// may be writing Go pointers into non-Go memory.
//
// The pointer bitmap is not maintained for allocations containing
// no pointers at all; any caller of bulkBarrierPreWrite must first
// make sure the underlying allocation contains pointers, usually
// by checking typ.ptrdata.
//
//go:nosplit
func bulkBarrierPreWrite(dst, src, size uintptr) {
	if (dst|src|size)&(sys.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.needed {
		return
	}
	if s := spanOf(dst); s == nil {
		// If dst is a global, use the data or BSS bitmaps to
		// execute write barriers.
		for _, datap := range activeModules() {
			if datap.data <= dst && dst < datap.edata {
				bulkBarrierBitmap(dst, src, size, dst-datap.data, datap.gcdatamask.bytedata)
				return
			}
		}
		for _, datap := range activeModules() {
			if datap.bss <= dst && dst < datap.ebss {
				bulkBarrierBitmap(dst, src, size, dst-datap.bss, datap.gcbssmask.bytedata)
				return
			}
		}
		return
	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		// dst was heap memory at some point, but isn't now.
		// It can't be a global. It must be either our stack,
		// or in the case of direct channel sends, it could be
		// another stack. Either way, no need for barriers.
		// This will also catch if dst is in a freed span,
		// though that should never have any live heap bits.
		return
	}

	buf := &getg().m.p.ptr().wbBuf
	h := heapBitsForAddr(dst)
	if src == 0 {
		for i := uintptr(0); i < size; i += sys.PtrSize {
			if h.isPointer() {
				dstx := (*uintptr)(unsafe.Pointer(dst + i))
				if !buf.putFast(*dstx, 0) {
					wbBufFlush(nil, 0)
				}
			}
			h = h.next()
		}
	} else {
		for i := uintptr(0); i < size; i += sys.PtrSize {
			if h.isPointer() {
				dstx := (*uintptr)(unsafe.Pointer(dst + i))
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				if !buf.putFast(*dstx, *srcx) {
					wbBufFlush(nil, 0)
				}
			}
			h = h.next()
		}
	}
}
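
// Illustration (not part of the original source): a typical caller pairs the
// barrier with the move it protects, in the style of the runtime's
// typedmemmove (hypothetical, simplified):
//
//	func typedCopy(typ *_type, dst, src unsafe.Pointer) {
//		if writeBarrier.needed && typ.ptrdata != 0 {
//			bulkBarrierPreWrite(uintptr(dst), uintptr(src), typ.ptrdata)
//		}
//		memmove(dst, src, typ.size)
//	}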

// bulkBarrierPreWriteSrcOnly is like bulkBarrierPreWrite but
// does not execute write barriers for [dst, dst+size).
//
// In addition to the requirements of bulkBarrierPreWrite
// callers need to ensure [dst, dst+size) is zeroed.
//
// This is used for special cases where e.g. dst was just
// created and zeroed with malloc.
//go:nosplit
func bulkBarrierPreWriteSrcOnly(dst, src, size uintptr) {
	if (dst|src|size)&(sys.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.needed {
		return
	}
	buf := &getg().m.p.ptr().wbBuf
	h := heapBitsForAddr(dst)
	for i := uintptr(0); i < size; i += sys.PtrSize {
		if h.isPointer() {
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			if !buf.putFast(0, *srcx) {
				wbBufFlush(nil, 0)
			}
		}
		h = h.next()
	}
}

// bulkBarrierBitmap executes write barriers for copying from [src,
// src+size) to [dst, dst+size) using a 1-bit pointer bitmap. src is
// assumed to start maskOffset bytes into the data covered by the
// bitmap in bits (which may not be a multiple of 8).
//
// This is used by bulkBarrierPreWrite for writes to data and BSS.
//
//go:nosplit
func bulkBarrierBitmap(dst, src, size, maskOffset uintptr, bits *uint8) {
	word := maskOffset / sys.PtrSize
	bits = addb(bits, word/8)
	mask := uint8(1) << (word % 8)

	buf := &getg().m.p.ptr().wbBuf
	for i := uintptr(0); i < size; i += sys.PtrSize {
		if mask == 0 {
			bits = addb(bits, 1)
			if *bits == 0 {
				// Skip 8 words (the loop increment will do the 8th).
				i += 7 * sys.PtrSize
				continue
			}
			mask = 1
		}
		if *bits&mask != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			if src == 0 {
				if !buf.putFast(*dstx, 0) {
					wbBufFlush(nil, 0)
				}
			} else {
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				if !buf.putFast(*dstx, *srcx) {
					wbBufFlush(nil, 0)
				}
			}
		}
		mask <<= 1
	}
}

// typeBitsBulkBarrier executes a write barrier for every
// pointer that would be copied from [src, src+size) to [dst,
// dst+size) by a memmove using the type bitmap to locate those
// pointer slots.
//
// The type typ must correspond exactly to [src, src+size) and [dst, dst+size).
// dst, src, and size must be pointer-aligned.
// The type typ must have a plain bitmap, not a GC program.
// The only use of this function is in channel sends, and the
// 64 kB channel element limit takes care of this for us.
//
// Must not be preempted because it typically runs right before memmove,
// and the GC must observe them as an atomic action.
//
//go:nosplit
func typeBitsBulkBarrier(typ *_type, dst, src, size uintptr) {
	if typ == nil {
		throw("runtime: typeBitsBulkBarrier without type")
	}
	if typ.size != size {
		println("runtime: typeBitsBulkBarrier with type ", typ.string(), " of size ", typ.size, " but memory size", size)
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if typ.kind&kindGCProg != 0 {
		println("runtime: typeBitsBulkBarrier with type ", typ.string(), " with GC prog")
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if !writeBarrier.needed {
		return
	}
	ptrmask := typ.gcdata
	buf := &getg().m.p.ptr().wbBuf
	var bits uint32
	for i := uintptr(0); i < typ.ptrdata; i += sys.PtrSize {
		if i&(sys.PtrSize*8-1) == 0 {
			bits = uint32(*ptrmask)
			ptrmask = addb(ptrmask, 1)
		} else {
			bits = bits >> 1
		}
		if bits&1 != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			if !buf.putFast(*dstx, *srcx) {
				wbBufFlush(nil, 0)
			}
		}
	}
}
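
// Illustration (not part of the original source): the 1-bit ptrmask
// consulted above packs one bit per pointer-sized word, low bit first,
// exactly as the doubleCheck code in heapBitsSetType reads it. A
// hypothetical decoder:
//
//	// wordIsPtr reports whether word i of a type is a pointer slot,
//	// given the type's gcdata pointer mask.
//	func wordIsPtr(ptrmask *byte, i uintptr) bool {
//		return *addb(ptrmask, i/8)>>(i%8)&1 != 0
//	}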

// initSpan initializes the heap bitmap for a span.
// If this is a span of pointer-sized objects, it initializes all
// words to pointer/scan.
// Otherwise, it initializes all words to scalar/dead.
func (h heapBits) initSpan(s *mspan) {
	// Clear the bitmap for the span.
	nw := (s.npages << _PageShift) / sys.PtrSize
	if nw%wordsPerBitmapByte != 0 {
		throw("initSpan: unaligned length")
	}
	if h.shift != 0 {
		throw("initSpan: unaligned base")
	}
	isPtrs := sys.PtrSize == 8 && s.elemsize == sys.PtrSize
	for nw > 0 {
		hNext, anw := h.forwardOrBoundary(nw)
		nbyte := anw / wordsPerBitmapByte
		if isPtrs {
			bitp := h.bitp
			for i := uintptr(0); i < nbyte; i++ {
				*bitp = bitPointerAll | bitScanAll
				bitp = add1(bitp)
			}
		} else {
			memclrNoHeapPointers(unsafe.Pointer(h.bitp), nbyte)
		}
		h = hNext
		nw -= anw
	}
}

// countAlloc returns the number of objects allocated in span s by
// scanning the mark bitmap.
func (s *mspan) countAlloc() int {
	count := 0
	bytes := divRoundUp(s.nelems, 8)
	// Iterate over each 8-byte chunk and count allocations
	// with an intrinsic. Note that newMarkBits guarantees that
	// gcmarkBits will be 8-byte aligned, so we don't have to
	// worry about edge cases, irrelevant bits will simply be zero.
	for i := uintptr(0); i < bytes; i += 8 {
		// Extract 64 bits from the byte pointer and get a OnesCount of
		// what's set, so we can quickly count the number of marked objects
		// in this span.
		mrkBits := *(*uint64)(unsafe.Pointer(s.gcmarkBits.bytep(i)))
		count += sys.OnesCount64(mrkBits)
	}
	return count
}
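
// Illustration (not part of the original source): the same population-count
// technique in portable form, assuming an 8-byte-aligned bitmap as
// newMarkBits guarantees:
//
//	// countSetBits returns the number of 1 bits in bitmap, using
//	// math/bits in place of the runtime's sys.OnesCount64 intrinsic.
//	func countSetBits(bitmap []uint64) int {
//		n := 0
//		for _, w := range bitmap {
//			n += bits.OnesCount64(w)
//		}
//		return n
//	}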

// heapBitsSetType records that the new allocation [x, x+size)
// holds in [x, x+dataSize) one or more values of type typ.
// (The number of values is given by dataSize / typ.size.)
// If dataSize < size, the fragment [x+dataSize, x+size) is
// recorded as non-pointer data.
// It is known that the type has pointers somewhere;
// malloc does not call heapBitsSetType when there are no pointers,
// because all free objects are marked as noscan during
// heapBitsSweepSpan.
//
// There can only be one allocation from a given span active at a time,
// and the bitmap for a span always falls on byte boundaries,
// so there are no write-write races for access to the heap bitmap.
// Hence, heapBitsSetType can access the bitmap without atomics.
//
// There can be read-write races between heapBitsSetType and things
// that read the heap bitmap like scanobject. However, since
// heapBitsSetType is only used for objects that have not yet been
// made reachable, readers will ignore bits being modified on this path.
func heapBitsSetType(x, size, dataSize uintptr, typ *_type) {
	const doubleCheck = false // slow but helpful; enable to test modifications to this code

	const (
		mask1 = bitPointer | bitScan                        // 00010001
		mask2 = bitPointer | bitScan | mask1<<heapBitsShift // 00110011
		mask3 = bitPointer | bitScan | mask2<<heapBitsShift // 01110111
	)

	// dataSize is always size rounded up to the next malloc size class,
	// except in the case of allocating a defer block, in which case
	// size is sizeof(_defer{}) (at least 6 words) and dataSize may be
	// arbitrarily larger.
	//
	// The checks for size == sys.PtrSize and size == 2*sys.PtrSize can
	// therefore assume that dataSize == size without checking it explicitly.

	if sys.PtrSize == 8 && size == sys.PtrSize {
		// It's one word and it has pointers, it must be a pointer.
		// Since all allocated one-word objects are pointers
		// (non-pointers are aggregated into tinySize allocations),
		// initSpan sets the pointer bits for us. Nothing to do here.
		if doubleCheck {
			h := heapBitsForAddr(x)
			if !h.isPointer() {
				throw("heapBitsSetType: pointer bit missing")
			}
			if !h.morePointers() {
				throw("heapBitsSetType: scan bit missing")
			}
		}
		return
	}

	h := heapBitsForAddr(x)
	ptrmask := typ.gcdata // start of 1-bit pointer mask (or GC program, handled below)

	// 2-word objects only have 4 bitmap bits and 3-word objects only
	// have 6 bitmap bits. Both fit within a single heap bitmap byte,
	// so these cases can be handled with a single read-modify-write
	// and an early return.
	if size == 2*sys.PtrSize {
		if typ.size == sys.PtrSize {
			// We're allocating a block big enough to hold two pointers.
			// On 64-bit, that means the actual object must be two pointers,
			// or else we'd have used the one-pointer-sized block.
			// On 32-bit, however, this is the 8-byte block, the smallest one.
			// So it could be that we're allocating one pointer and this was
			// just the smallest block available. Distinguish by checking dataSize.
			// (In general the number of instances of typ being allocated is
			// dataSize/typ.size.)
			if sys.PtrSize == 4 && dataSize == sys.PtrSize {
				// 1 pointer object. On 32-bit machines clear the bit for the
				// unused second word.
				*h.bitp &^= (bitPointer | bitScan | (bitPointer|bitScan)<<heapBitsShift) << h.shift
				*h.bitp |= (bitPointer | bitScan) << h.shift
			} else {
				// 2-element array of pointer, or slice of pointer.
				*h.bitp |= (bitPointer | bitScan | (bitPointer|bitScan)<<heapBitsShift) << h.shift
			}
			return
		}
		// Otherwise typ.size must be 2*sys.PtrSize,
		// and typ.kind&kindGCProg == 0.
		if doubleCheck {
			if typ.size != 2*sys.PtrSize || typ.kind&kindGCProg != 0 {
				print("runtime: heapBitsSetType size=", size, " but typ.size=", typ.size, " gcprog=", typ.kind&kindGCProg != 0, "\n")
				throw("heapBitsSetType")
			}
		}
		b := uint32(*ptrmask)
		hb := b & 3
		hb |= bitScanAll & ((bitScan << (typ.ptrdata / sys.PtrSize)) - 1)
		// Clear the bits for this object so we can set the
		// appropriate ones.
		*h.bitp &^= (bitPointer | bitScan | ((bitPointer | bitScan) << heapBitsShift)) << h.shift
		*h.bitp |= uint8(hb << h.shift)
		return
	} else if size == 3*sys.PtrSize {
		b := uint8(*ptrmask)
		if doubleCheck {
			if b == 0 {
				println("runtime: invalid type ", typ.string())
				throw("heapBitsSetType: called with non-pointer type")
			}
			if sys.PtrSize != 8 {
				throw("heapBitsSetType: unexpected 3 pointer wide size class on 32 bit")
			}
			if typ.kind&kindGCProg != 0 {
				throw("heapBitsSetType: unexpected GC prog for 3 pointer wide size class")
			}
			if typ.size == 2*sys.PtrSize {
				print("runtime: heapBitsSetType size=", size, " but typ.size=", typ.size, "\n")
				throw("heapBitsSetType: inconsistent object sizes")
			}
		}
		if typ.size == sys.PtrSize {
			// The type contains a pointer otherwise heapBitsSetType wouldn't have been called.
			// Since the type is only 1 pointer wide and contains a pointer, its gcdata must be exactly 1.
			if doubleCheck && *typ.gcdata != 1 {
				print("runtime: heapBitsSetType size=", size, " typ.size=", typ.size, "but *typ.gcdata", *typ.gcdata, "\n")
				throw("heapBitsSetType: unexpected gcdata for 1 pointer wide type size in 3 pointer wide size class")
			}
			// 3 element array of pointers. Unrolling ptrmask 3 times into p yields 00000111.
			b = 7
		}

		hb := b & 7
		// Set bitScan bits for all pointers.
		hb |= hb << wordsPerBitmapByte
		// First bitScan bit is always set since the type contains pointers.
		hb |= bitScan
		// Second bitScan bit needs to also be set if the third bitScan bit is set.
		hb |= hb & (bitScan << (2 * heapBitsShift)) >> 1

		// For h.shift > 1 heap bits cross a byte boundary and need to
		// be written part to h.bitp and part to the next h.bitp.
		switch h.shift {
		case 0:
			*h.bitp &^= mask3 << 0
			*h.bitp |= hb << 0
		case 1:
			*h.bitp &^= mask3 << 1
			*h.bitp |= hb << 1
		case 2:
			*h.bitp &^= mask2 << 2
			*h.bitp |= (hb & mask2) << 2
			// Two words written to the first byte.
			// Advance two words to get to the next byte.
			h = h.next().next()
			*h.bitp &^= mask1
			*h.bitp |= (hb >> 2) & mask1
		case 3:
			*h.bitp &^= mask1 << 3
			*h.bitp |= (hb & mask1) << 3
			// One word written to the first byte.
			// Advance one word to get to the next byte.
			h = h.next()
			*h.bitp &^= mask2
			*h.bitp |= (hb >> 1) & mask2
		}
		return
	}

	// Copy from 1-bit ptrmask into 2-bit bitmap.
	// The basic approach is to use a single uintptr as a bit buffer,
	// alternating between reloading the buffer and writing bitmap bytes.
	// In general, one load can supply two bitmap byte writes.
	// This is a lot of lines of code, but it compiles into relatively few
	// machine instructions.

	outOfPlace := false
	if arenaIndex(x+size-1) != arenaIdx(h.arena) || (doubleCheck && fastrand()%2 == 0) {
		// This object spans heap arenas, so the bitmap may be
		// discontiguous. Unroll it into the object instead
		// and then copy it out.
		//
		// In doubleCheck mode, we randomly do this anyway to
		// stress test the bitmap copying path.
		outOfPlace = true
		h.bitp = (*uint8)(unsafe.Pointer(x))
		h.last = nil
	}

	var (
		// Ptrmask input.
		p     *byte   // last ptrmask byte read
		b     uintptr // ptrmask bits already loaded
		nb    uintptr // number of bits in b at next read
		endp  *byte   // final ptrmask byte to read (then repeat)
		endnb uintptr // number of valid bits in *endp
		pbits uintptr // alternate source of bits

		// Heap bitmap output.
		w     uintptr // words processed
		nw    uintptr // number of words to process
		hbitp *byte   // next heap bitmap byte to write
		hb    uintptr // bits being prepared for *hbitp
	)

	hbitp = h.bitp

	// Handle GC program. Delayed until this part of the code
	// so that we can use the same double-checking mechanism
	// as the 1-bit case. Nothing above could have encountered
	// GC programs: the cases were all too small.
	if typ.kind&kindGCProg != 0 {
		heapBitsSetTypeGCProg(h, typ.ptrdata, typ.size, dataSize, size, addb(typ.gcdata, 4))
		if doubleCheck {
			// Double-check the heap bits written by GC program
			// by running the GC program to create a 1-bit pointer
			// mask and then checking that the heap bits match.
			lock(&debugPtrmask.lock)
			if debugPtrmask.data == nil {
				debugPtrmask.data = (*byte)(persistentalloc(1<<20, 1, &memstats.other_sys))
			}
			ptrmask = debugPtrmask.data
			runGCProg(addb(typ.gcdata, 4), nil, ptrmask, 1)
		}
		goto Phase4
	}

	// Note about sizes:
	//
	// typ.size is the number of words in the object,
	// and typ.ptrdata is the number of words in the prefix
	// of the object that contains pointers. That is, the final
	// typ.size - typ.ptrdata words contain no pointers.
	// This allows optimization of a common pattern where
	// an object has a small header followed by a large scalar
	// buffer. If we know the pointers are over, we don't have
	// to scan the buffer's heap bitmap at all.
	// The 1-bit ptrmasks are sized to contain only bits for
	// the typ.ptrdata prefix, zero padded out to a full byte
	// of bitmap. This code sets nw (below) so that heap bitmap
	// bits are only written for the typ.ptrdata prefix; if there
	// is more room in the allocated object, the remaining words
	// are noscan (dead), which works out because the heap bitmap
	// for those words is already zero.
	//
	// Replicated copies are not as nice: if there is an array of
	// objects with scalar tails, all but the last tail must be
	// initialized, because there is no way to say "skip forward".

	p = ptrmask
	if typ.size < dataSize {
		// Filling in bits for an array of typ.
		// Set up for repetition of ptrmask during main loop.
		// Note that ptrmask describes only a prefix of typ.size.
		const maxBits = sys.PtrSize*8 - 7
		if typ.ptrdata/sys.PtrSize <= maxBits {
			// Entire ptrmask fits in uintptr with room for a byte fragment.
			// Load into pbits and never read from ptrmask again.
			// This is especially important when the ptrmask has
			// many repetitions, as otherwise the repeated bytes
			// would be read over and over again.
			nb = typ.ptrdata / sys.PtrSize
			for i := uintptr(0); i < nb; i += 8 {
				b |= uintptr(*p) << i
				p = add1(p)
			}
			nb = typ.size / sys.PtrSize

			// Replicate ptrmask to fill entire pbits uintptr.
			// Doubling and truncating is fewer steps than
			// iterating by nb each time. (nb could be 1.)
			// Since we loaded typ.ptrdata/sys.PtrSize bits
			// but are pretending to have typ.size/sys.PtrSize,
			// there might be no replication necessary/possible.
			pbits = b
			endnb = nb
			if nb+nb <= maxBits {
				for endnb <= sys.PtrSize*8 {
					pbits |= pbits << endnb
					endnb += endnb
				}
				// Truncate to a multiple of original ptrmask.
				// Because nb+nb <= maxBits, nb fits in a byte.
				// Byte division is cheaper than uintptr division.
				endnb = uintptr(maxBits/byte(nb)) * nb
				pbits &= 1<<endnb - 1
				b = pbits
				nb = endnb
			}

			// Clear p and endp as sentinel for using pbits.
			// Checked during Phase 2 loop.
			p = nil
			endp = nil
		} else {
			// Ptrmask is larger. Read it multiple times.
			n := (typ.ptrdata/sys.PtrSize+7)/8 - 1
			endp = addb(ptrmask, n)
			endnb = typ.size/sys.PtrSize - n*8
		}
	}
	if p != nil {
		b = uintptr(*p)
		p = add1(p)
		nb = 8
	}

	if typ.size == dataSize {
		// Single entry: can stop once we reach the non-pointer data.
		nw = typ.ptrdata / sys.PtrSize
	} else {
		// Repeated instances of typ in an array.
		// Have to process first N-1 entries in full, but can stop
		// once we reach the non-pointer data in the final entry.
		nw = ((dataSize/typ.size-1)*typ.size + typ.ptrdata) / sys.PtrSize
	}
	if nw == 0 {
		// No pointers! Caller was supposed to check.
		println("runtime: invalid type ", typ.string())
		throw("heapBitsSetType: called with non-pointer type")
		return
	}

	// Phase 1: Special case for leading byte (shift==0) or half-byte (shift==2).
	// The leading half-byte is special because its bitmap byte is shared
	// with another object, so we must preserve the bits already there.
	switch {
	default:
		throw("heapBitsSetType: unexpected shift")

	case h.shift == 0:
		// Ptrmask and heap bitmap are aligned.
		//
		// This is a fast path for small objects.
		//
		// The first byte we write out covers the first four
		// words of the object. The scan bits are set since
		// there are pointers somewhere in the object.
		//
		// We set four bits at a time here, but if the object
		// is fewer than four words, phase 3 will clear
		// unnecessary bits.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			goto Phase3
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4
		nb -= 4

	case h.shift == 2:
		// Ptrmask and heap bitmap are misaligned.
		//
		// On 32 bit architectures only the 6-word object that corresponds
		// to a 24 bytes size class can start with h.shift of 2 here since
		// all other non 16 byte aligned size classes have been handled by
		// special code paths at the beginning of heapBitsSetType on 32 bit.
		//
		// Many size classes are only 16 byte aligned. On 64 bit architectures
		// this results in a heap bitmap position starting with a h.shift of 2.
		//
		// The bits for the first two words are in a byte shared
		// with another object, so we must be careful with the bits
		// already there.
		hb = (b & (bitPointer | bitPointer<<heapBitsShift)) << (2 * heapBitsShift)
		hb |= bitScan << (2 * heapBitsShift)
		if nw > 1 {
			hb |= bitScan << (3 * heapBitsShift)
		}
		b >>= 2
		nb -= 2
		*hbitp &^= uint8((bitPointer | bitScan | ((bitPointer | bitScan) << heapBitsShift)) << (2 * heapBitsShift))
		*hbitp |= uint8(hb)
		hbitp = add1(hbitp)
		if w += 2; w >= nw {
			// There are no more pointer words. Let Phase 3
			// zero the rest of the object's bitmap with
			// hb == 0.
			hb = 0
			w += 4
			goto Phase3
		}
	}

	// Phase 2: Full bytes in bitmap, up to but not including write to last byte (full or partial) in bitmap.
	// The loop computes the bits for that last write but does not execute the write;
	// it leaves the bits in hb for processing by phase 3.
	// To avoid repeated adjustment of nb, we subtract out the 4 bits we're going to
	// use in the first half of the loop right now, and then we only adjust nb explicitly
	// if the 8 bits used by each iteration isn't balanced by 8 bits loaded mid-loop.
	nb -= 4
	for {
		// Emit bitmap byte.
		// b has at least nb+4 bits, with one exception:
		// if w+4 >= nw, then b has only nw-w bits,
		// but we'll stop at the break and then truncate
		// appropriately in Phase 3.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			break
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4

		// Load more bits. b has nb right now.
		if p != endp {
			// Fast path: keep reading from ptrmask.
			// nb unmodified: we just loaded 8 bits,
			// and the next iteration will consume 8 bits,
			// leaving us with the same nb the next time we're here.
			if nb < 8 {
				b |= uintptr(*p) << nb
				p = add1(p)
			} else {
				// Reduce the number of bits in b.
				// This is important if we skipped
				// over a scalar tail, since nb could
				// be larger than the bit width of b.
				nb -= 8
			}
		} else if p == nil {
			// Almost as fast path: track bit count and refill from pbits.
			// For short repetitions.
			if nb < 8 {
				b |= pbits << nb
				nb += endnb
			}
			nb -= 8 // for next iteration
		} else {
			// Slow path: reached end of ptrmask.
			// Process final partial byte and rewind to start.
			b |= uintptr(*p) << nb
			nb += endnb
			if nb < 8 {
				b |= uintptr(*ptrmask) << nb
				p = add1(ptrmask)
			} else {
				nb -= 8
				p = ptrmask
			}
		}

		// Emit bitmap byte.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			break
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4
	}

Phase3:
	// Phase 3: Write last byte or partial byte and zero the rest of the bitmap entries.
	if w > nw {
		// Counting the 4 entries in hb not yet written to memory,
		// there are more entries than possible pointer slots.
		// Discard the excess entries (can't be more than 3).
		mask := uintptr(1)<<(4-(w-nw)) - 1
		hb &= mask | mask<<4 // apply mask to both pointer bits and scan bits
	}

	// Change nw from counting possibly-pointer words to total words in allocation.
	nw = size / sys.PtrSize

	// Write whole bitmap bytes.
	// The first is hb, the rest are zero.
	if w <= nw {
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		hb = 0 // for possible final half-byte below
		for w += 4; w <= nw; w += 4 {
			*hbitp = 0
			hbitp = add1(hbitp)
		}
	}

	// Write final partial bitmap byte if any.
	// We know w > nw, or else we'd still be in the loop above.
	// It can be bigger only due to the 4 entries in hb that it counts.
	// If w == nw+4 then there's nothing left to do: we wrote all nw entries
	// and can discard the 4 sitting in hb.
	// But if w == nw+2, we need to write first two in hb.
	// The byte is shared with the next object, so be careful with
	// existing bits.
	if w == nw+2 {
		*hbitp = *hbitp&^(bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift) | uint8(hb)
	}
Phase4:
	// Phase 4: Copy unrolled bitmap to per-arena bitmaps, if necessary.
	if outOfPlace {
		// TODO: We could probably make this faster by
		// handling [x+dataSize, x+size) specially.
		h := heapBitsForAddr(x)

		// cnw is the number of heap words, or bit pairs
		// remaining (like nw above).
		cnw := size / sys.PtrSize
		src := (*uint8)(unsafe.Pointer(x))
		// We know the first and last byte of the bitmap are
		// not the same, but it's still possible for small
		// objects to share bitmap bytes with neighboring
		// objects.
		//
		// Handle the first byte specially if it's shared. See
		// Phase 1 for why this is the only special case we need.
		if doubleCheck {
			if !(h.shift == 0 || h.shift == 2) {
				print("x=", x, " size=", size, " cnw=", h.shift, "\n")
				throw("bad start shift")
			}
		}
		if h.shift == 2 {
			*h.bitp = *h.bitp&^((bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift)<<(2*heapBitsShift)) | *src
			h = h.next().next()
			cnw -= 2
			src = addb(src, 1)
		}
		// We're now byte aligned. Copy out to per-arena
		// bitmaps until the last byte (which may again be
		// partial).
		for cnw >= 4 {
			// This loop processes four words at a time,
			// so round cnw down accordingly.
			hNext, words := h.forwardOrBoundary(cnw / 4 * 4)

			// n is the number of bitmap bytes to copy.
			n := words / 4
			memmove(unsafe.Pointer(h.bitp), unsafe.Pointer(src), n)
			cnw -= words
			h = hNext
			src = addb(src, n)
		}
		if doubleCheck && h.shift != 0 {
			print("cnw=", cnw, " h.shift=", h.shift, "\n")
			throw("bad shift after block copy")
		}
		// Handle the last byte if it's shared.
		if cnw == 2 {
			*h.bitp = *h.bitp&^(bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift) | *src
			src = addb(src, 1)
			h = h.next().next()
		}
		if doubleCheck {
			if uintptr(unsafe.Pointer(src)) > x+size {
				throw("copy exceeded object size")
			}
			if !(cnw == 0 || cnw == 2) {
				print("x=", x, " size=", size, " cnw=", cnw, "\n")
				throw("bad number of remaining words")
			}
			// Set up hbitp so doubleCheck code below can check it.
			hbitp = h.bitp
		}
		// Zero the object where we wrote the bitmap.
		memclrNoHeapPointers(unsafe.Pointer(x), uintptr(unsafe.Pointer(src))-x)
	}

	// Double check the whole bitmap.
	if doubleCheck {
		// x+size may not point to the heap, so back up one
		// word and then advance it the way we do above.
		end := heapBitsForAddr(x + size - sys.PtrSize)
		if outOfPlace {
			// In out-of-place copying, we just advance
			// using next.
			end = end.next()
		} else {
			// Don't use next because that may advance to
			// the next arena and the in-place logic
			// doesn't do that.
			end.shift += heapBitsShift
			if end.shift == 4*heapBitsShift {
				end.bitp, end.shift = add1(end.bitp), 0
			}
		}
		if typ.kind&kindGCProg == 0 && (hbitp != end.bitp || (w == nw+2) != (end.shift == 2)) {
			println("ended at wrong bitmap byte for", typ.string(), "x", dataSize/typ.size)
			print("typ.size=", typ.size, " typ.ptrdata=", typ.ptrdata, " dataSize=", dataSize, " size=", size, "\n")
			print("w=", w, " nw=", nw, " b=", hex(b), " nb=", nb, " hb=", hex(hb), "\n")
			h0 := heapBitsForAddr(x)
			print("initial bits h0.bitp=", h0.bitp, " h0.shift=", h0.shift, "\n")
			print("ended at hbitp=", hbitp, " but next starts at bitp=", end.bitp, " shift=", end.shift, "\n")
			throw("bad heapBitsSetType")
		}

		// Double-check that bits to be written were written correctly.
		// Does not check that other bits were not also modified, unfortunately.
		h := heapBitsForAddr(x)
		nptr := typ.ptrdata / sys.PtrSize
		ndata := typ.size / sys.PtrSize
		count := dataSize / typ.size
		totalptr := ((count-1)*typ.size + typ.ptrdata) / sys.PtrSize
		for i := uintptr(0); i < size/sys.PtrSize; i++ {
			j := i % ndata
			var have, want uint8
			have = (*h.bitp >> h.shift) & (bitPointer | bitScan)
			if i >= totalptr {
				if typ.kind&kindGCProg != 0 && i < (totalptr+3)/4*4 {
					// heapBitsSetTypeGCProg always fills
					// in full nibbles of bitScan.
					want = bitScan
				}
			} else {
				if j < nptr && (*addb(ptrmask, j/8)>>(j%8))&1 != 0 {
					want |= bitPointer
				}
				want |= bitScan
			}
			if have != want {
				println("mismatch writing bits for", typ.string(), "x", dataSize/typ.size)
				print("typ.size=", typ.size, " typ.ptrdata=", typ.ptrdata, " dataSize=", dataSize, " size=", size, "\n")
				print("kindGCProg=", typ.kind&kindGCProg != 0, " outOfPlace=", outOfPlace, "\n")
				print("w=", w, " nw=", nw, " b=", hex(b), " nb=", nb, " hb=", hex(hb), "\n")
				h0 := heapBitsForAddr(x)
				print("initial bits h0.bitp=", h0.bitp, " h0.shift=", h0.shift, "\n")
				print("current bits h.bitp=", h.bitp, " h.shift=", h.shift, " *h.bitp=", hex(*h.bitp), "\n")
				print("ptrmask=", ptrmask, " p=", p, " endp=", endp, " endnb=", endnb, " pbits=", hex(pbits), " b=", hex(b), " nb=", nb, "\n")
				println("at word", i, "offset", i*sys.PtrSize, "have", hex(have), "want", hex(want))
				if typ.kind&kindGCProg != 0 {
					println("GC program:")
					dumpGCProg(addb(typ.gcdata, 4))
				}
				throw("bad heapBitsSetType")
			}
			h = h.next()
		}
		if ptrmask == debugPtrmask.data {
			unlock(&debugPtrmask.lock)
		}
	}
}

var debugPtrmask struct {
	lock mutex
	data *byte
}
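
// Illustration (not part of the original source): consider a hypothetical
// two-word type
//
//	type node struct {
//		next *node   // pointer word
//		val  uintptr // scalar word
//	}
//
// Its 1-bit ptrmask is 01 (low bit first: word 0 is a pointer, word 1 is
// not), and heapBitsSetType expands that into the 2-bit heap bitmap as a
// pointer bit for word 0 plus scan bits covering the pointer prefix.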

// heapBitsSetTypeGCProg implements heapBitsSetType using a GC program.
// progSize is the size of the memory described by the program.
// elemSize is the size of the element that the GC program describes (a prefix of).
// dataSize is the total size of the intended data, a multiple of elemSize.
// allocSize is the total size of the allocated memory.
//
// GC programs are only used for large allocations.
// heapBitsSetType requires that allocSize is a multiple of 4 words,
// so that the relevant bitmap bytes are not shared with surrounding
// objects.
func heapBitsSetTypeGCProg(h heapBits, progSize, elemSize, dataSize, allocSize uintptr, prog *byte) {
	if sys.PtrSize == 8 && allocSize%(4*sys.PtrSize) != 0 {
		// Alignment will be wrong.
		throw("heapBitsSetTypeGCProg: small allocation")
	}
	var totalBits uintptr
	if elemSize == dataSize {
		totalBits = runGCProg(prog, nil, h.bitp, 2)
		if totalBits*sys.PtrSize != progSize {
			println("runtime: heapBitsSetTypeGCProg: total bits", totalBits, "but progSize", progSize)
			throw("heapBitsSetTypeGCProg: unexpected bit count")
		}
	} else {
		count := dataSize / elemSize

		// Piece together program trailer to run after prog that does:
		//	literal(0)
		//	repeat(1, elemSize-progSize-1) // zeros to fill element size
		//	repeat(elemSize, count-1) // repeat that element for count
		// This zero-pads the data remaining in the first element and then
		// repeats that first element to fill the array.
		var trailer [40]byte // 3 varints (max 10 each) + some bytes
		i := 0
		if n := elemSize/sys.PtrSize - progSize/sys.PtrSize; n > 0 {
			// literal(0)
			trailer[i] = 0x01
			i++
			trailer[i] = 0
			i++
			if n > 1 {
				// repeat(1, n-1)
				trailer[i] = 0x81
				i++
				n--
				for ; n >= 0x80; n >>= 7 {
					trailer[i] = byte(n | 0x80)
					i++
				}
				trailer[i] = byte(n)
				i++
			}
		}
		// repeat(elemSize/ptrSize, count-1)
		trailer[i] = 0x80
		i++
		n := elemSize / sys.PtrSize
		for ; n >= 0x80; n >>= 7 {
			trailer[i] = byte(n | 0x80)
			i++
		}
		trailer[i] = byte(n)
		i++
		n = count - 1
		for ; n >= 0x80; n >>= 7 {
			trailer[i] = byte(n | 0x80)
			i++
		}
		trailer[i] = byte(n)
		i++
		trailer[i] = 0 // stop
		i++

		runGCProg(prog, &trailer[0], h.bitp, 2)

		// Even though we filled in the full array just now,
		// record that we only filled in up to the ptrdata of the
		// last element. This will cause the code below to
		// memclr the dead section of the final array element,
		// so that scanobject can stop early in the final element.
		totalBits = (elemSize*(count-1) + progSize) / sys.PtrSize
	}
	endProg := unsafe.Pointer(addb(h.bitp, (totalBits+3)/4))
	endAlloc := unsafe.Pointer(addb(h.bitp, allocSize/sys.PtrSize/wordsPerBitmapByte))
	memclrNoHeapPointers(endProg, uintptr(endAlloc)-uintptr(endProg))
}

// progToPointerMask returns the 1-bit pointer mask output by the GC program prog.
// size is the size of the region described by prog, in bytes.
// The resulting bitvector will have no more than size/sys.PtrSize bits.
func progToPointerMask(prog *byte, size uintptr) bitvector {
	n := (size/sys.PtrSize + 7) / 8
	x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
	x[len(x)-1] = 0xa1 // overflow check sentinel
	n = runGCProg(prog, nil, &x[0], 1)
	if x[len(x)-1] != 0xa1 {
		throw("progToPointerMask: overflow")
	}
	return bitvector{int32(n), &x[0]}
}

// Packed GC pointer bitmaps, aka GC programs.
//
// For large types containing arrays, the type information has a
// natural repetition that can be encoded to save space in the
// binary and in the memory representation of the type information.
//
// The encoding is a simple Lempel-Ziv style bytecode machine
// with the following instructions:
//
//	00000000: stop
//	0nnnnnnn: emit n bits copied from the next (n+7)/8 bytes
//	10000000 n c: repeat the previous n bits c times; n, c are varints
//	1nnnnnnn c: repeat the previous n bits c times; c is a varint

// runGCProg executes the GC program prog, and then trailer if non-nil,
// writing to dst with entries of the given size.
// If size == 1, dst is a 1-bit pointer mask laid out moving forward;
// if size == 2, dst is the 2-bit heap bitmap, with pointer bits in the
// low nibble of each byte and scan bits in the high nibble.
// runGCProg returns the number of 1- or 2-bit entries written to dst.
func runGCProg(prog, trailer, dst *byte, size int) uintptr {
	dstStart := dst

	// Bits waiting to be written to memory.
	var bits uintptr
	var nbits uintptr

	p := prog
Run:
	for {
		// Flush accumulated full bytes.
		// The rest of the loop assumes that nbits <= 7.
		for ; nbits >= 8; nbits -= 8 {
			if size == 1 {
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			} else {
				v := bits&bitPointerAll | bitScanAll
				*dst = uint8(v)
				dst = add1(dst)
				bits >>= 4
				v = bits&bitPointerAll | bitScanAll
				*dst = uint8(v)
				dst = add1(dst)
				bits >>= 4
			}
		}

		// Process one instruction.
		inst := uintptr(*p)
		p = add1(p)
		n := inst & 0x7F
		if inst&0x80 == 0 {
			// Literal bits; n == 0 means end of program.
			if n == 0 {
				// Program is over; continue in trailer if present.
				if trailer != nil {
					p = trailer
					trailer = nil
					continue
				}
				break Run
			}
			nbyte := n / 8
			for i := uintptr(0); i < nbyte; i++ {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				if size == 1 {
					*dst = uint8(bits)
					dst = add1(dst)
					bits >>= 8
				} else {
					v := bits&0xf | bitScanAll
					*dst = uint8(v)
					dst = add1(dst)
					bits >>= 4
					v = bits&0xf | bitScanAll
					*dst = uint8(v)
					dst = add1(dst)
					bits >>= 4
				}
			}
			if n %= 8; n > 0 {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				nbits += n
			}
			continue Run
		}

		// Repeat. If n == 0, it is encoded in a varint in the next bytes.
		if n == 0 {
			for off := uint(0); ; off += 7 {
				x := uintptr(*p)
				p = add1(p)
				n |= (x & 0x7F) << off
				if x&0x80 == 0 {
					break
				}
			}
		}

		// Count is encoded in a varint in the next bytes.
		c := uintptr(0)
		for off := uint(0); ; off += 7 {
			x := uintptr(*p)
			p = add1(p)
			c |= (x & 0x7F) << off
			if x&0x80 == 0 {
				break
			}
		}
		c *= n // now total number of bits to copy

		// If the number of bits being repeated is small, load them
		// into a register and use that register for the possibly
		// many repetitions instead of copying from memory each time.
		// The pattern is at most maxBits long, so it fits in a
		// uintptr with room for a byte fragment.
		src := dst
		const maxBits = sys.PtrSize*8 - 7
		if n <= maxBits {
			// Start with bits in output buffer.
			pattern := bits
			npattern := nbits

			// If we need more bits, fetch them from memory.
			if size == 1 {
				src = subtract1(src)
				for npattern < n {
					pattern <<= 8
					pattern |= uintptr(*src)
					src = subtract1(src)
					npattern += 8
				}
			} else {
				src = subtract1(src)
				for npattern < n {
					pattern <<= 4
					pattern |= uintptr(*src) & 0xf
					src = subtract1(src)
					npattern += 4
				}
			}

			// We started with the whole bit output buffer,
			// and then we loaded bits from whole bytes.
			// Those two may or may not line up exactly with n;
			// if not, trim off the excess high bits.
			if npattern > n {
				pattern >>= npattern - n
				npattern = n
			}

			// Replicate pattern to at most maxBits.
			if npattern == 1 {
				// One bit being repeated.
				// If the bit is 1, make the pattern all 1s.
				// If the bit is 0, the pattern is already all 0s,
				// and we can claim that the number of bits
				// in the word is equal to the number we need (c),
				// because right shift of bits will zero fill.
				if pattern == 1 {
					pattern = 1<<maxBits - 1
					npattern = maxBits
				} else {
					npattern = c
				}
			} else {
				b := pattern
				nb := npattern
				if nb+nb <= maxBits {
					// Double pattern until the whole uintptr is filled.
					for nb <= sys.PtrSize*8 {
						b |= b << nb
						nb += nb
					}
					// Trim away incomplete copy of original pattern in high bits.
					nb = maxBits / npattern * npattern
					b &= 1<<nb - 1
					pattern = b
					npattern = nb
				}
			}

			// Add pattern to bit buffer and flush bit buffer, c/npattern times.
			// Since pattern contains >8 bits, there will be full bytes to flush
			// on each iteration.
			for ; c >= npattern; c -= npattern {
				bits |= pattern << nbits
				nbits += npattern
				if size == 1 {
					for nbits >= 8 {
						*dst = uint8(bits)
						dst = add1(dst)
						bits >>= 8
						nbits -= 8
					}
				} else {
					for nbits >= 4 {
						*dst = uint8(bits&0xf | bitScanAll)
						dst = add1(dst)
						bits >>= 4
						nbits -= 4
					}
				}
			}

			// Add final fragment to bit buffer.
			if c > 0 {
				pattern &= 1<<c - 1
				bits |= pattern << nbits
				nbits += c
			}
			continue Run
		}

		// Repeat; n is too large to fit in a register.
		// Since nbits <= 7, we know the first few bytes of
		// repeated data are already written to memory.
		off := n - nbits // n > nbits because n > maxBits and nbits <= 7
		if size == 1 {
			// Leading src fragment.
			src = subtractb(src, (off+7)/8)
			if frag := off & 7; frag != 0 {
				bits |= uintptr(*src) >> (8 - frag) << nbits
				src = add1(src)
				nbits += frag
				c -= frag
			}
			// Main loop: load one byte, write another.
			// The bits are rotating through the bit buffer.
			for i := c / 8; i > 0; i-- {
				bits |= uintptr(*src) << nbits
				src = add1(src)
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			}
			// Final src fragment.
			if c %= 8; c > 0 {
				bits |= (uintptr(*src) & (1<<c - 1)) << nbits
				nbits += c
			}
		} else {
			// Leading src fragment.
			src = subtractb(src, (off+3)/4)
			if frag := off & 3; frag != 0 {
				bits |= (uintptr(*src) & 0xf) >> (4 - frag) << nbits
				src = add1(src)
				nbits += frag
				c -= frag
			}
			// Main loop: load one byte, write another.
			// The bits are rotating through the bit buffer.
			for i := c / 4; i > 0; i-- {
				bits |= (uintptr(*src) & 0xf) << nbits
				src = add1(src)
				*dst = uint8(bits&0xf | bitScanAll)
				dst = add1(dst)
				bits >>= 4
			}
			// Final src fragment.
			if c %= 4; c > 0 {
				bits |= (uintptr(*src) & (1<<c - 1)) << nbits
				nbits += c
			}
		}
	}

	// Write any final bits out, using full-byte writes, even for the final byte.
	var totalBits uintptr
	if size == 1 {
		totalBits = (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*8 + nbits
		nbits += -nbits & 7
		for ; nbits > 0; nbits -= 8 {
			*dst = uint8(bits)
			dst = add1(dst)
			bits >>= 8
		}
	} else {
		totalBits = (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*4 + nbits
		nbits += -nbits & 3
		for ; nbits > 0; nbits -= 4 {
			v := bits&0xf | bitScanAll
			*dst = uint8(v)
			dst = add1(dst)
			bits >>= 4
		}
	}
	return totalBits
}
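
// Illustration (not part of the original source): using the instruction set
// described above runGCProg, the layout of a hypothetical two-word element
// (pointer, then scalar) repeated 100 times could be encoded as:
//
//	prog := []byte{
//		0x02, 0x01, // emit 2 literal bits: 01 (pointer, then scalar)
//		0x82, 0x63, // repeat the previous 2 bits 99 more times
//		0x00,       // stop
//	}
//
// runGCProg(&prog[0], nil, dst, 1) would then write 200 mask bits to dst.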

// materializeGCProg allocates space for the (1-bit) pointer bitmask
// for an object of size ptrdata. Then it fills that space with the
// pointer mask specified by the program prog.
// The bitmask starts at s.startAddr.
// The result must be deallocated with dematerializeGCProg.
func materializeGCProg(ptrdata uintptr, prog *byte) *mspan {
	// Each word of ptrdata needs one bit in the bitmap.
	bitmapBytes := divRoundUp(ptrdata, 8*sys.PtrSize)
	// Compute the number of pages needed for bitmapBytes.
	pages := divRoundUp(bitmapBytes, pageSize)
	s := mheap_.allocManual(pages, spanAllocPtrScalarBits)
	runGCProg(addb(prog, 4), nil, (*byte)(unsafe.Pointer(s.startAddr)), 1)
	return s
}

func dematerializeGCProg(s *mspan) {
	mheap_.freeManual(s, spanAllocPtrScalarBits)
}

func dumpGCProg(p *byte) {
	nptr := 0
	for {
		x := *p
		p = add1(p)
		if x == 0 {
			print("\t", nptr, " end\n")
			break
		}
		if x&0x80 == 0 {
			print("\t", nptr, " lit ", x, ":")
			n := int(x+7) / 8
			for i := 0; i < n; i++ {
				print(" ", hex(*p))
				p = add1(p)
			}
			print("\n")
			nptr += int(x)
		} else {
			nbit := int(x &^ 0x80)
			if nbit == 0 {
				for nb := uint(0); ; nb += 7 {
					x := *p
					p = add1(p)
					nbit |= int(x&0x7f) << nb
					if x&0x80 == 0 {
						break
					}
				}
			}
			count := 0
			for nb := uint(0); ; nb += 7 {
				x := *p
				p = add1(p)
				count |= int(x&0x7f) << nb
				if x&0x80 == 0 {
					break
				}
			}
			print("\t", nptr, " repeat ", nbit, " × ", count, "\n")
			nptr += nbit * count
		}
	}
}

// Testing.

func getgcmaskcb(frame *stkframe, ctxt unsafe.Pointer) bool {
	target := (*stkframe)(ctxt)
	if frame.sp <= target.sp && target.sp < frame.varp {
		*target = *frame
		return false
	}
	return true
}

// reflect_gcbits returns the GC type info for x, for testing.
// The result is the bitmap entries (0 or 1), one entry per byte.
func reflect_gcbits(x interface{}) []byte {
	ret := getgcmask(x)
	typ := (*ptrtype)(unsafe.Pointer(efaceOf(&x)._type)).elem
	nptr := typ.ptrdata / sys.PtrSize
	for uintptr(len(ret)) > nptr && ret[len(ret)-1] == 0 {
		ret = ret[:len(ret)-1]
	}
	return ret
}

// getgcmask returns the GC type info for the pointer stored in ep, for testing.
// If ep points to the stack, only static live information will be returned
// (i.e. not for objects which are only dynamically live stack objects).
func getgcmask(ep interface{}) (mask []byte) {
	e := *efaceOf(&ep)
	p := e.data
	t := e._type
	// data or bss
	for _, datap := range activeModules() {
		// data
		if datap.data <= uintptr(p) && uintptr(p) < datap.edata {
			bitmap := datap.gcdatamask.bytedata
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/sys.PtrSize)
			for i := uintptr(0); i < n; i += sys.PtrSize {
				off := (uintptr(p) + i - datap.data) / sys.PtrSize
				mask[i/sys.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}

		// bss
		if datap.bss <= uintptr(p) && uintptr(p) < datap.ebss {
			bitmap := datap.gcbssmask.bytedata
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/sys.PtrSize)
			for i := uintptr(0); i < n; i += sys.PtrSize {
				off := (uintptr(p) + i - datap.bss) / sys.PtrSize
				mask[i/sys.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}
	}

	// heap
	if base, s, _ := findObject(uintptr(p), 0, 0); base != 0 {
		hbits := heapBitsForAddr(base)
		n := s.elemsize
		mask = make([]byte, n/sys.PtrSize)
		for i := uintptr(0); i < n; i += sys.PtrSize {
			if hbits.isPointer() {
				mask[i/sys.PtrSize] = 1
			}
			if !hbits.morePointers() {
				mask = mask[:i/sys.PtrSize]
				break
			}
			hbits = hbits.next()
		}
		return
	}

	// stack
	if _g_ := getg(); _g_.m.curg.stack.lo <= uintptr(p) && uintptr(p) < _g_.m.curg.stack.hi {
		var frame stkframe
		frame.sp = uintptr(p)
		_g_ := getg()
		gentraceback(_g_.m.curg.sched.pc, _g_.m.curg.sched.sp, 0, _g_.m.curg, 0, nil, 1000, getgcmaskcb, noescape(unsafe.Pointer(&frame)), 0)
		if frame.fn.valid() {
			locals, _, _ := getStackMap(&frame, nil, false)
			if locals.n == 0 {
				return
			}
			size := uintptr(locals.n) * sys.PtrSize
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/sys.PtrSize)
			for i := uintptr(0); i < n; i += sys.PtrSize {
				off := (uintptr(p) + i - frame.varp + size) / sys.PtrSize
				mask[i/sys.PtrSize] = locals.ptrbit(off)
			}
		}
		return
	}

	// Otherwise, not something the GC knows about.
	// Possibly read-only data, like malloc(0).
	// Must not have pointers.
	return
}