package reflect

import (
	"internal/abi"
	"internal/goexperiment"
	"unsafe"
)

// intArgRegs, floatArgRegs, and floatRegSize describe the registers
// available for passing arguments and results. They are derived from
// the internal/abi constants and are gated by the register-ABI
// GOEXPERIMENT flag.
var (
	intArgRegs   = abi.IntArgRegs * goexperiment.RegabiArgsInt
	floatArgRegs = abi.FloatArgRegs * goexperiment.RegabiArgsInt
	floatRegSize = uintptr(abi.EffectiveFloatRegSize * goexperiment.RegabiArgsInt)
)
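
// When the register ABI experiment is disabled, goexperiment.RegabiArgsInt
// is zero, so all three values above are zero: no registers are ever
// assigned and every argument and result is passed on the stack.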

// abiStep describes one part of how a single Go value is translated
// between its in-memory representation and a call frame: which piece
// of the value (offset and size) goes where (a stack slot, an integer
// register, or a floating-point register).
type abiStep struct {
	kind abiStepKind

	// offset and size together describe a part of a Go value
	// in memory.
	offset uintptr
	size   uintptr // size in bytes of the part

	// These fields describe the ABI side of the translation.
	stkOff uintptr // stack offset, used if kind == abiStepStack
	ireg   int     // integer register index, used if kind == abiStepIntReg or kind == abiStepPointer
	freg   int     // FP register index, used if kind == abiStepFloatReg
}

// abiStepKind is the "op-code" for an abiStep instruction.
type abiStepKind int

const (
	abiStepBad      abiStepKind = iota // invalid step (zero value)
	abiStepStack                       // copy to/from the stack
	abiStepIntReg                      // copy to/from an integer register
	abiStepPointer                     // copy a pointer to/from an integer register
	abiStepFloatReg                    // copy to/from a floating-point register
)

// abiSeq represents the backing storage for the calculation of ABI-specific
// parameter assignments, either for the arguments or the return values of a
// single function.
type abiSeq struct {
	// steps is the flat list of translation steps for all values in the
	// sequence. Steps are grouped by value: valueStart[i] is the index in
	// steps where the i'th argument or return value begins, and a single
	// value may expand to several steps (for example, a struct split
	// across multiple registers).
	steps      []abiStep
	valueStart []int

	stackBytes   uintptr // stack space used so far
	iregs, fregs int     // registers used so far
}

func (a *abiSeq) dump() {
	for i, p := range a.steps {
		println("part", i, p.kind, p.offset, p.size, p.stkOff, p.ireg, p.freg)
	}
	print("values ")
	for _, i := range a.valueStart {
		print(i, " ")
	}
	println()
	println("stack", a.stackBytes)
	println("iregs", a.iregs)
	println("fregs", a.fregs)
}

// stepsForValue returns the ABI instructions for the i'th Go value
// (argument or return value) described by the sequence.
func (a *abiSeq) stepsForValue(i int) []abiStep {
	s := a.valueStart[i]
	var e int
	if i == len(a.valueStart)-1 {
		e = len(a.steps)
	} else {
		e = a.valueStart[i+1]
	}
	return a.steps[s:e]
}

// addArg extends the abiSeq with a new Go value of type t.
//
// If the value was stack-assigned, it returns the single abiStep
// describing that value; otherwise it returns nil.
func (a *abiSeq) addArg(t *rtype) *abiStep {
	// We're always adding a new value, so record where it starts.
	pStart := len(a.steps)
	a.valueStart = append(a.valueStart, pStart)
	if t.size == 0 {
		// A zero-sized argument occupies no space and gets no step,
		// but it still forces the next stack-assigned value to be
		// aligned to its alignment, so bump up stackBytes accordingly.
		a.stackBytes = align(a.stackBytes, uintptr(t.align))
		return nil
	}
	// Hold a copy of "a" so that we can roll back if
	// register assignment fails.
	aOld := *a
	if !a.regAssign(t, 0) {
		// Register assignment failed. Roll back any changes
		// and stack-assign instead.
		*a = aOld
		a.stackAssign(t.size, uintptr(t.align))
		return &a.steps[len(a.steps)-1]
	}
	return nil
}

// addRcvr extends the abiSeq with a new method call receiver
// according to the interface calling convention.
//
// If the receiver was stack-assigned, it returns the single abiStep
// describing that value, and nil otherwise. The second result reports
// whether the receiver word is a pointer.
func (a *abiSeq) addRcvr(rcvr *rtype) (*abiStep, bool) {
	// The receiver is always one word.
	a.valueStart = append(a.valueStart, len(a.steps))
	var ok, ptr bool
	if ifaceIndir(rcvr) || rcvr.pointers() {
		// Either the receiver is passed indirectly (so the word is a
		// pointer to the value) or it is pointer-shaped itself.
		ok = a.assignIntN(0, ptrSize, 1, 0b1)
		ptr = true
	} else {
		// The receiver is stored directly in the interface word and
		// contains no pointers, so no pointer bit is set.
		ok = a.assignIntN(0, ptrSize, 1, 0b0)
		ptr = false
	}
	if !ok {
		a.stackAssign(ptrSize, ptrSize)
		return &a.steps[len(a.steps)-1], ptr
	}
	return nil, ptr
}
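
// For illustration: for a pointer receiver such as *T, addRcvr records a
// single abiStepPointer step in the next free integer register (or, if no
// integer registers remain, one pointer-sized stack slot) and reports
// ptr == true.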

// regAssign attempts to reserve argument registers for a value of
// type t, stored at some offset into the enclosing value.
//
// It returns whether or not the assignment succeeded. Note that it
// may leave partially-completed steps behind on failure, so the
// caller must roll back to a saved copy of the abiSeq (as addArg
// does) if it returns false.
func (a *abiSeq) regAssign(t *rtype, offset uintptr) bool {
	switch t.Kind() {
	case UnsafePointer, Ptr, Chan, Map, Func:
		return a.assignIntN(offset, t.size, 1, 0b1)
	case Bool, Int, Uint, Int8, Uint8, Int16, Uint16, Int32, Uint32, Uintptr:
		return a.assignIntN(offset, t.size, 1, 0b0)
	case Int64, Uint64:
		switch ptrSize {
		case 4:
			return a.assignIntN(offset, 4, 2, 0b0)
		case 8:
			return a.assignIntN(offset, 8, 1, 0b0)
		}
	case Float32, Float64:
		return a.assignFloatN(offset, t.size, 1)
	case Complex64:
		return a.assignFloatN(offset, 4, 2)
	case Complex128:
		return a.assignFloatN(offset, 8, 2)
	case String:
		return a.assignIntN(offset, ptrSize, 2, 0b01)
	case Interface:
		return a.assignIntN(offset, ptrSize, 2, 0b10)
	case Slice:
		return a.assignIntN(offset, ptrSize, 3, 0b001)
	case Array:
		tt := (*arrayType)(unsafe.Pointer(t))
		switch tt.len {
		case 0:
			// There's nothing to assign, so don't modify
			// a.steps, but succeed so the caller doesn't
			// try to stack-assign this value.
			return true
		case 1:
			return a.regAssign(tt.elem, offset)
		default:
			// Arrays with more than one element are never
			// register-assigned; fall back to the stack.
			return false
		}
	case Struct:
		st := (*structType)(unsafe.Pointer(t))
		for i := range st.fields {
			f := &st.fields[i]
			if !a.regAssign(f.typ, offset+f.offset()) {
				return false
			}
		}
		return true
	default:
		print("t.Kind == ", t.Kind(), "\n")
		panic("unknown type kind")
	}
	panic("unhandled register assignment path")
}
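
// For illustration: regAssign on a value of type struct{ p *int; f float64 }
// recurses into the fields and, when registers are available, records an
// abiStepPointer step for p in the next integer register and an
// abiStepFloatReg step for f in the next floating-point register.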

// assignIntN assigns n values to integer registers, each "size" bytes
// large, reading from the data at [offset, offset+n*size) in memory.
// Bit i of ptrMap indicates whether the i'th value is a pointer; because
// the pointer map is per-word, ptrMap may only be non-zero when
// size == ptrSize. n must be <= 8.
//
// It reports whether the assignment succeeded; if there are not enough
// free integer registers it makes no changes and returns false.
func (a *abiSeq) assignIntN(offset, size uintptr, n int, ptrMap uint8) bool {
	if n > 8 || n < 0 {
		panic("invalid n")
	}
	if ptrMap != 0 && size != ptrSize {
		panic("non-empty pointer map passed for non-pointer-size values")
	}
	if a.iregs+n > intArgRegs {
		return false
	}
	for i := 0; i < n; i++ {
		kind := abiStepIntReg
		if ptrMap&(uint8(1)<<i) != 0 {
			kind = abiStepPointer
		}
		a.steps = append(a.steps, abiStep{
			kind:   kind,
			offset: offset + uintptr(i)*size,
			size:   size,
			ireg:   a.iregs,
		})
		a.iregs++
	}
	return true
}
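
// For illustration: a string argument is assigned via
// assignIntN(offset, ptrSize, 2, 0b01), which, when two integer registers
// are free, records an abiStepPointer step for the data pointer followed
// by an abiStepIntReg step for the length.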

// assignFloatN assigns n values to floating-point registers, each "size"
// bytes large, reading from the data at [offset, offset+n*size) in memory.
//
// It reports whether the assignment succeeded; if there are not enough
// free floating-point registers, or the values are too large for the
// registers, it makes no changes and returns false.
func (a *abiSeq) assignFloatN(offset, size uintptr, n int) bool {
	if n < 0 {
		panic("invalid n")
	}
	if a.fregs+n > floatArgRegs || floatRegSize < size {
		return false
	}
	for i := 0; i < n; i++ {
		a.steps = append(a.steps, abiStep{
			kind:   abiStepFloatReg,
			offset: offset + uintptr(i)*size,
			size:   size,
			freg:   a.fregs,
		})
		a.fregs++
	}
	return true
}
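
// Note that with the register ABI experiment disabled, floatRegSize is
// zero, so the floatRegSize < size check in assignFloatN rejects every
// assignment and all floating-point values fall back to the stack.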

// stackAssign reserves stack space for one value that is "size" bytes
// large with the given alignment, recording a single abiStepStack step
// for it.
func (a *abiSeq) stackAssign(size, alignment uintptr) {
	a.stackBytes = align(a.stackBytes, alignment)
	a.steps = append(a.steps, abiStep{
		kind:   abiStepStack,
		offset: 0, // Stack-assigned values are copied whole, so the memory offset is 0.
		size:   size,
		stkOff: a.stackBytes,
	})
	a.stackBytes += size
}

// abiDesc describes the ABI for a function or method call.
type abiDesc struct {
	// call and ret represent the translation steps for
	// the call and return paths of a Go function.
	call, ret abiSeq

	// These fields describe the stack space allocated
	// for the call. stackCallArgsSize is the amount of space
	// reserved for arguments but not return values. retOffset
	// is the offset at which return values begin, and
	// spill is the size in bytes of additional space reserved
	// to spill register-assigned arguments to the stack.
	stackCallArgsSize, retOffset, spill uintptr

	// stackPtrs is a bitmap that indicates whether
	// each word in the stack space containing values
	// for this call or return is a pointer. Used
	// by the runtime garbage collector.
	stackPtrs *bitVector

	// inRegPtrs is a bitmap whose i'th bit indicates
	// whether the i'th integer argument register contains
	// a pointer, and outRegPtrs is the same for the
	// integer result registers. These are used to make
	// register-held pointers visible to the garbage collector.
	inRegPtrs, outRegPtrs abi.IntArgRegBitmap
}

func (a *abiDesc) dump() {
	println("ABI")
	println("call")
	a.call.dump()
	println("ret")
	a.ret.dump()
	println("stackCallArgsSize", a.stackCallArgsSize)
	println("retOffset", a.retOffset)
	println("spill", a.spill)
	print("inRegPtrs:")
	dumpPtrBitMap(a.inRegPtrs)
	println()
	print("outRegPtrs:")
	dumpPtrBitMap(a.outRegPtrs)
	println()
}

func dumpPtrBitMap(b abi.IntArgRegBitmap) {
	for i := 0; i < intArgRegs; i++ {
		x := 0
		if b.Get(i) {
			x = 1
		}
		print(" ", x)
	}
}

// newAbiDesc computes the abiDesc for a function with type t and an
// optional method receiver rcvr (which may be nil).
func newAbiDesc(t *funcType, rcvr *rtype) abiDesc {
	// spill is the amount of additional stack space reserved so that
	// register-assigned arguments can be spilled to memory. It is the
	// sum of the sizes of all register-assigned parameters (plus one
	// word for a register-assigned receiver), aligned to a pointer
	// boundary at the end.
	spill := uintptr(0)

	// Compute the stack pointer bitmap for stack-assigned arguments
	// and return values.
	stackPtrs := new(bitVector)

	// Compute the register pointer bitmap for register-assigned
	// arguments.
	inRegPtrs := abi.IntArgRegBitmap{}

	// Compute the ABI sequence for the input parameters.
	var in abiSeq
	if rcvr != nil {
		stkStep, isPtr := in.addRcvr(rcvr)
		if stkStep != nil {
			if isPtr {
				stackPtrs.append(1)
			} else {
				stackPtrs.append(0)
			}
		} else {
			spill += ptrSize
		}
	}
	for i, arg := range t.in() {
		stkStep := in.addArg(arg)
		if stkStep != nil {
			addTypeBits(stackPtrs, stkStep.stkOff, arg)
		} else {
			spill = align(spill, uintptr(arg.align))
			spill += arg.size
			for _, st := range in.stepsForValue(i) {
				if st.kind == abiStepPointer {
					inRegPtrs.Set(st.ireg)
				}
			}
		}
	}
	spill = align(spill, ptrSize)

	// From the input parameters alone we now know
	// stackCallArgsSize and retOffset.
	stackCallArgsSize := in.stackBytes
	retOffset := align(in.stackBytes, ptrSize)

	// Compute the register pointer bitmap for register-assigned
	// return values.
	outRegPtrs := abi.IntArgRegBitmap{}

	// Compute the ABI sequence for the output parameters.
	var out abiSeq
	// Stack-assigned return values are laid out after the stack-assigned
	// arguments, so start the return sequence's stack offsets at
	// retOffset. This offset is subtracted again below so that
	// out.stackBytes reflects only the space used by the results.
	out.stackBytes = retOffset
	for i, res := range t.out() {
		stkStep := out.addArg(res)
		if stkStep != nil {
			addTypeBits(stackPtrs, stkStep.stkOff, res)
		} else {
			for _, st := range out.stepsForValue(i) {
				if st.kind == abiStepPointer {
					outRegPtrs.Set(st.ireg)
				}
			}
		}
	}
	// Undo the offset applied above so that stackBytes is accurate.
	out.stackBytes -= retOffset
	return abiDesc{in, out, stackCallArgsSize, retOffset, spill, stackPtrs, inRegPtrs, outRegPtrs}
}
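
// For illustration (assuming the register ABI is enabled and registers are
// available): for a method func (r *T) M(x int) (err error), the receiver
// and x are assigned to the first two integer argument registers, while the
// error result, being an interface, occupies the first two integer result
// registers, with outRegPtrs marking the register that holds its data word.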