|
1 | 1 | package fn |
2 | 2 |
|
3 | | -import "errors" |
| 3 | +import ( |
| 4 | + "errors" |
| 5 | + "reflect" |
| 6 | + "unsafe" |
| 7 | +) |
4 | 8 |
|
var (
	// ErrNilPointerDeference is returned when a nil pointer is
	// dereferenced.
	//
	// NOTE(review): the identifier misspells "Dereference". It is
	// exported, so renaming it here would break callers; consider adding
	// a correctly spelled alias and marking this one Deprecated.
	ErrNilPointerDeference = errors.New("nil pointer dereference")
)
10 | 14 |
|
// Header sizes of Go's built-in reference types. unsafe.Sizeof is a
// compile-time constant expression, so these are declared as constants
// rather than mutable package-level variables.
const (
	// sliceHeaderSize is the size of a slice header (data pointer,
	// length and capacity).
	sliceHeaderSize = uint64(unsafe.Sizeof([]byte(nil)))

	// stringHeaderSize is the size of a string header (data pointer and
	// length).
	stringHeaderSize = uint64(unsafe.Sizeof(""))
)
| 22 | + |
11 | 23 | // Ptr returns the pointer of the given value. This is useful in instances |
12 | 24 | // where a function returns the value, but a pointer is wanted. Without this, |
13 | 25 | // then an intermediate variable is needed. |
@@ -68,3 +80,146 @@ func DerefPanic[T any](ptr *T) T { |
68 | 80 |
|
69 | 81 | return *ptr |
70 | 82 | } |
| 83 | + |
| 84 | +// LowerBoundByteSize returns a conservative deep-size estimate in bytes. |
| 85 | +// |
| 86 | +// Notes: |
| 87 | +// - Pointer-recursive and cycle safe; each heap allocation is counted once |
| 88 | +// using its data pointer. |
| 89 | +// - Lower bound: ignores allocator overhead, GC metadata, unused slice |
| 90 | +// capacity, map buckets/overflow, evacuation, rounding, and runtime |
| 91 | +// internals (chan/func). |
| 92 | +func LowerBoundByteSize(x any) uint64 { |
| 93 | + // seen is a map of heap object identities which have already been |
| 94 | + // counted. |
| 95 | + seen := make(map[uintptr]struct{}) |
| 96 | + return byteSizeVisit(reflect.ValueOf(x), true, seen) |
| 97 | +} |
| 98 | + |
// byteSizeVisit returns a conservative lower-bound byte count for `subject`.
//
// Notes:
//   - addSelf: include subject's inline bytes when true. Parents pass false
//     for struct fields, array elements and slice elements, because those
//     inline bytes are already covered by the parent's Type.Size() (or by
//     the slice backing-array count).
//   - seen: set of heap data pointers used to avoid double counting shared
//     data and to break reference cycles.
//
// Lower bound: ignores allocator overhead, GC metadata, unused capacity, map
// buckets/overflow, and runtime internals (chan/func/unsafe pointees).
func byteSizeVisit(subject reflect.Value, addSelf bool,
	seen map[uintptr]struct{}) uint64 {

	if !subject.IsValid() {
		return 0
	}

	subjectType := subject.Type()

	// Count the subject's own inline representation only when the caller
	// has not already accounted for it. Adding header sizes
	// unconditionally would double count every string/slice/map/interface
	// field inside a struct, since the struct's Type.Size() already
	// includes them — and that would break the "lower bound" guarantee.
	var n uint64
	if addSelf {
		n = uint64(subjectType.Size())
	}

	switch subjectType.Kind() {
	case reflect.Interface:
		if !subject.IsNil() {
			// The boxed value lives in its own allocation, so its
			// inline bytes are not part of the interface header.
			n += byteSizeVisit(subject.Elem(), true, seen)
		}

		return n

	case reflect.Ptr:
		if subject.IsNil() {
			return n
		}

		// Count each pointee exactly once and bail out on cycles.
		if markSeen(subject.Pointer(), seen) {
			return n
		}

		return n + byteSizeVisit(subject.Elem(), true, seen)

	case reflect.Struct:
		// Field headers are part of the struct's Size(), so the
		// fields contribute only the data they reference.
		for i := 0; i < subject.NumField(); i++ {
			n += byteSizeVisit(subject.Field(i), false, seen)
		}

		return n

	case reflect.Array:
		// Element inline bytes are part of the array's Size().
		for i := 0; i < subject.Len(); i++ {
			n += byteSizeVisit(subject.Index(i), false, seen)
		}

		return n

	case reflect.Slice:
		if subject.IsNil() {
			return n
		}

		// Count the used portion of the backing array once per data
		// pointer so aliasing slices are not double counted.
		dataPtr := subject.Pointer()
		if dataPtr != 0 && !markSeen(dataPtr, seen) {
			elemSize := uint64(subjectType.Elem().Size())
			n += uint64(subject.Len()) * elemSize
		}

		// The elements' inline bytes are covered by the backing-array
		// count above; visit them only for referenced data.
		for i := 0; i < subject.Len(); i++ {
			n += byteSizeVisit(subject.Index(i), false, seen)
		}

		return n

	case reflect.String:
		s := subject.String()
		if len(s) == 0 {
			// unsafe.StringData is unspecified (possibly nil) for
			// the empty string.
			return n
		}

		// reflect.Value.Pointer panics for the String kind, so the
		// data pointer must be obtained via unsafe.StringData
		// (Go 1.20+).
		dataPtr := uintptr(unsafe.Pointer(unsafe.StringData(s)))
		if markSeen(dataPtr, seen) {
			return n
		}

		return n + uint64(len(s))

	case reflect.Map:
		if subject.IsNil() {
			return n
		}

		// Bucket storage is deliberately ignored (lower bound), so
		// keys and values contribute only the data they reference.
		it := subject.MapRange()
		for it.Next() {
			n += byteSizeVisit(it.Key(), false, seen)
			n += byteSizeVisit(it.Value(), false, seen)
		}

		return n

	default:
		// Chan, Func, UnsafePointer and all scalar kinds: only the
		// inline representation is counted; runtime internals are
		// opaque and ignored by design.
		return n
	}
}

// markSeen marks the given pointer as seen and returns true if it was already
// seen.
func markSeen(ptr uintptr, seen map[uintptr]struct{}) bool {
	if _, ok := seen[ptr]; ok {
		return true
	}

	seen[ptr] = struct{}{}
	return false
}
0 commit comments