-
Notifications
You must be signed in to change notification settings - Fork 124
Expand file tree
/
Copy pathsizeclasstable.h
More file actions
135 lines (115 loc) · 4.5 KB
/
sizeclasstable.h
File metadata and controls
135 lines (115 loc) · 4.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#pragma once
#include "../ds/helpers.h"
#include "superslab.h"
namespace snmalloc
{
constexpr size_t PTR_BITS = bits::next_pow2_bits_const(sizeof(void*));
/// Maps a byte size to an index into the small-size-class lookup table.
/// Pure function of `s`; valid for s >= 1 (s == 0 would wrap in `s - 1`,
/// callers are expected to pass a non-zero size — TODO confirm with callers).
constexpr static SNMALLOC_PURE size_t sizeclass_lookup_index(const size_t s)
{
// We subtract and shift to reduce the size of the table, i.e. we don't have
// to store a value for every size class.
// We could shift by MIN_ALLOC_BITS, as this would give us the most
// compressed table, but by shifting by PTR_BITS the code-gen is better
// as the most important path using this subsequently shifts left by
// PTR_BITS, hence they can be fused into a single mask.
return (s - 1) >> PTR_BITS;
}
constexpr static size_t sizeclass_lookup_size =
sizeclass_lookup_index(SLAB_SIZE + 1);
/// Compile-time-constructed metadata for every size class: the byte size,
/// cache-friendliness masks, slab start offsets for small classes, and
/// slot counts for medium classes. A single constexpr instance
/// (`sizeclass_metadata` below) serves all lookups.
struct SizeClassTable
{
// Maps sizeclass_lookup_index(size) -> size class, for sizes up to SLAB_SIZE.
sizeclass_t sizeclass_lookup[sizeclass_lookup_size] = {{}};
// Allocation size in bytes for each size class.
ModArray<NUM_SIZECLASSES, size_t> size;
// Mask selecting the within-alignment bits of an address (alignment - 1).
ModArray<NUM_SIZECLASSES, size_t> cache_friendly_mask;
// Complement of cache_friendly_mask: masks an address down to its alignment.
ModArray<NUM_SIZECLASSES, size_t> inverse_cache_friendly_mask;
// Offset of the first object in a regular slab, per small size class.
ModArray<NUM_SMALL_CLASSES, uint16_t> initial_offset_ptr;
// Offset of the first object in a short slab (one holding the Superslab
// header), per small size class.
ModArray<NUM_SMALL_CLASSES, uint16_t> short_initial_offset_ptr;
// Number of object slots in a medium slab, per medium size class.
ModArray<NUM_MEDIUM_CLASSES, uint16_t> medium_slab_slots;
constexpr SizeClassTable()
: size(),
cache_friendly_mask(),
inverse_cache_friendly_mask(),
initial_offset_ptr(),
short_initial_offset_ptr(),
medium_slab_slots()
{
// `curr` walks every PTR_BITS-granular size once across all iterations,
// filling each lookup slot with the smallest class that fits it.
size_t curr = 1;
for (sizeclass_t sizeclass = 0; sizeclass < NUM_SIZECLASSES; sizeclass++)
{
// Decode the class's byte size from its exponent/mantissa encoding.
size[sizeclass] =
bits::from_exp_mant<INTERMEDIATE_BITS, MIN_ALLOC_BITS>(sizeclass);
if (sizeclass < NUM_SMALL_CLASSES)
{
for (; curr <= size[sizeclass]; curr += 1 << PTR_BITS)
{
sizeclass_lookup[sizeclass_lookup_index(curr)] = sizeclass;
}
}
// Natural alignment of the class's size, capped at a page.
size_t alignment = bits::min(
bits::one_at_bit(bits::ctz_const(size[sizeclass])), OS_PAGE_SIZE);
cache_friendly_mask[sizeclass] = (alignment - 1);
inverse_cache_friendly_mask[sizeclass] = ~(alignment - 1);
}
size_t header_size = sizeof(Superslab);
size_t short_slab_size = SLAB_SIZE - header_size;
for (sizeclass_t i = 0; i < NUM_SMALL_CLASSES; i++)
{
// We align to the end of the block to remove special cases for the
// short block. Calculate remainders
size_t short_correction = short_slab_size % size[i];
size_t correction = SLAB_SIZE % size[i];
// First element in the block is the link
initial_offset_ptr[i] = static_cast<uint16_t>(correction);
short_initial_offset_ptr[i] =
static_cast<uint16_t>(header_size + short_correction);
}
// Medium classes: how many whole objects fit after the Mediumslab header.
for (sizeclass_t i = NUM_SMALL_CLASSES; i < NUM_SIZECLASSES; i++)
{
medium_slab_slots[i - NUM_SMALL_CLASSES] = static_cast<uint16_t>(
(SUPERSLAB_SIZE - Mediumslab::header_size()) / size[i]);
}
}
};
static constexpr SizeClassTable sizeclass_metadata = SizeClassTable();
/// Returns the byte offset of the first object in a slab of small size
/// class `sc`. Short slabs carry the Superslab header, so they use the
/// header-adjusted offset table.
static inline constexpr uint16_t
get_initial_offset(sizeclass_t sc, bool is_short)
{
return is_short ? sizeclass_metadata.short_initial_offset_ptr[sc] :
                  sizeclass_metadata.initial_offset_ptr[sc];
}
/// Returns the allocation size in bytes for the given size class.
constexpr static inline size_t sizeclass_to_size(sizeclass_t sizeclass)
{
const auto& table = sizeclass_metadata;
return table.size[sizeclass];
}
/// Returns the mask covering the within-alignment address bits for the
/// given size class (i.e. alignment - 1).
constexpr static inline size_t
sizeclass_to_cache_friendly_mask(sizeclass_t sizeclass)
{
const auto& table = sizeclass_metadata;
return table.cache_friendly_mask[sizeclass];
}
/// Returns the complement of the cache-friendly mask for the given size
/// class: masks an address down to its alignment boundary.
constexpr static SNMALLOC_FAST_PATH size_t
sizeclass_to_inverse_cache_friendly_mask(sizeclass_t sizeclass)
{
const auto& table = sizeclass_metadata;
return table.inverse_cache_friendly_mask[sizeclass];
}
/// Maps a requested byte size to its size class.
/// Sizes that fit in a slab are answered from the precomputed lookup
/// table; larger sizes fall back to the exponent/mantissa encoding.
/// When ASSUME_SMALL is true the table path is taken unconditionally.
template<bool ASSUME_SMALL>
static inline sizeclass_t size_to_sizeclass(size_t size)
{
// Test the compile-time flag first; both operands are side-effect free,
// and `(size - 1) <= (SLAB_SIZE - 1)` handles size == 0 by wrapping past
// the bound, sending it down the large path.
if (ASSUME_SMALL || ((size - 1) <= (SLAB_SIZE - 1)))
{
auto index = sizeclass_lookup_index(size);
SNMALLOC_ASSUME(index <= sizeclass_lookup_index(SLAB_SIZE));
return sizeclass_metadata.sizeclass_lookup[index];
}
// Don't use sizeclasses that are not a multiple of the alignment.
// For example, 24 byte allocations can be
// problematic for some data due to alignment issues.
return static_cast<sizeclass_t>(
bits::to_exp_mant<INTERMEDIATE_BITS, MIN_ALLOC_BITS>(size));
}
/// Returns the number of object slots in a medium slab of the given
/// (medium) size class.
constexpr static inline uint16_t medium_slab_free(sizeclass_t sizeclass)
{
const auto medium_index = sizeclass - NUM_SMALL_CLASSES;
return sizeclass_metadata.medium_slab_slots[medium_index];
}
} // namespace snmalloc