Diffstat (limited to 'libbcachefs/eytzinger.h')
-rw-r--r--   libbcachefs/eytzinger.h   246
1 file changed, 170 insertions, 76 deletions
diff --git a/libbcachefs/eytzinger.h b/libbcachefs/eytzinger.h
index 13d54e5e..dc23e44d 100644
--- a/libbcachefs/eytzinger.h
+++ b/libbcachefs/eytzinger.h
@@ -9,160 +9,162 @@
 /*
  * Traversal for trees in eytzinger layout - a full binary tree layed out in an
  * array
+ */
+
+/*
+ * One based indexing version:
  *
- * We used one based indexing, not zero based: with one based indexing, each
- * level of the tree starts at a power of two - leading to better alignment -
- * and it's what you want for implementing next/prev and to/from inorder.
- *
- * To/from inorder also uses 1 based indexing.
+ * With one based indexing each level of the tree starts at a power of two -
+ * good for cacheline alignment:
  *
  * Size parameter is treated as if we were using 0 based indexing, however:
- * valid nodes, and inorder indices, are in the range [1..size)
+ * valid nodes, and inorder indices, are in the range [1..size) - that is, there
+ * are actually size - 1 elements
  */
 
-static inline unsigned eytzinger_child(unsigned j, unsigned child)
+static inline unsigned eytzinger1_child(unsigned i, unsigned child)
 {
 	EBUG_ON(child > 1);
 
-	return (j << 1) + child;
+	return (i << 1) + child;
 }
 
-static inline unsigned eytzinger_left_child(unsigned j)
+static inline unsigned eytzinger1_left_child(unsigned i)
 {
-	return eytzinger_child(j, 0);
+	return eytzinger1_child(i, 0);
 }
 
-static inline unsigned eytzinger_right_child(unsigned j)
+static inline unsigned eytzinger1_right_child(unsigned i)
 {
-	return eytzinger_child(j, 1);
+	return eytzinger1_child(i, 1);
 }
 
-static inline unsigned eytzinger_first(unsigned size)
+static inline unsigned eytzinger1_first(unsigned size)
 {
 	return rounddown_pow_of_two(size - 1);
 }
 
-static inline unsigned eytzinger_last(unsigned size)
+static inline unsigned eytzinger1_last(unsigned size)
 {
 	return rounddown_pow_of_two(size) - 1;
 }
 
 /*
- * eytzinger_next() and eytzinger_prev() have the nice properties that
+ * eytzinger1_next() and eytzinger1_prev() have the nice properties that
  *
- * eytzinger_next(0) == eytzinger_first())
- * eytzinger_prev(0) == eytzinger_last())
+ * eytzinger1_next(0) == eytzinger1_first())
+ * eytzinger1_prev(0) == eytzinger1_last())
  *
- * eytzinger_prev(eytzinger_first()) == 0
- * eytzinger_next(eytzinger_last()) == 0
+ * eytzinger1_prev(eytzinger1_first()) == 0
+ * eytzinger1_next(eytzinger1_last()) == 0
  */
 
-static inline unsigned eytzinger_next(unsigned j, unsigned size)
+static inline unsigned eytzinger1_next(unsigned i, unsigned size)
 {
-	EBUG_ON(j >= size);
+	EBUG_ON(i >= size);
 
-	if (eytzinger_right_child(j) < size) {
-		j = eytzinger_right_child(j);
+	if (eytzinger1_right_child(i) < size) {
+		i = eytzinger1_right_child(i);
 
-		j <<= __fls(size) - __fls(j);
-		j >>= j >= size;
+		i <<= __fls(size) - __fls(i);
+		i >>= i >= size;
 	} else {
-		j >>= ffz(j) + 1;
+		i >>= ffz(i) + 1;
 	}
 
-	return j;
+	return i;
 }
 
-static inline unsigned eytzinger_prev(unsigned j, unsigned size)
+static inline unsigned eytzinger1_prev(unsigned i, unsigned size)
 {
-	EBUG_ON(j >= size);
+	EBUG_ON(i >= size);
 
-	if (eytzinger_left_child(j) < size) {
-		j = eytzinger_left_child(j);
+	if (eytzinger1_left_child(i) < size) {
+		i = eytzinger1_left_child(i);
 
-		j <<= __fls(size) - __fls(j);
-		j -= 1;
-		j >>= j >= size;
+		i <<= __fls(size) - __fls(i);
+		i -= 1;
+		i >>= i >= size;
 	} else {
-		j >>= __ffs(j) + 1;
+		i >>= __ffs(i) + 1;
 	}
 
-	return j;
+	return i;
 }
 
-static inline unsigned eytzinger_extra(unsigned size)
+static inline unsigned eytzinger1_extra(unsigned size)
 {
 	return (size - rounddown_pow_of_two(size - 1)) << 1;
 }
 
-static inline unsigned __eytzinger_to_inorder(unsigned j, unsigned size,
+static inline unsigned __eytzinger1_to_inorder(unsigned i, unsigned size,
 					      unsigned extra)
 {
-	unsigned b = __fls(j);
+	unsigned b = __fls(i);
 	unsigned shift = __fls(size - 1) - b;
 	int s;
 
-	EBUG_ON(!j || j >= size);
+	EBUG_ON(!i || i >= size);
 
-	j  ^= 1U << b;
-	j <<= 1;
-	j  |= 1;
-	j <<= shift;
+	i  ^= 1U << b;
+	i <<= 1;
+	i  |= 1;
+	i <<= shift;
 
 	/*
 	 * sign bit trick:
 	 *
-	 * if (j > extra)
-	 *	j -= (j - extra) >> 1;
+	 * if (i > extra)
+	 *	i -= (i - extra) >> 1;
 	 */
-	s = extra - j;
-	j += (s >> 1) & (s >> 31);
+	s = extra - i;
+	i += (s >> 1) & (s >> 31);
 
-	return j;
+	return i;
 }
 
-static inline unsigned __inorder_to_eytzinger(unsigned j, unsigned size,
-					      unsigned extra)
+static inline unsigned __inorder_to_eytzinger1(unsigned i, unsigned size,
+					       unsigned extra)
 {
 	unsigned shift;
 	int s;
 
-	EBUG_ON(!j || j >= size);
+	EBUG_ON(!i || i >= size);
 
 	/*
 	 * sign bit trick:
 	 *
-	 * if (j > extra)
-	 *	j += j - extra;
+	 * if (i > extra)
+	 *	i += i - extra;
 	 */
-	s = extra - j;
-	j -= s & (s >> 31);
+	s = extra - i;
+	i -= s & (s >> 31);
 
-	shift = __ffs(j);
+	shift = __ffs(i);
 
-	j >>= shift + 1;
-	j  |= 1U << (__fls(size - 1) - shift);
+	i >>= shift + 1;
+	i  |= 1U << (__fls(size - 1) - shift);
 
-	return j;
+	return i;
 }
 
-static inline unsigned eytzinger_to_inorder(unsigned j, unsigned size)
+static inline unsigned eytzinger1_to_inorder(unsigned i, unsigned size)
 {
-	return __eytzinger_to_inorder(j, size, eytzinger_extra(size));
+	return __eytzinger1_to_inorder(i, size, eytzinger1_extra(size));
 }
 
-static inline unsigned inorder_to_eytzinger(unsigned j, unsigned size)
+static inline unsigned inorder_to_eytzinger1(unsigned i, unsigned size)
 {
-	return __inorder_to_eytzinger(j, size, eytzinger_extra(size));
+	return __inorder_to_eytzinger1(i, size, eytzinger1_extra(size));
 }
 
-#define eytzinger_for_each(_i, _size)			\
-	for ((_i) = eytzinger_first((_size));		\
+#define eytzinger1_for_each(_i, _size)			\
+	for ((_i) = eytzinger1_first((_size));		\
 	     (_i) != 0;					\
-	     (_i) = eytzinger_next((_i), (_size)))
+	     (_i) = eytzinger1_next((_i), (_size)))
 
 #if 0
-void eytzinger_test(void)
+void eytzinger0_test(void)
 {
 	unsigned i, j, size;
 
@@ -172,20 +174,20 @@ void eytzinger_test(void)
 		if (!(size % 4096))
 			printk(KERN_INFO "tree size %u\n", size);
 
-		assert(eytzinger_prev(0, size) == eytzinger_last(size));
-		assert(eytzinger_next(0, size) == eytzinger_first(size));
+		assert(eytzinger1_prev(0, size) == eytzinger1_last(size));
+		assert(eytzinger1_next(0, size) == eytzinger1_first(size));
 
-		assert(eytzinger_prev(eytzinger_first(size), size) == 0);
-		assert(eytzinger_next(eytzinger_last(size), size) == 0);
+		assert(eytzinger1_prev(eytzinger1_first(size), size) == 0);
+		assert(eytzinger1_next(eytzinger1_last(size), size) == 0);
 
-		eytzinger_for_each(j, size) {
+		eytzinger1_for_each(j, size) {
 			assert(from_inorder(i, size) == j);
 			assert(to_inorder(j, size) == i);
 
-			if (j != eytzinger_last(size)) {
-				unsigned next = eytzinger_next(j, size);
+			if (j != eytzinger1_last(size)) {
+				unsigned next = eytzinger1_next(j, size);
 
-				assert(eytzinger_prev(next, size) == j);
+				assert(eytzinger1_prev(next, size) == j);
 			}
 		}
 	}
@@ -193,4 +195,96 @@ void eytzinger_test(void)
 }
 #endif
 
+/* Zero based indexing version: */
+
+static inline unsigned eytzinger0_child(unsigned i, unsigned child)
+{
+	EBUG_ON(child > 1);
+
+	return (i << 1) + 1 + child;
+}
+
+static inline unsigned eytzinger0_left_child(unsigned i)
+{
+	return eytzinger0_child(i, 0);
+}
+
+static inline unsigned eytzinger0_right_child(unsigned i)
+{
+	return eytzinger0_child(i, 1);
+}
+
+#if 0
+static inline unsigned eytzinger0_first(unsigned size)
+{
+}
+
+static inline unsigned eytzinger0_last(unsigned size)
+{
+}
+
+static inline unsigned eytzinger0_next(unsigned i, unsigned size)
+{
+}
+
+static inline unsigned eytzinger0_prev(unsigned i, unsigned size)
+{
+}
+#endif
+
+static inline unsigned eytzinger0_extra(unsigned size)
+{
+	return (size + 1 - rounddown_pow_of_two(size)) << 1;
+}
+
+static inline unsigned __eytzinger0_to_inorder(unsigned i, unsigned size,
+					       unsigned extra)
+{
+	return __eytzinger1_to_inorder(i + 1, size + 1, extra) - 1;
+}
+
+static inline unsigned __inorder_to_eytzinger0(unsigned i, unsigned size,
+					       unsigned extra)
+{
+	return __inorder_to_eytzinger1(i + 1, size + 1, extra) - 1;
+}
+
+static inline unsigned eytzinger0_to_inorder(unsigned i, unsigned size)
+{
+	return __eytzinger0_to_inorder(i, size, eytzinger0_extra(size));
+}
+
+static inline unsigned inorder_to_eytzinger0(unsigned i, unsigned size)
+{
+	return __inorder_to_eytzinger0(i, size, eytzinger0_extra(size));
+}
+
+#define eytzinger0_find(base, _nr, _size, _cmp, _search)		\
+({									\
+	void *_base = base;						\
+	size_t _i = 0;							\
+	int _res;							\
+									\
+	while (_i < (_nr) &&						\
+	       (_res = _cmp(_search, _base + _i * (_size), _size)))	\
+		_i = eytzinger0_child(_i, _res > 0);			\
+									\
+	if (IS_ENABLED(CONFIG_BCACHEFS_DEBUG)) {			\
+		bool found1 = _i < _nr, found2 = false;			\
+		unsigned _j;						\
+									\
+		for (_j = 0; _j < _nr; _j++)				\
+			if (!_cmp(_base + _j * (_size), _search, _size))\
+				found2 = true;				\
+									\
+		BUG_ON(found1 != found2);				\
+	}								\
+									\
+	_i;								\
+})
+
+void eytzinger0_sort(void *, size_t, size_t,
+		     int (*cmp_func)(const void *, const void *, size_t),
+		     void (*swap_func)(void *, void *, size_t));
+
 #endif /* _EYTZINGER_H */
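
A quick usage sketch of the new zero-based helpers, not part of the diff above: the sample array, the cmp_u32() callback, and eytzinger0_example() are made up for illustration, and kernel-style helpers (u32, ARRAY_SIZE(), BUG_ON()) are assumed to be available. It builds an eytzinger-ordered copy of a sorted array with inorder_to_eytzinger0(), then looks a key up with eytzinger0_find(), whose result is < nr only if the key was found.

/* Hypothetical comparison callback matching eytzinger0_find()'s
 * _cmp(search, element, size) calling convention: */
static int cmp_u32(const void *l, const void *r, size_t size)
{
	u32 a = *(const u32 *) l, b = *(const u32 *) r;

	return a < b ? -1 : a > b ? 1 : 0;
}

static void eytzinger0_example(void)
{
	u32 sorted[7] = { 1, 2, 3, 4, 5, 6, 7 };
	u32 eytz[7];
	u32 search = 5;
	unsigned i;
	size_t idx;

	/* Lay the sorted array out in eytzinger (breadth-first) order;
	 * valid inorder indices for the 0-based version are [0..size): */
	for (i = 0; i < ARRAY_SIZE(sorted); i++)
		eytz[inorder_to_eytzinger0(i, ARRAY_SIZE(sorted))] = sorted[i];

	/* Descend from the root (index 0), going to the left or right
	 * child depending on the comparison result: */
	idx = eytzinger0_find(eytz, ARRAY_SIZE(eytz), sizeof(eytz[0]),
			      cmp_u32, &search);

	BUG_ON(idx >= ARRAY_SIZE(eytz) || eytz[idx] != search);
}

Alternatively, the eytzinger0_sort() prototype added at the end of the diff suggests an unsorted array can be put directly into eytzinger order with a comparison and swap callback, rather than sorting first and converting indices as above.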