miniconf/packed.rs
1use core::num::NonZero;
2
3use crate::{DescendError, Internal, IntoKeys, Key, KeyError, Keys, Schema, Transcode};
4
/// A bit-packed representation of multiple indices.
///
/// Given known bit width of each index, the bits are
/// concatenated above a marker bit.
///
/// The value consists of (from storage MSB to LSB):
///
/// * Zero or more groups of variable bit length, concatenated, each containing
///   one index. The first is aligned with the storage MSB.
/// * A set bit to mark the end of the used bits.
/// * Zero or more cleared bits corresponding to unused index space.
///
/// [`Packed::EMPTY`] has the marker at the MSB.
/// During [`Packed::push_lsb()`] the indices are inserted with their MSB
/// where the marker was and the marker moves toward the storage LSB.
/// During [`Packed::pop_msb()`] the indices are removed with their MSB
/// aligned with the storage MSB and the remaining bits and the marker move
/// toward the storage MSB.
///
/// The representation is MSB aligned to make `PartialOrd`/`Ord` more natural and stable.
/// The `Packed` key `Ord` matches the ordering of nodes in a horizontal leaf tree
/// traversal. New nodes can be added/removed to the tree without changing the implicit
/// encoding (and ordering!) as long as no new bits need to be allocated/deallocated (
/// as long as the number of child nodes of an internal node does not cross a
/// power-of-two boundary).
/// Under this condition the mapping between indices/paths and `Packed` representation
/// is stable even if child nodes are added/removed.
///
/// "Small numbers" in LSB-aligned representation can be obtained through
/// [`Packed::into_lsb()`]/[`Packed::from_lsb()`] but don't have the ordering
/// and stability properties.
///
/// `Packed` can be used to uniquely identify
/// nodes in a `TreeSchema` using only a very small amount of bits.
/// For many realistic `TreeSchema`s a `u16` or even a `u8` is sufficient
/// to hold a `Packed` in LSB notation. Together with the
/// `postcard` `serde` format, this then gives access to any node in a nested
/// heterogeneous `Tree` with just a `u16` or `u8` as compact key and `[u8]` as
/// compact value.
///
/// ```
/// use miniconf::Packed;
///
/// let mut p = Packed::EMPTY;
/// let mut p_lsb = 0b1; // marker
/// for (bits, value) in [(2, 0b11), (1, 0b0), (0, 0b0), (3, 0b101)] {
///     p.push_lsb(bits, value).unwrap();
///     p_lsb <<= bits;
///     p_lsb |= value;
/// }
/// assert_eq!(p_lsb, 0b1_11_0__101);
/// // ^ marker
/// assert_eq!(p, Packed::from_lsb(p_lsb.try_into().unwrap()));
/// assert_eq!(p.get(), 0b11_0__101_1 << (Packed::CAPACITY - p.len()));
/// // ^ marker
/// ```
// `NonZero` gives the niche optimization: `Option<Packed>` is pointer-sized.
// `transparent` (both repr and serde) keeps the wire/memory form a bare integer.
#[derive(
    Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, serde::Serialize, serde::Deserialize,
)]
#[repr(transparent)]
#[serde(transparent)]
pub struct Packed(pub NonZero<usize>);
67
68impl Default for Packed {
69 fn default() -> Self {
70 Self::EMPTY
71 }
72}
73
// The constant two as `NonZero<usize>`, used as the base for power-of-two
// computations below. Built from `MIN` (which is one) via `saturating_add`
// since that is `const`-evaluable while staying in the `NonZero` type.
const TWO: NonZero<usize> = NonZero::<usize>::MIN.saturating_add(1);
75
impl Packed {
    /// Number of bits in the representation including the marker bit
    pub const BITS: u32 = NonZero::<usize>::BITS;

    /// The total number of bits this representation can store.
    ///
    /// One bit of the storage is always consumed by the marker.
    pub const CAPACITY: u32 = Self::BITS - 1;

    /// The empty value
    ///
    /// Only the marker bit is set, located at the storage MSB.
    pub const EMPTY: Self = Self(
        // Slightly cumbersome to generate it with `const`
        TWO.saturating_pow(Self::CAPACITY),
    );

    /// Create a new `Packed` from a `usize`.
    ///
    /// The value must not be zero.
    /// Returns `None` if `value` is zero.
    pub const fn new(value: usize) -> Option<Self> {
        // Explicit `match`: `Option::map` is not usable in `const fn`.
        match NonZero::new(value) {
            Some(value) => Some(Self(value)),
            None => None,
        }
    }

    /// Create a new `Packed` from LSB aligned `usize`
    ///
    /// The value must not be zero.
    /// Returns `None` if `value` is zero.
    pub const fn new_from_lsb(value: usize) -> Option<Self> {
        match NonZero::new(value) {
            Some(value) => Some(Self::from_lsb(value)),
            None => None,
        }
    }

    /// The primitive value
    pub const fn get(&self) -> usize {
        self.0.get()
    }

    /// The value is empty.
    ///
    /// True iff only the marker bit (at the MSB) is set.
    pub const fn is_empty(&self) -> bool {
        matches!(*self, Self::EMPTY)
    }

    /// Number of bits stored.
    ///
    /// The marker is the least significant set bit; everything above it
    /// is payload, everything below it is unused (cleared).
    pub const fn len(&self) -> u32 {
        Self::CAPACITY - self.0.trailing_zeros()
    }

    /// Return the representation aligned to the LSB with the marker bit
    /// moved from the LSB to the MSB.
    pub const fn into_lsb(self) -> NonZero<usize> {
        // Shift the payload down in two steps (`>> 1`, then by the marker
        // offset) since the combined shift could be `Self::BITS` which
        // would overflow, then place the marker just above the payload.
        TWO.saturating_pow(self.len())
            .saturating_add((self.get() >> 1) >> self.0.trailing_zeros())
    }

    /// Build a `Packed` from a LSB-aligned representation with the marker bit
    /// moved from the MSB to the LSB.
    pub const fn from_lsb(value: NonZero<usize>) -> Self {
        // Mirror of `into_lsb()`: shift the payload up in two steps
        // (the LSB-side marker drops out the top), then place the new
        // marker directly below the MSB-aligned payload.
        Self(
            TWO.saturating_pow(value.leading_zeros())
                .saturating_add((value.get() << 1) << value.leading_zeros()),
        )
    }

    /// Return the number of bits required to represent `num`.
    ///
    /// Ensures that at least one bit is allocated.
    pub const fn bits_for(num: usize) -> u32 {
        // `num == 0` would need zero bits; clamp to one so every index
        // occupies at least one bit in the packed representation.
        match usize::BITS - num.leading_zeros() {
            0 => 1,
            v => v,
        }
    }

    /// Remove the given number of MSBs and return them.
    ///
    /// If the value does not contain sufficient bits
    /// it is left unchanged and `None` is returned.
    ///
    /// # Args
    /// * `bits`: Number of bits to pop. `bits <= Self::CAPACITY`
    pub fn pop_msb(&mut self, bits: u32) -> Option<usize> {
        let s = self.get();
        // Remove value from self
        // If fewer than `bits` payload bits are stored the marker is
        // shifted out as well, the result is zero, and `new` fails.
        Self::new(s << bits).map(|new| {
            *self = new;
            // Extract value from old self
            // Done in two steps as bits + 1 can be Self::BITS which would wrap.
            (s >> (Self::CAPACITY - bits)) >> 1
        })
    }

    /// Push the given number `bits` of `value` as new LSBs.
    ///
    /// Returns the remaining number of unused bits on success.
    ///
    /// # Args
    /// * `bits`: Number of bits to push. `bits <= Self::CAPACITY`
    /// * `value`: Value to push. `value >> bits == 0`
    pub fn push_lsb(&mut self, bits: u32, value: usize) -> Option<u32> {
        debug_assert_eq!(value >> bits, 0);
        let mut n = self.0.trailing_zeros();
        let old_marker = 1 << n;
        // If the marker can not move down by `bits` there is not enough
        // free space: the shift yields zero and `new` fails.
        Self::new(old_marker >> bits).map(|new_marker| {
            n -= bits;
            // * Remove old marker
            // * Add value at offset n + 1
            //   Done in two steps as n + 1 can be Self::BITS, which would wrap.
            // * Add new marker
            self.0 = (self.get() ^ old_marker) | ((value << n) << 1) | new_marker.0;
            n
        })
    }
}
190
191impl core::fmt::Display for Packed {
192 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
193 self.0.fmt(f)
194 }
195}
196
197impl Keys for Packed {
198 fn next(&mut self, internal: &Internal) -> Result<usize, KeyError> {
199 let bits = Self::bits_for(internal.len().get() - 1);
200 let index = self.pop_msb(bits).ok_or(KeyError::TooShort)?;
201 index.find(internal).ok_or(KeyError::NotFound)
202 }
203
204 fn finalize(&mut self) -> Result<(), KeyError> {
205 if self.is_empty() {
206 Ok(())
207 } else {
208 Err(KeyError::TooLong)
209 }
210 }
211}
212
/// `Packed` already implements [`Keys`]; conversion is the identity.
impl IntoKeys for Packed {
    type IntoKeys = Self;

    fn into_keys(self) -> Self::IntoKeys {
        self
    }
}
220
221impl Transcode for Packed {
222 type Error = ();
223
224 fn transcode(
225 &mut self,
226 schema: &Schema,
227 keys: impl IntoKeys,
228 ) -> Result<(), DescendError<Self::Error>> {
229 schema.descend(keys.into_keys(), |_meta, idx_schema| {
230 if let Some((index, internal)) = idx_schema {
231 let bits = Packed::bits_for(internal.len().get() - 1);
232 self.push_lsb(bits, index).ok_or(())?;
233 }
234 Ok(())
235 })
236 }
237}
238
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test() {
        // Check path encoding round trip: push a sequence of indices,
        // then pop them back out in the same order.
        let indices = [1usize, 3, 4, 0, 1];
        let mut packed = Packed::EMPTY;
        for &idx in &indices {
            packed.push_lsb(Packed::bits_for(idx), idx).unwrap();
        }
        for &idx in &indices {
            assert_eq!(packed.pop_msb(Packed::bits_for(idx)), Some(idx));
        }
    }
}
257}