subspace_proof_of_space/chiapos/tables.rs

#[cfg(test)]
mod tests;

#[cfg(not(feature = "std"))]
extern crate alloc;

use crate::chiapos::table::types::{Metadata, Position, X, Y};
pub use crate::chiapos::table::TablesCache;
use crate::chiapos::table::{
    compute_f1, compute_fn, has_match, metadata_size_bytes, partial_y, Table,
    COMPUTE_F1_SIMD_FACTOR,
};
use crate::chiapos::utils::EvaluatableUsize;
use crate::chiapos::{Challenge, Quality, Seed};
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::mem;
use sha2::{Digest, Sha256};

/// Pick position in `table_number` based on challenge bits
const fn pick_position(
    [left_position, right_position]: [Position; 2],
    last_5_challenge_bits: u8,
    table_number: u8,
) -> Position {
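    // For illustration, with hypothetical `last_5_challenge_bits = 0b10110`:
    // bit `table_number - 2` picks the side, so table 2 reads bit 0 (`0` means
    // left), table 3 reads bit 1 (`1` means right), table 4 bit 2 (right),
    // table 5 bit 3 (left) and table 6 bit 4 (right).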
    if ((last_5_challenge_bits >> (table_number - 2)) & 1) == 0 {
        left_position
    } else {
        right_position
    }
}

/// Collection of Chia tables
#[derive(Debug)]
pub(super) struct TablesGeneric<const K: u8>
where
    EvaluatableUsize<{ metadata_size_bytes(K, 1) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 2) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 3) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 4) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 5) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 6) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 7) }>: Sized,
{
    table_1: Table<K, 1>,
    table_2: Table<K, 2>,
    table_3: Table<K, 3>,
    table_4: Table<K, 4>,
    table_5: Table<K, 5>,
    table_6: Table<K, 6>,
    table_7: Table<K, 7>,
}

impl<const K: u8> TablesGeneric<K>
where
    EvaluatableUsize<{ metadata_size_bytes(K, 1) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 2) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 3) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 4) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 5) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 6) }>: Sized,
    EvaluatableUsize<{ metadata_size_bytes(K, 7) }>: Sized,
    EvaluatableUsize<{ K as usize * COMPUTE_F1_SIMD_FACTOR / u8::BITS as usize }>: Sized,
    EvaluatableUsize<{ 64 * K as usize / 8 }>: Sized,
{
    /// Create Chia proof of space tables. There also exists [`Self::create_parallel()`] that trades
    /// CPU efficiency and memory usage for lower latency.
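    ///
    /// A minimal usage sketch (hypothetical values; assumes `Seed` is a 32-byte array and that
    /// `TablesCache` provides a `Default` implementation):
    ///
    /// ```ignore
    /// let seed = [1u8; 32];
    /// let mut cache = TablesCache::default();
    /// let tables = TablesGeneric::<17>::create(seed, &mut cache);
    /// ```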
    pub(super) fn create(seed: Seed, cache: &mut TablesCache<K>) -> Self {
        let table_1 = Table::<K, 1>::create(seed);
        let table_2 = Table::<K, 2>::create(&table_1, cache);
        let table_3 = Table::<K, 3>::create(&table_2, cache);
        let table_4 = Table::<K, 4>::create(&table_3, cache);
        let table_5 = Table::<K, 5>::create(&table_4, cache);
        let table_6 = Table::<K, 6>::create(&table_5, cache);
        let table_7 = Table::<K, 7>::create(&table_6, cache);

        Self {
            table_1,
            table_2,
            table_3,
            table_4,
            table_5,
            table_6,
            table_7,
        }
    }

    /// Almost the same as [`Self::create()`], but uses parallelism internally for better
    /// performance (though at some cost in CPU and memory efficiency). If you create multiple
    /// tables in parallel, prefer [`Self::create()`] for better overall performance.
    #[cfg(any(feature = "parallel", test))]
    pub(super) fn create_parallel(seed: Seed, cache: &mut TablesCache<K>) -> Self {
        let table_1 = Table::<K, 1>::create_parallel(seed);
        let table_2 = Table::<K, 2>::create_parallel(&table_1, cache);
        let table_3 = Table::<K, 3>::create_parallel(&table_2, cache);
        let table_4 = Table::<K, 4>::create_parallel(&table_3, cache);
        let table_5 = Table::<K, 5>::create_parallel(&table_4, cache);
        let table_6 = Table::<K, 6>::create_parallel(&table_5, cache);
        let table_7 = Table::<K, 7>::create_parallel(&table_6, cache);

        Self {
            table_1,
            table_2,
            table_3,
            table_4,
            table_5,
            table_6,
            table_7,
        }
    }

    /// Find proof of space quality for the given challenge.
    pub(super) fn find_quality<'a>(
        &'a self,
        challenge: &'a Challenge,
    ) -> impl Iterator<Item = Quality> + 'a {
        let last_5_challenge_bits = challenge[challenge.len() - 1] & 0b00011111;

        let ys = self.table_7.ys();
        // We take advantage of the fact that entries are sorted by `y` (as big-endian numbers) to
        // quickly seek to the desired offset
        let first_k_challenge_bits = u32::from_be_bytes(
            challenge[..mem::size_of::<u32>()]
                .try_into()
                .expect("Challenge is known to statically have enough bytes; qed"),
        ) >> (u32::BITS as usize - usize::from(K));
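        // E.g. with a hypothetical K = 20 and a challenge starting with bytes
        // `[0x12, 0x34, 0x56, 0x78]`, the big-endian `u32` is `0x12345678` and
        // `first_k_challenge_bits = 0x12345678 >> (32 - 20) = 0x12345`.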
        let mut first_matching_element = ys
            .binary_search_by(|&y| y.first_k_bits::<K>().cmp(&first_k_challenge_bits))
            .unwrap_or_else(|insert| insert);

        // We only compare first K bits above, which is why `binary_search_by` is not guaranteed to
        // find the very first match in case there are multiple
        for index in (0..first_matching_element).rev() {
            if ys[index].first_k_bits::<K>() == first_k_challenge_bits {
                first_matching_element = index;
            } else {
                break;
            }
        }
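        // E.g. if the first K bits of consecutive `ys` entries were
        // `[.., 5, 7, 7, 7, 9, ..]` (hypothetical values) and the challenge
        // prefix is `7`, `binary_search_by` may land on any of the three `7`s,
        // so the loop above rewinds to the first one.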

        // Iterate just over elements that match the `first_k_challenge_bits` prefix
        ys[first_matching_element..]
            .iter()
            .take_while(move |&&y| {
                // Check if first K bits of `y` match
                y.first_k_bits::<K>() == first_k_challenge_bits
            })
            .zip(Position::from(first_matching_element as u32)..)
            .map(move |(_y, position)| {
                let positions = self
                    .table_7
                    .position(position)
                    .expect("Internally generated pointers must be correct; qed");
                let positions = self
                    .table_6
                    .position(pick_position(positions, last_5_challenge_bits, 6))
                    .expect("Internally generated pointers must be correct; qed");
                let positions = self
                    .table_5
                    .position(pick_position(positions, last_5_challenge_bits, 5))
                    .expect("Internally generated pointers must be correct; qed");
                let positions = self
                    .table_4
                    .position(pick_position(positions, last_5_challenge_bits, 4))
                    .expect("Internally generated pointers must be correct; qed");
                let positions = self
                    .table_3
                    .position(pick_position(positions, last_5_challenge_bits, 3))
                    .expect("Internally generated pointers must be correct; qed");
                let [left_position, right_position] = self
                    .table_2
                    .position(pick_position(positions, last_5_challenge_bits, 2))
                    .expect("Internally generated pointers must be correct; qed");

                let left_x = *self
                    .table_1
                    .xs()
                    .get(usize::from(left_position))
                    .expect("Internally generated pointers must be correct; qed");
                let right_x = *self
                    .table_1
                    .xs()
                    .get(usize::from(right_position))
                    .expect("Internally generated pointers must be correct; qed");

                let mut hasher = Sha256::new();
                hasher.update(challenge);
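                // Pack `left_x` into the most significant K bits and `right_x`
                // into the following K bits of a `u64`, then hash only the
                // bytes that contain those `2 * K` bits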
                let left_right_xs = (u64::from(left_x) << (u64::BITS as usize - usize::from(K)))
                    | (u64::from(right_x) << (u64::BITS as usize - usize::from(K * 2)));
                hasher.update(
                    &left_right_xs.to_be_bytes()[..(K as usize * 2).div_ceil(u8::BITS as usize)],
                );
                hasher.finalize().into()
            })
    }

    /// Find proof of space for the given challenge.
    pub(super) fn find_proof<'a>(
        &'a self,
        challenge: &'a Challenge,
    ) -> impl Iterator<Item = [u8; 64 * K as usize / 8]> + 'a {
        let ys = self.table_7.ys();
        // We take advantage of the fact that entries are sorted by `y` (as big-endian numbers) to
        // quickly seek to the desired offset
        let first_k_challenge_bits = u32::from_be_bytes(
            challenge[..mem::size_of::<u32>()]
                .try_into()
                .expect("Challenge is known to statically have enough bytes; qed"),
        ) >> (u32::BITS as usize - usize::from(K));
        let mut first_matching_element = ys
            .binary_search_by(|&y| y.first_k_bits::<K>().cmp(&first_k_challenge_bits))
            .unwrap_or_else(|insert| insert);

        // We only compare first K bits above, which is why `binary_search_by` is not guaranteed to
        // find the very first match in case there are multiple
        for index in (0..first_matching_element).rev() {
            if ys[index].first_k_bits::<K>() == first_k_challenge_bits {
                first_matching_element = index;
            } else {
                break;
            }
        }

        // Iterate just over elements that match the `first_k_challenge_bits` prefix
        ys[first_matching_element..]
            .iter()
            .take_while(move |&&y| {
                // Check if first K bits of `y` match
                y.first_k_bits::<K>() == first_k_challenge_bits
            })
            .zip(Position::from(first_matching_element as u32)..)
            .map(move |(_y, position)| {
                let mut proof = [0u8; 64 * K as usize / 8];

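                // The proof packs 64 `x` values of K bits each, most
                // significant bit first, `64 * K / 8` bytes in total. The
                // chain below walks back-pointers down the tables: the single
                // table 7 entry expands into 2 positions in table 6, 4 in
                // table 5 and so on, until 64 positions in table 1 yield the
                // `x` values.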
                self.table_7
                    .position(position)
                    .expect("Internally generated pointers must be correct; qed")
                    .into_iter()
                    .flat_map(|position| {
                        self.table_6
                            .position(position)
                            .expect("Internally generated pointers must be correct; qed")
                    })
                    .flat_map(|position| {
                        self.table_5
                            .position(position)
                            .expect("Internally generated pointers must be correct; qed")
                    })
                    .flat_map(|position| {
                        self.table_4
                            .position(position)
                            .expect("Internally generated pointers must be correct; qed")
                    })
                    .flat_map(|position| {
                        self.table_3
                            .position(position)
                            .expect("Internally generated pointers must be correct; qed")
                    })
                    .flat_map(|position| {
                        self.table_2
                            .position(position)
                            .expect("Internally generated pointers must be correct; qed")
                    })
                    .map(|position| {
                        self.table_1
                            .xs()
                            .get(usize::from(position))
                            .expect("Internally generated pointers must be correct; qed")
                    })
                    .enumerate()
                    .for_each(|(offset, &x)| {
                        let x_offset_in_bits = usize::from(K) * offset;
                        // Collect bytes where bits of `x` will be written
                        let proof_bytes = &mut proof[x_offset_in_bits / u8::BITS as usize..]
                            [..(x_offset_in_bits % u8::BITS as usize + usize::from(K))
                                .div_ceil(u8::BITS as usize)];
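                        // E.g. with hypothetical K = 20 and offset = 1, bits
                        // 20..40 of the proof are written, which fall in
                        // bytes 2..5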

                        // Bits of `x` already shifted to correct location as they will appear in
                        // `proof`
                        let x_shifted = u32::from(x)
                            << (u32::BITS as usize
                                - (usize::from(K) + x_offset_in_bits % u8::BITS as usize));

                        // Copy `x` bits into proof
                        x_shifted
                            .to_be_bytes()
                            .iter()
                            .zip(proof_bytes)
                            .for_each(|(from, to)| {
                                *to |= from;
                            });
                    });

                proof
            })
    }

    /// Verify proof of space for the given seed and challenge.
    ///
    /// Returns quality on successful verification.
    pub(super) fn verify(
        seed: Seed,
        challenge: &Challenge,
        proof_of_space: &[u8; 64 * K as usize / 8],
    ) -> Option<Quality>
    where
        EvaluatableUsize<{ (K as usize * 2).div_ceil(u8::BITS as usize) }>: Sized,
    {
        let last_5_challenge_bits = challenge[challenge.len() - 1] & 0b00011111;
        let first_k_challenge_bits = u32::from_be_bytes(
            challenge[..mem::size_of::<u32>()]
                .try_into()
                .expect("Challenge is known to statically have enough bytes; qed"),
        ) >> (u32::BITS as usize - usize::from(K));

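        // Re-derive the 64 first-table `(y, metadata)` pairs from the `x`
        // values packed into the proof, mirroring the bit layout produced by
        // `find_proof()`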
        let ys_and_metadata = (0..64_usize)
            .map(|offset| {
                let mut pre_x_bytes = 0u64.to_be_bytes();
                let offset_in_bits = usize::from(K) * offset;
                let bytes_to_copy = (offset_in_bits % u8::BITS as usize + usize::from(K))
                    .div_ceil(u8::BITS as usize);
                // Copy full bytes that contain bits of `x`
                pre_x_bytes[..bytes_to_copy].copy_from_slice(
                    &proof_of_space[offset_in_bits / u8::BITS as usize..][..bytes_to_copy],
                );
                // Extract `pre_x`, whose last `K` bits are `x`
                let pre_x = u64::from_be_bytes(pre_x_bytes)
                    >> (u64::BITS as usize - (usize::from(K) + offset_in_bits % u8::BITS as usize));
                // Convert to the desired type and clear extra bits
                let x = X::from(pre_x as u32 & (u32::MAX >> (u32::BITS as usize - usize::from(K))));
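                // E.g. with hypothetical K = 20 and offset = 1, bytes 2..5 are
                // copied, the `u64` is shifted right by `64 - (20 + 4) = 40`
                // bits and the mask keeps the low 20 bits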

                let (partial_y, partial_y_offset) = partial_y::<K>(seed, x);
                let y = compute_f1::<K>(x, &partial_y, partial_y_offset);

                (y, Metadata::from(x))
            })
            .collect::<Vec<_>>();

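        // Fold adjacent pairs upward through tables 2..=7: each round halves
        // the number of entries (64 -> 32 -> ... -> 1) and any pair that fails
        // `has_match` aborts verification with `None`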
        Self::collect_ys_and_metadata::<2, 1>(&ys_and_metadata)
            .and_then(|ys_and_metadata| Self::collect_ys_and_metadata::<3, 2>(&ys_and_metadata))
            .and_then(|ys_and_metadata| Self::collect_ys_and_metadata::<4, 3>(&ys_and_metadata))
            .and_then(|ys_and_metadata| Self::collect_ys_and_metadata::<5, 4>(&ys_and_metadata))
            .and_then(|ys_and_metadata| Self::collect_ys_and_metadata::<6, 5>(&ys_and_metadata))
            .and_then(|ys_and_metadata| Self::collect_ys_and_metadata::<7, 6>(&ys_and_metadata))
            .filter(|ys_and_metadata| {
                let (y, _metadata) = ys_and_metadata
                    .first()
                    .expect("On success returns exactly one entry; qed");

                // Check if first K bits of `y` match
                y.first_k_bits::<K>() == first_k_challenge_bits
            })
            .map(|_| {
                let mut quality_index = 0_usize.to_le_bytes();
                quality_index[0] = last_5_challenge_bits;
                let quality_index = usize::from_le_bytes(quality_index);
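                // With the zero-initialized little-endian bytes above,
                // `quality_index` equals `last_5_challenge_bits` and selects
                // which adjacent pair of `x` values contributes to the quality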

                let mut hasher = Sha256::new();
                hasher.update(challenge);

                // NOTE: this works correctly, but may overflow if `quality_index` is changed to
                // not be zero-initialized anymore
                let left_right_xs_bit_offset = quality_index * usize::from(K * 2);
                // Collect `left_x` and `right_x` bits, potentially with extra bits at the beginning
                // and the end
                let left_right_xs_bytes =
                    &proof_of_space[left_right_xs_bit_offset / u8::BITS as usize..]
                        [..(left_right_xs_bit_offset % u8::BITS as usize + usize::from(K * 2))
                            .div_ceil(u8::BITS as usize)];

                let mut left_right_xs = 0u64.to_be_bytes();
                left_right_xs[..left_right_xs_bytes.len()].copy_from_slice(left_right_xs_bytes);
                // Move `left_x` and `right_x` bits to most significant bits
                let left_right_xs = u64::from_be_bytes(left_right_xs)
                    << (left_right_xs_bit_offset % u8::BITS as usize);
                // Clear extra bits
                let left_right_xs_mask = u64::MAX << (u64::BITS as usize - usize::from(K * 2));
                let left_right_xs = left_right_xs & left_right_xs_mask;

                hasher.update(
                    &left_right_xs.to_be_bytes()[..usize::from(K * 2).div_ceil(u8::BITS as usize)],
                );

                hasher.finalize().into()
            })
    }

    fn collect_ys_and_metadata<const TABLE_NUMBER: u8, const PARENT_TABLE_NUMBER: u8>(
        ys_and_metadata: &[(Y, Metadata<K, PARENT_TABLE_NUMBER>)],
    ) -> Option<Vec<(Y, Metadata<K, TABLE_NUMBER>)>>
    where
        EvaluatableUsize<{ metadata_size_bytes(K, TABLE_NUMBER) }>: Sized,
        EvaluatableUsize<{ metadata_size_bytes(K, PARENT_TABLE_NUMBER) }>: Sized,
    {
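        // Pair up adjacent entries; each matching pair produces one entry of
        // the next table, while a single non-matching pair makes `collect()`
        // return `None` and verification fail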
        ys_and_metadata
            .array_chunks::<2>()
            .map(|&[(left_y, left_metadata), (right_y, right_metadata)]| {
                has_match(left_y, right_y).then_some(compute_fn::<
                    K,
                    TABLE_NUMBER,
                    PARENT_TABLE_NUMBER,
                >(
                    left_y, left_metadata, right_metadata
                ))
            })
            .collect()
    }
}