subspace_core_primitives/pieces.rs

//! Pieces-related data structures.

#[cfg(not(feature = "std"))]
extern crate alloc;

use crate::segments::{ArchivedHistorySegment, RecordedHistorySegment, SegmentIndex};
use crate::ScalarBytes;
#[cfg(feature = "serde")]
use ::serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use ::serde::{Deserializer, Serializer};
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use alloc::format;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use bytes::{Bytes, BytesMut};
use core::array::TryFromSliceError;
use core::hash::{Hash, Hasher};
use core::iter::Step;
use core::ops::{Deref, DerefMut};
use core::{fmt, mem, slice};
use derive_more::{
    Add, AddAssign, AsMut, AsRef, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul,
    MulAssign, Sub, SubAssign,
};
use parity_scale_codec::{Decode, Encode, EncodeLike, Input, MaxEncodedLen, Output};
#[cfg(feature = "parallel")]
use rayon::prelude::*;
use scale_info::build::Fields;
use scale_info::{Path, Type, TypeInfo};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;
/// Piece index in consensus
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Encode,
    Decode,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TypeInfo,
    MaxEncodedLen,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct PieceIndex(u64);

impl Step for PieceIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}

impl From<u64> for PieceIndex {
    #[inline]
    fn from(original: u64) -> Self {
        Self(original)
    }
}

impl From<PieceIndex> for u64 {
    #[inline]
    fn from(original: PieceIndex) -> Self {
        original.0
    }
}

impl PieceIndex {
    /// Size in bytes.
    pub const SIZE: usize = mem::size_of::<u64>();
    /// Piece index 0.
    pub const ZERO: PieceIndex = PieceIndex(0);
    /// Piece index 1.
    pub const ONE: PieceIndex = PieceIndex(1);

    /// Create new instance
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Create piece index from bytes.
    #[inline]
    pub const fn from_bytes(bytes: [u8; Self::SIZE]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Convert piece index to bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; Self::SIZE] {
        self.0.to_le_bytes()
    }
    /// Segment index this piece index corresponds to
    #[inline]
    pub const fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::new(self.0 / ArchivedHistorySegment::NUM_PIECES as u64)
    }

    /// Position of a piece in a segment
    #[inline]
    pub const fn position(&self) -> u32 {
        // Position is statically guaranteed to fit into u32
        (self.0 % ArchivedHistorySegment::NUM_PIECES as u64) as u32
    }

    /// Position of a source piece in the source pieces for a segment.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn source_position(&self) -> u32 {
        assert!(self.is_source());

        let source_start = self.position() / RecordedHistorySegment::ERASURE_CODING_RATE.1 as u32
            * RecordedHistorySegment::ERASURE_CODING_RATE.0 as u32;
        let source_offset = self.position() % RecordedHistorySegment::ERASURE_CODING_RATE.1 as u32;

        source_start + source_offset
    }

    /// Returns the piece index for a source position and segment index.
    /// Overflows to the next segment if the position is greater than the last source position.
    #[inline]
    pub const fn from_source_position(
        source_position: u32,
        segment_index: SegmentIndex,
    ) -> PieceIndex {
        let source_position = source_position as u64;
        let start = source_position / RecordedHistorySegment::ERASURE_CODING_RATE.0 as u64
            * RecordedHistorySegment::ERASURE_CODING_RATE.1 as u64;
        let offset = source_position % RecordedHistorySegment::ERASURE_CODING_RATE.0 as u64;

        PieceIndex(segment_index.first_piece_index().0 + start + offset)
    }

    /// Is this piece index a source piece?
    #[inline]
    pub const fn is_source(&self) -> bool {
        // Source pieces are interleaved with parity pieces, source first
        self.0 % (RecordedHistorySegment::ERASURE_CODING_RATE.1 as u64)
            < (RecordedHistorySegment::ERASURE_CODING_RATE.0 as u64)
    }

    /// Returns the next source piece index.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn next_source_index(&self) -> PieceIndex {
        PieceIndex::from_source_position(self.source_position() + 1, self.segment_index())
    }

    /// Returns the previous source piece index, if there is one.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn prev_source_index(&self) -> Option<PieceIndex> {
        if self.source_position() == 0 {
            // TODO: when Option::map or ? become const, use them here
            match self.segment_index().checked_sub(SegmentIndex::ONE) {
                Some(segment_index) => Some(PieceIndex::from_source_position(
                    RecordedHistorySegment::NUM_RAW_RECORDS as u32 - 1,
                    segment_index,
                )),
                None => None,
            }
        } else {
            Some(PieceIndex::from_source_position(
                self.source_position() - 1,
                self.segment_index(),
            ))
        }
    }
}
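
// A minimal usage sketch of the index arithmetic above (hypothetical test module, assuming only
// the interleaved source/parity layout guaranteed by this file).
#[cfg(test)]
mod piece_index_example {
    use super::*;

    #[test]
    fn segment_and_source_roundtrip() {
        // The first piece of segment 1 comes right after the last piece of segment 0
        let piece_index = PieceIndex::new(ArchivedHistorySegment::NUM_PIECES as u64);
        assert_eq!(piece_index.segment_index().first_piece_index(), piece_index);
        assert_eq!(piece_index.position(), 0);
        // Position 0 in a segment is always a source piece, and source positions map back
        // through `from_source_position`
        assert!(piece_index.is_source());
        let source_position = piece_index.source_position();
        let segment_index = piece_index.segment_index();
        assert_eq!(
            PieceIndex::from_source_position(source_position, segment_index),
            piece_index
        );
    }
}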

/// Piece offset in sector
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Encode,
    Decode,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TypeInfo,
    MaxEncodedLen,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct PieceOffset(u16);

impl Step for PieceOffset {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u16::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u16::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u16::backward_checked(start.0, count).map(Self)
    }
}

impl From<u16> for PieceOffset {
    #[inline]
    fn from(original: u16) -> Self {
        Self(original)
    }
}

impl From<PieceOffset> for u16 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        original.0
    }
}

impl From<PieceOffset> for u32 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for u64 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for usize {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        usize::from(original.0)
    }
}

impl PieceOffset {
    /// Piece offset 0.
    pub const ZERO: PieceOffset = PieceOffset(0);
    /// Piece offset 1.
    pub const ONE: PieceOffset = PieceOffset(1);

    /// Convert piece offset to bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; mem::size_of::<u16>()] {
        self.0.to_le_bytes()
    }
}
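
// Tiny illustrative check (hypothetical test module): offsets serialize in little-endian byte
// order, matching `PieceIndex::to_bytes` above.
#[cfg(test)]
mod piece_offset_example {
    use super::*;

    #[test]
    fn to_bytes_is_little_endian() {
        assert_eq!(PieceOffset::ONE.to_bytes(), 1u16.to_le_bytes());
        assert_eq!(PieceOffset::from(0x0102).to_bytes(), [0x02, 0x01]);
    }
}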

/// Raw record contained within recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct RawRecord([[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]);

impl fmt::Debug for RawRecord {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0.as_flattened()))
    }
}

impl Default for RawRecord {
    #[inline]
    fn default() -> Self {
        Self([Default::default(); Self::NUM_CHUNKS])
    }
}

impl AsRef<[u8]> for RawRecord {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_slice().as_flattened()
    }
}

impl AsMut<[u8]> for RawRecord {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut_slice().as_flattened_mut()
    }
}

impl From<&RawRecord> for &[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &RawRecord {
    #[inline]
    fn from(value: &[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RawRecord> for &mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &mut RawRecord {
    #[inline]
    fn from(value: &mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&RawRecord> for &[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &RawRecord {
    #[inline]
    fn from(value: &[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RawRecord> for &mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &mut RawRecord {
    #[inline]
    fn from(value: &mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl RawRecord {
    /// Number of chunks (scalars) within one raw record.
    pub const NUM_CHUNKS: usize = 2_usize.pow(15);
    /// Size of raw record in bytes, is guaranteed to be a multiple of [`ScalarBytes::SAFE_BYTES`].
    pub const SIZE: usize = ScalarBytes::SAFE_BYTES * Self::NUM_CHUNKS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::new_zeroed().assume_init() }
    }

    /// Convenient conversion from slice of record to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to record for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of record to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(
        value: &mut [Self],
    ) -> &mut [[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to record for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(
        value: &mut [[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]],
    ) -> &mut [Self] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}
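
// Illustrative sketch (hypothetical test module): `new_boxed` produces a zeroed record directly
// on the heap, so even this roughly 1 MiB structure is safe to create in a test.
#[cfg(test)]
mod raw_record_example {
    use super::*;

    #[test]
    fn boxed_record_is_zeroed() {
        let record = RawRecord::new_boxed();
        assert_eq!(record.as_flattened().len(), RawRecord::SIZE);
        assert!(record.as_flattened().iter().all(|&byte| byte == 0));
    }
}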

/// Record contained within a piece.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct Record([[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]);

impl fmt::Debug for Record {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0.as_flattened()))
    }
}

impl Default for Record {
    #[inline]
    fn default() -> Self {
        Self([Default::default(); Self::NUM_CHUNKS])
    }
}

impl AsRef<[u8]> for Record {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for Record {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl From<&Record> for &[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&Record> for &[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl Record {
    /// Number of chunks (scalars) within one record.
    pub const NUM_CHUNKS: usize = RawRecord::NUM_CHUNKS;
    /// Number of s-buckets contained within one record (and by extension sector).
    ///
    /// Essentially we chunk records into scalars and erasure code them.
    pub const NUM_S_BUCKETS: usize = Self::NUM_CHUNKS
        * RecordedHistorySegment::ERASURE_CODING_RATE.1
        / RecordedHistorySegment::ERASURE_CODING_RATE.0;
    /// Size of a record in bytes after erasure coding [`RawRecord`], guaranteed to be a multiple
    /// of [`ScalarBytes::FULL_BYTES`].
    pub const SIZE: usize = ScalarBytes::FULL_BYTES * Self::NUM_CHUNKS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::new_zeroed().assume_init() }
    }

    /// Create a vector filled with zeroed records without hitting stack overflow
    #[inline]
    pub fn new_zero_vec(length: usize) -> Vec<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        let mut records = Vec::with_capacity(length);
        {
            let slice = records.spare_capacity_mut();
            // SAFETY: Same memory layout due to `#[repr(transparent)]` on `Record` and
            // `MaybeUninit<[[T; M]; N]>` is guaranteed to have the same layout as
            // `[[MaybeUninit<T>; M]; N]`
            let slice = unsafe {
                slice::from_raw_parts_mut(
                    slice.as_mut_ptr()
                        as *mut [[mem::MaybeUninit<u8>; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS],
                    length,
                )
            };
            for byte in slice.as_flattened_mut().as_flattened_mut() {
                byte.write(0);
            }
        }
        // SAFETY: All values are initialized above.
        unsafe {
            records.set_len(records.capacity());
        }

        records
    }

    /// Convenient conversion from slice of record to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to record for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of record to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(
        value: &mut [Self],
    ) -> &mut [[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to record for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(
        value: &mut [[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]],
    ) -> &mut [Self] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convert a record to its raw bytes; assumes a source record that only stores safe bytes in
    /// its chunks.
    #[inline]
    pub fn to_raw_record_chunks(
        &self,
    ) -> impl DoubleEndedIterator<Item = &'_ [u8; ScalarBytes::SAFE_BYTES]> + '_ {
        // We have zero byte padding from [`ScalarBytes::SAFE_BYTES`] to
        // [`ScalarBytes::FULL_BYTES`] that we need to skip
        self.iter()
            .map(|bytes| bytes[1..].try_into().expect("Correct length; qed"))
    }

    /// Convert a record to its mutable raw bytes; assumes a source record that only stores safe
    /// bytes in its chunks.
    #[inline]
    pub fn to_mut_raw_record_chunks(
        &mut self,
    ) -> impl DoubleEndedIterator<Item = &'_ mut [u8; ScalarBytes::SAFE_BYTES]> + '_ {
        self.iter_mut()
            .map(|bytes| (&mut bytes[1..]).try_into().expect("Correct length; qed"))
    }
}
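
// Sketch of the padding layout described above (hypothetical test module): every full chunk
// carries one zero padding byte at index 0, which the raw-chunk iterators skip.
#[cfg(test)]
mod record_example {
    use super::*;

    #[test]
    fn raw_chunks_skip_padding_byte() {
        let mut records = Record::new_zero_vec(1);
        let record = &mut records[0];

        // Fill only the safe bytes of every chunk
        for chunk in record.to_mut_raw_record_chunks() {
            chunk.fill(0xff);
        }

        // The padding byte at the start of each full chunk is untouched
        assert!(record.iter().all(|chunk| chunk[0] == 0));
        assert!(record.iter().all(|chunk| chunk[1..].iter().all(|&byte| byte == 0xff)));
    }
}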

/// Record commitment contained within a piece.
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Hash,
    Deref,
    DerefMut,
    From,
    Into,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
)]
#[repr(transparent)]
pub struct RecordCommitment([u8; RecordCommitment::SIZE]);

impl fmt::Debug for RecordCommitment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordCommitmentBinary(#[serde(with = "BigArray")] [u8; RecordCommitment::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordCommitmentHex(#[serde(with = "hex")] [u8; RecordCommitment::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordCommitment {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordCommitmentHex(self.0).serialize(serializer)
        } else {
            RecordCommitmentBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordCommitment {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordCommitmentHex::deserialize(deserializer)?.0
        } else {
            RecordCommitmentBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordCommitment {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordCommitment {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordCommitment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordCommitment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordCommitment> for &[u8; RecordCommitment::SIZE] {
    #[inline]
    fn from(value: &RecordCommitment) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordCommitment::SIZE]> for &RecordCommitment {
    #[inline]
    fn from(value: &[u8; RecordCommitment::SIZE]) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordCommitment> for &mut [u8; RecordCommitment::SIZE] {
    #[inline]
    fn from(value: &mut RecordCommitment) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordCommitment::SIZE]> for &mut RecordCommitment {
    #[inline]
    fn from(value: &mut [u8; RecordCommitment::SIZE]) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordCommitment {
    /// Size of record commitment in bytes.
    pub const SIZE: usize = 48;
}
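
// Minimal sketch (hypothetical test module) of the fixed-size conversions above; the same
// pattern applies to `RecordWitness` below.
#[cfg(test)]
mod record_commitment_example {
    use super::*;

    #[test]
    fn try_from_checks_length() {
        let bytes = [0u8; RecordCommitment::SIZE];
        assert!(RecordCommitment::try_from(bytes.as_slice()).is_ok());
        // A slice of any other length is rejected
        assert!(RecordCommitment::try_from(&bytes[1..]).is_err());
    }
}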

/// Record witness contained within a piece.
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Hash,
    Deref,
    DerefMut,
    From,
    Into,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
)]
#[repr(transparent)]
pub struct RecordWitness([u8; RecordWitness::SIZE]);

impl fmt::Debug for RecordWitness {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordWitnessBinary(#[serde(with = "BigArray")] [u8; RecordWitness::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordWitnessHex(#[serde(with = "hex")] [u8; RecordWitness::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordWitness {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordWitnessHex(self.0).serialize(serializer)
        } else {
            RecordWitnessBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordWitness {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordWitnessHex::deserialize(deserializer)?.0
        } else {
            RecordWitnessBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordWitness {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordWitness {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordWitness {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordWitness {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordWitness> for &[u8; RecordWitness::SIZE] {
    #[inline]
    fn from(value: &RecordWitness) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordWitness::SIZE]> for &RecordWitness {
    #[inline]
    fn from(value: &[u8; RecordWitness::SIZE]) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordWitness> for &mut [u8; RecordWitness::SIZE] {
    #[inline]
    fn from(value: &mut RecordWitness) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordWitness::SIZE]> for &mut RecordWitness {
    #[inline]
    fn from(value: &mut [u8; RecordWitness::SIZE]) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordWitness {
    /// Size of record witness in bytes.
    pub const SIZE: usize = 48;
}

enum CowBytes {
    Shared(Bytes),
    Owned(BytesMut),
}

impl fmt::Debug for CowBytes {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.as_ref()))
    }
}

impl PartialEq for CowBytes {
    fn eq(&self, other: &Self) -> bool {
        self.as_ref().eq(other.as_ref())
    }
}

impl Eq for CowBytes {}

impl Hash for CowBytes {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_ref().hash(state)
    }
}

impl Clone for CowBytes {
    fn clone(&self) -> Self {
        match self {
            Self::Shared(bytes) => Self::Shared(bytes.clone()),
            // Always return shared clone
            Self::Owned(bytes) => Self::Shared(Bytes::copy_from_slice(bytes)),
        }
    }
}

impl AsRef<[u8]> for CowBytes {
    fn as_ref(&self) -> &[u8] {
        match self {
            CowBytes::Shared(bytes) => bytes.as_ref(),
            CowBytes::Owned(bytes) => bytes.as_ref(),
        }
    }
}

impl AsMut<[u8]> for CowBytes {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        match self {
            CowBytes::Shared(bytes) => {
                *self = CowBytes::Owned(BytesMut::from(mem::take(bytes)));

                let CowBytes::Owned(bytes) = self else {
                    unreachable!("Just replaced; qed");
                };

                bytes.as_mut()
            }
            CowBytes::Owned(bytes) => bytes.as_mut(),
        }
    }
}

/// A piece of archival history in Subspace Network.
///
/// This version is allocated on the heap; for a stack-allocated piece see [`PieceArray`].
///
/// Internally, a piece contains a record and the corresponding witness that, together with the
/// segment commitment of the segment this piece belongs to, can be used to verify that the piece
/// belongs to the actual archival history of the blockchain.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Piece(CowBytes);

impl Encode for Piece {
    #[inline]
    fn size_hint(&self) -> usize {
        self.as_ref().size_hint()
    }

    #[inline]
    fn encode_to<O: Output + ?Sized>(&self, output: &mut O) {
        self.as_ref().encode_to(output)
    }

    #[inline]
    fn encode(&self) -> Vec<u8> {
        self.as_ref().encode()
    }

    #[inline]
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        self.as_ref().using_encoded(f)
    }
}

impl EncodeLike for Piece {}

impl Decode for Piece {
    fn decode<I: Input>(input: &mut I) -> Result<Self, parity_scale_codec::Error> {
        let bytes =
            Bytes::decode(input).map_err(|error| error.chain("Could not decode `Piece`"))?;

        if bytes.len() != Self::SIZE {
            return Err(
                parity_scale_codec::Error::from("Incorrect Piece length").chain(format!(
                    "Expected {} bytes, found {} bytes",
                    Self::SIZE,
                    bytes.len()
                )),
            );
        }

        Ok(Piece(CowBytes::Shared(bytes)))
    }
}
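
// SCALE round-trip sketch (hypothetical test module): `Piece` encodes as a length-prefixed byte
// vector, so decoding accepts exactly `Piece::SIZE` bytes.
#[cfg(test)]
mod piece_codec_example {
    use super::*;

    #[test]
    fn scale_roundtrip() {
        let piece = Piece::default();
        let encoded = piece.encode();
        let decoded = Piece::decode(&mut encoded.as_slice()).expect("Length is correct; qed");
        assert_eq!(piece, decoded);
    }
}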

impl TypeInfo for Piece {
    type Identity = Self;

    fn type_info() -> Type {
        Type::builder()
            .path(Path::new("Piece", module_path!()))
            .docs(&["A piece of archival history in Subspace Network"])
            .composite(
                Fields::unnamed().field(|f| f.ty::<[u8; Piece::SIZE]>().type_name("PieceArray")),
            )
    }
}

#[cfg(feature = "serde")]
impl Serialize for Piece {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let bytes = match &self.0 {
            CowBytes::Shared(bytes) => bytes.as_ref(),
            CowBytes::Owned(bytes) => bytes.as_ref(),
        };

        if serializer.is_human_readable() {
            hex::serde::serialize(bytes, serializer)
        } else {
            bytes.serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for Piece {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            hex::serde::deserialize::<_, Vec<u8>>(deserializer).and_then(|bytes| {
                if bytes.len() == Piece::SIZE {
                    Ok(Bytes::from(bytes))
                } else {
                    Err(serde::de::Error::invalid_length(
                        bytes.len(),
                        &format!("Expected {} bytes", Piece::SIZE).as_str(),
                    ))
                }
            })?
        } else {
            Bytes::deserialize(deserializer)?
        };

        Ok(Piece(CowBytes::Shared(bytes)))
    }
}

impl Default for Piece {
    #[inline]
    fn default() -> Self {
        Self(CowBytes::Owned(BytesMut::zeroed(Self::SIZE)))
    }
}

impl From<Piece> for Vec<u8> {
    #[inline]
    fn from(piece: Piece) -> Self {
        match piece.0 {
            CowBytes::Shared(bytes) => bytes.to_vec(),
            CowBytes::Owned(bytes) => Vec::from(bytes),
        }
    }
}

impl TryFrom<&[u8]> for Piece {
    type Error = ();

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        if slice.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Shared(Bytes::copy_from_slice(slice))))
    }
}

impl TryFrom<Vec<u8>> for Piece {
    type Error = ();

    #[inline]
    fn try_from(vec: Vec<u8>) -> Result<Self, Self::Error> {
        if vec.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Shared(Bytes::from(vec))))
    }
}

impl TryFrom<Bytes> for Piece {
    type Error = ();

    #[inline]
    fn try_from(bytes: Bytes) -> Result<Self, Self::Error> {
        if bytes.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Shared(bytes)))
    }
}

impl TryFrom<BytesMut> for Piece {
    type Error = ();

    #[inline]
    fn try_from(bytes: BytesMut) -> Result<Self, Self::Error> {
        if bytes.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Owned(bytes)))
    }
}

impl From<&PieceArray> for Piece {
    #[inline]
    fn from(value: &PieceArray) -> Self {
        Self(CowBytes::Shared(Bytes::copy_from_slice(value.as_ref())))
    }
}

impl Deref for Piece {
    type Target = PieceArray;

    #[inline]
    fn deref(&self) -> &Self::Target {
        <&[u8; Self::SIZE]>::try_from(self.as_ref())
            .expect("Slice of memory has correct length; qed")
            .into()
    }
}

impl DerefMut for Piece {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        <&mut [u8; Self::SIZE]>::try_from(self.as_mut())
            .expect("Slice of memory has correct length; qed")
            .into()
    }
}

impl AsRef<[u8]> for Piece {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8]> for Piece {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

impl Piece {
    /// Size of a piece (in bytes).
    pub const SIZE: usize = Record::SIZE + RecordCommitment::SIZE + RecordWitness::SIZE;

    /// Ensure the piece contains cheaply cloneable shared data.
    ///
    /// Internally, a piece uses a CoW mechanism and can store either mutable owned data or data
    /// that is cheap to clone; calling this method ensures that further clones will not result in
    /// additional memory allocations.
    pub fn to_shared(self) -> Self {
        Self(match self.0 {
            CowBytes::Shared(bytes) => CowBytes::Shared(bytes),
            CowBytes::Owned(bytes) => CowBytes::Shared(bytes.freeze()),
        })
    }
}
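
// Demonstration of the CoW behavior described above (hypothetical test module): clones of a
// shared piece reuse the same reference-counted buffer, a property of `bytes::Bytes`.
#[cfg(test)]
mod piece_example {
    use super::*;

    #[test]
    fn shared_clones_reuse_buffer() {
        let piece = Piece::default().to_shared();
        let clone = piece.clone();

        assert_eq!(piece, clone);
        // Both point at the same underlying allocation
        assert_eq!(piece.as_ref().as_ptr(), clone.as_ref().as_ptr());
    }
}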

/// A piece of archival history in Subspace Network.
///
/// This version is allocated on the stack; for a heap-allocated piece see [`Piece`].
///
/// Internally, a piece contains a record and the corresponding witness that, together with the
/// segment commitment of the segment this piece belongs to, can be used to verify that the piece
/// belongs to the actual archival history of the blockchain.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Deref, DerefMut, AsRef, AsMut)]
#[repr(transparent)]
pub struct PieceArray([u8; Piece::SIZE]);

impl fmt::Debug for PieceArray {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

impl Default for PieceArray {
    #[inline]
    fn default() -> Self {
        Self([0u8; Piece::SIZE])
    }
}

impl AsRef<[u8]> for PieceArray {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for PieceArray {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&PieceArray> for &[u8; Piece::SIZE] {
    #[inline]
    fn from(value: &PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; Piece::SIZE]> for &PieceArray {
    #[inline]
    fn from(value: &[u8; Piece::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut PieceArray> for &mut [u8; Piece::SIZE] {
    #[inline]
    fn from(value: &mut PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; Piece::SIZE]> for &mut PieceArray {
    #[inline]
    fn from(value: &mut [u8; Piece::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl PieceArray {
    /// Create boxed value without hitting stack overflow
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }

    /// Split piece into underlying components.
    #[inline]
    pub fn split(&self) -> (&Record, &RecordCommitment, &RecordWitness) {
        let (record, extra) = self.0.split_at(Record::SIZE);
        let (commitment, witness) = extra.split_at(RecordCommitment::SIZE);

        let record = <&[u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let commitment = <&[u8; RecordCommitment::SIZE]>::try_from(commitment)
            .expect("Slice of memory has correct length; qed");
        let witness = <&[u8; RecordWitness::SIZE]>::try_from(witness)
            .expect("Slice of memory has correct length; qed");

        (record.into(), commitment.into(), witness.into())
    }

    /// Split piece into underlying mutable components.
    #[inline]
    pub fn split_mut(&mut self) -> (&mut Record, &mut RecordCommitment, &mut RecordWitness) {
        let (record, extra) = self.0.split_at_mut(Record::SIZE);
        let (commitment, witness) = extra.split_at_mut(RecordCommitment::SIZE);

        let record = <&mut [u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let commitment = <&mut [u8; RecordCommitment::SIZE]>::try_from(commitment)
            .expect("Slice of memory has correct length; qed");
        let witness = <&mut [u8; RecordWitness::SIZE]>::try_from(witness)
            .expect("Slice of memory has correct length; qed");

        (record.into(), commitment.into(), witness.into())
    }

    /// Record contained within a piece.
    #[inline]
    pub fn record(&self) -> &Record {
        self.split().0
    }

    /// Mutable record contained within a piece.
    #[inline]
    pub fn record_mut(&mut self) -> &mut Record {
        self.split_mut().0
    }

    /// Commitment contained within a piece.
    #[inline]
    pub fn commitment(&self) -> &RecordCommitment {
        self.split().1
    }

    /// Mutable commitment contained within a piece.
    #[inline]
    pub fn commitment_mut(&mut self) -> &mut RecordCommitment {
        self.split_mut().1
    }

    /// Witness contained within a piece.
    #[inline]
    pub fn witness(&self) -> &RecordWitness {
        self.split().2
    }

    /// Mutable witness contained within a piece.
    #[inline]
    pub fn witness_mut(&mut self) -> &mut RecordWitness {
        self.split_mut().2
    }

    /// Convenient conversion from slice of piece array to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; Piece::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to piece array for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[u8; Piece::SIZE]]) -> &[Self] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of piece array to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; Piece::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to piece array for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; Piece::SIZE]]) -> &mut [Self] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}
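
// Layout sketch (hypothetical test module): the three components of a piece sit back to back,
// so mutating the record through `record_mut` leaves commitment and witness untouched.
#[cfg(test)]
mod piece_array_example {
    use super::*;

    #[test]
    fn split_respects_layout() {
        assert_eq!(
            Record::SIZE + RecordCommitment::SIZE + RecordWitness::SIZE,
            Piece::SIZE
        );

        let mut piece = PieceArray::new_boxed();
        piece.record_mut().as_mut().fill(1);

        let (record, commitment, witness) = piece.split();
        assert!(record.as_ref().iter().all(|&byte| byte == 1));
        assert!(commitment.as_ref().iter().all(|&byte| byte == 0));
        assert!(witness.as_ref().iter().all(|&byte| byte == 0));
    }
}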

impl From<Box<PieceArray>> for Vec<u8> {
    fn from(value: Box<PieceArray>) -> Self {
        let mut value = mem::ManuallyDrop::new(value);
        // SAFETY: Always contains fixed allocation of bytes
        unsafe { Vec::from_raw_parts(value.as_mut_ptr(), Piece::SIZE, Piece::SIZE) }
    }
}

/// Flat representation of multiple pieces concatenated for more efficient processing
#[derive(Clone, PartialEq, Eq)]
pub struct FlatPieces(CowBytes);

impl fmt::Debug for FlatPieces {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("FlatPieces").finish_non_exhaustive()
    }
}

impl Deref for FlatPieces {
    type Target = [PieceArray];

    #[inline]
    fn deref(&self) -> &Self::Target {
        let bytes = self.0.as_ref();
        // SAFETY: Bytes slice has length of multiples of piece size and lifetimes of returned data
        // are preserved
        let pieces = unsafe {
            slice::from_raw_parts(
                bytes.as_ptr() as *const [u8; Piece::SIZE],
                bytes.len() / Piece::SIZE,
            )
        };
        PieceArray::slice_from_repr(pieces)
    }
}

impl DerefMut for FlatPieces {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        let bytes = self.0.as_mut();
        // SAFETY: Bytes slice has length of multiples of piece size and lifetimes of returned data
        // are preserved
        let pieces = unsafe {
            slice::from_raw_parts_mut(
                bytes.as_mut_ptr() as *mut [u8; Piece::SIZE],
                bytes.len() / Piece::SIZE,
            )
        };
        PieceArray::slice_mut_from_repr(pieces)
    }
}

impl FlatPieces {
    /// Allocate `FlatPieces` that will hold `piece_count` pieces filled with zeroes
    #[inline]
    pub fn new(piece_count: usize) -> Self {
        Self(CowBytes::Owned(BytesMut::zeroed(piece_count * Piece::SIZE)))
    }

    /// Iterate over all pieces.
    ///
    /// NOTE: Unless [`Self::to_shared`] was called first, the iterator may have to allocate each
    /// piece from scratch, which is rarely the desired behavior.
    #[inline]
    pub fn pieces(&self) -> Box<dyn ExactSizeIterator<Item = Piece> + '_> {
        match &self.0 {
            CowBytes::Shared(bytes) => Box::new(
                bytes
                    .chunks_exact(Piece::SIZE)
                    .map(|slice| Piece(CowBytes::Shared(bytes.slice_ref(slice)))),
            ),
            CowBytes::Owned(bytes) => Box::new(
                bytes
                    .chunks_exact(Piece::SIZE)
                    .map(|slice| Piece(CowBytes::Shared(Bytes::copy_from_slice(slice)))),
            ),
        }
    }

    /// Iterator over source pieces (even indices)
    #[inline]
    pub fn source_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
        self.pieces().step_by(2)
    }

    /// Iterator over source pieces (even indices)
    #[inline]
    pub fn source(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
        self.iter().step_by(2)
    }

    /// Mutable iterator over source pieces (even indices)
    #[inline]
    pub fn source_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
        self.iter_mut().step_by(2)
    }

    /// Iterator over parity pieces (odd indices)
    #[inline]
    pub fn parity_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
        self.pieces().skip(1).step_by(2)
    }

    /// Iterator over parity pieces (odd indices)
    #[inline]
    pub fn parity(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
        self.iter().skip(1).step_by(2)
    }

    /// Mutable iterator over parity pieces (odd indices)
    #[inline]
    pub fn parity_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
        self.iter_mut().skip(1).step_by(2)
    }

    /// Ensure flat pieces contain cheaply cloneable shared data.
    ///
    /// Internally, flat pieces use a CoW mechanism and can store either mutable owned data or data
    /// that is cheap to clone; calling this method ensures that further clones and returned pieces
    /// will not result in additional memory allocations.
    pub fn to_shared(self) -> Self {
        Self(match self.0 {
            CowBytes::Shared(bytes) => CowBytes::Shared(bytes),
            CowBytes::Owned(bytes) => CowBytes::Shared(bytes.freeze()),
        })
    }
}
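
// Usage sketch (hypothetical test module): pieces alternate source/parity, so the even/odd
// iterators above partition the buffer exactly in half.
#[cfg(test)]
mod flat_pieces_example {
    use super::*;

    #[test]
    fn source_and_parity_partition() {
        let flat = FlatPieces::new(4).to_shared();

        assert_eq!(flat.len(), 4);
        assert_eq!(flat.pieces().count(), 4);
        assert_eq!(flat.source().count(), 2);
        assert_eq!(flat.parity().count(), 2);
    }
}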

#[cfg(feature = "parallel")]
impl FlatPieces {
    /// Parallel iterator over source pieces (even indices)
    #[inline]
    pub fn par_source(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        self.par_iter().step_by(2)
    }

    /// Mutable parallel iterator over source pieces (even indices)
    #[inline]
    pub fn par_source_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        self.par_iter_mut().step_by(2)
    }

    /// Parallel iterator over parity pieces (odd indices)
    #[inline]
    pub fn par_parity(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        self.par_iter().skip(1).step_by(2)
    }

    /// Mutable parallel iterator over parity pieces (odd indices)
    #[inline]
    pub fn par_parity_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        self.par_iter_mut().skip(1).step_by(2)
    }
}
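
// Parallel counterpart of the example above (hypothetical test module, compiled only with the
// `parallel` feature): the parallel iterators visit the same elements as the sequential ones.
#[cfg(all(test, feature = "parallel"))]
mod flat_pieces_parallel_example {
    use super::*;

    #[test]
    fn parallel_matches_sequential() {
        let mut flat = FlatPieces::new(4);

        assert_eq!(flat.par_source().count(), flat.source().count());
        assert_eq!(flat.par_parity().count(), flat.parity().count());
        assert_eq!(flat.par_source_mut().count(), 2);
    }
}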