subspace_core_primitives/
pieces.rs

//! Pieces-related data structures.

#[cfg(not(feature = "std"))]
extern crate alloc;

use crate::ScalarBytes;
use crate::segments::{ArchivedHistorySegment, RecordedHistorySegment, SegmentIndex};
#[cfg(feature = "serde")]
use ::serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use ::serde::{Deserializer, Serializer};
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use alloc::format;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use bytes::{Bytes, BytesMut};
use core::array::TryFromSliceError;
use core::hash::{Hash, Hasher};
use core::iter::Step;
use core::ops::{Deref, DerefMut};
use core::{fmt, mem, slice};
use derive_more::{
    Add, AddAssign, AsMut, AsRef, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul,
    MulAssign, Sub, SubAssign,
};
use parity_scale_codec::{
    Decode, DecodeWithMemTracking, Encode, EncodeLike, Input, MaxEncodedLen, Output,
};
#[cfg(feature = "parallel")]
use rayon::prelude::*;
use scale_info::build::Fields;
use scale_info::{Path, Type, TypeInfo};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Piece index in consensus
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Encode,
    Decode,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TypeInfo,
    MaxEncodedLen,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct PieceIndex(u64);

impl Step for PieceIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}

impl From<u64> for PieceIndex {
    #[inline]
    fn from(original: u64) -> Self {
        Self(original)
    }
}

impl From<PieceIndex> for u64 {
    #[inline]
    fn from(original: PieceIndex) -> Self {
        original.0
    }
}

impl PieceIndex {
    /// Size in bytes.
    pub const SIZE: usize = mem::size_of::<u64>();
    /// Piece index 0.
    pub const ZERO: PieceIndex = PieceIndex(0);
    /// Piece index 1.
    pub const ONE: PieceIndex = PieceIndex(1);

    /// Create new instance
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Create piece index from bytes.
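    ///
    /// A round-trip sketch (bytes are interpreted as little-endian):
    ///
    /// ```ignore
    /// let index = PieceIndex::from_bytes(42u64.to_le_bytes());
    /// assert_eq!(index, PieceIndex::new(42));
    /// assert_eq!(index.to_bytes(), 42u64.to_le_bytes());
    /// ```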
    #[inline]
    pub const fn from_bytes(bytes: [u8; Self::SIZE]) -> Self {
        Self(u64::from_le_bytes(bytes))
    }

    /// Convert piece index to bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; Self::SIZE] {
        self.0.to_le_bytes()
    }

    /// Segment index this piece index corresponds to
    #[inline]
    pub const fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::new(self.0 / ArchivedHistorySegment::NUM_PIECES as u64)
    }

    /// Position of a piece in a segment
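    ///
    /// A sketch of how this pairs with [`Self::segment_index`], assuming
    /// [`ArchivedHistorySegment::NUM_PIECES`] pieces per segment:
    ///
    /// ```ignore
    /// let pieces_per_segment = ArchivedHistorySegment::NUM_PIECES as u64;
    /// let index = PieceIndex::new(pieces_per_segment + 3);
    /// assert_eq!(index.segment_index(), SegmentIndex::new(1));
    /// assert_eq!(index.position(), 3);
    /// ```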
    #[inline]
    pub const fn position(&self) -> u32 {
        // Position is statically guaranteed to fit into u32
        (self.0 % ArchivedHistorySegment::NUM_PIECES as u64) as u32
    }

    /// Position of a source piece among the source pieces of its segment.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn source_position(&self) -> u32 {
        assert!(self.is_source());

        let source_start = self.position() / RecordedHistorySegment::ERASURE_CODING_RATE.1 as u32
            * RecordedHistorySegment::ERASURE_CODING_RATE.0 as u32;
        let source_offset = self.position() % RecordedHistorySegment::ERASURE_CODING_RATE.1 as u32;

        source_start + source_offset
    }

    /// Returns the piece index for a source position and segment index.
    /// Overflows to the next segment if the position is greater than the last source position.
    #[inline]
    pub const fn from_source_position(
        source_position: u32,
        segment_index: SegmentIndex,
    ) -> PieceIndex {
        let source_position = source_position as u64;
        let start = source_position / RecordedHistorySegment::ERASURE_CODING_RATE.0 as u64
            * RecordedHistorySegment::ERASURE_CODING_RATE.1 as u64;
        let offset = source_position % RecordedHistorySegment::ERASURE_CODING_RATE.0 as u64;

        PieceIndex(segment_index.first_piece_index().0 + start + offset)
    }

    /// Is this piece index a source piece?
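    ///
    /// A sketch assuming a 1/2 erasure coding rate, where source and parity pieces alternate
    /// (source first):
    ///
    /// ```ignore
    /// assert!(PieceIndex::new(0).is_source());
    /// assert!(!PieceIndex::new(1).is_source());
    /// assert_eq!(PieceIndex::new(4).source_position(), 2);
    /// assert_eq!(PieceIndex::from_source_position(2, SegmentIndex::ZERO), PieceIndex::new(4));
    /// ```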
    #[inline]
    pub const fn is_source(&self) -> bool {
        // Source pieces are interleaved with parity pieces, source first
        self.0 % (RecordedHistorySegment::ERASURE_CODING_RATE.1 as u64)
            < (RecordedHistorySegment::ERASURE_CODING_RATE.0 as u64)
    }

    /// Returns the next source piece index.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn next_source_index(&self) -> PieceIndex {
        PieceIndex::from_source_position(self.source_position() + 1, self.segment_index())
    }

    /// Returns the previous source piece index, if there is one.
    /// Panics if the piece is not a source piece.
    #[inline]
    pub const fn prev_source_index(&self) -> Option<PieceIndex> {
        if self.source_position() == 0 {
            // TODO: when Option::map or ? become const, use them here
            match self.segment_index().checked_sub(SegmentIndex::ONE) {
                Some(segment_index) => Some(PieceIndex::from_source_position(
                    RecordedHistorySegment::NUM_RAW_RECORDS as u32 - 1,
                    segment_index,
                )),
                None => None,
            }
        } else {
            Some(PieceIndex::from_source_position(
                self.source_position() - 1,
                self.segment_index(),
            ))
        }
    }
}

/// Piece offset in sector
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Encode,
    Decode,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct PieceOffset(u16);

impl Step for PieceOffset {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u16::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u16::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u16::backward_checked(start.0, count).map(Self)
    }
}

impl From<u16> for PieceOffset {
    #[inline]
    fn from(original: u16) -> Self {
        Self(original)
    }
}

impl From<PieceOffset> for u16 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        original.0
    }
}

impl From<PieceOffset> for u32 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for u64 {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        Self::from(original.0)
    }
}

impl From<PieceOffset> for usize {
    #[inline]
    fn from(original: PieceOffset) -> Self {
        usize::from(original.0)
    }
}

impl PieceOffset {
    /// Piece offset 0.
    pub const ZERO: PieceOffset = PieceOffset(0);
    /// Piece offset 1.
    pub const ONE: PieceOffset = PieceOffset(1);

    /// Convert piece offset to bytes.
    #[inline]
    pub const fn to_bytes(self) -> [u8; mem::size_of::<u16>()] {
        self.0.to_le_bytes()
    }
}

/// Raw record contained within recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct RawRecord([[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]);

impl fmt::Debug for RawRecord {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0.as_flattened()))
    }
}

impl Default for RawRecord {
    #[inline]
    fn default() -> Self {
        Self([Default::default(); Self::NUM_CHUNKS])
    }
}

impl AsRef<[u8]> for RawRecord {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_slice().as_flattened()
    }
}

impl AsMut<[u8]> for RawRecord {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut_slice().as_flattened_mut()
    }
}

impl From<&RawRecord> for &[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &RawRecord {
    #[inline]
    fn from(value: &[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RawRecord> for &mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &mut RawRecord {
    #[inline]
    fn from(value: &mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&RawRecord> for &[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &RawRecord {
    #[inline]
    fn from(value: &[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RawRecord> for &mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut RawRecord) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &mut RawRecord {
    #[inline]
    fn from(value: &mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl RawRecord {
    /// Number of chunks (scalars) within one raw record.
    pub const NUM_CHUNKS: usize = 2_usize.pow(15);
    /// Size of a raw record in bytes; guaranteed to be a multiple of [`ScalarBytes::SAFE_BYTES`].
    pub const SIZE: usize = ScalarBytes::SAFE_BYTES * Self::NUM_CHUNKS;

    /// Create boxed value without hitting stack overflow
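    ///
    /// A usage sketch; the returned record is fully zeroed:
    ///
    /// ```ignore
    /// let record = RawRecord::new_boxed();
    /// assert!(record.iter().all(|chunk| chunk.iter().all(|&byte| byte == 0)));
    /// ```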
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::new_zeroed().assume_init() }
    }

    /// Convenient conversion from slice of record to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to record for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of record to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(
        value: &mut [Self],
    ) -> &mut [[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to record for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(
        value: &mut [[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]],
    ) -> &mut [Self] {
        // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

/// Record contained within a piece.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct Record([[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]);

impl fmt::Debug for Record {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0.as_flattened()))
    }
}

impl Default for Record {
    #[inline]
    fn default() -> Self {
        Self([Default::default(); Self::NUM_CHUNKS])
    }
}

impl AsRef<[u8]> for Record {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_flattened()
    }
}

impl AsMut<[u8]> for Record {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_flattened_mut()
    }
}

impl From<&Record> for &[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&Record> for &[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]> for &Record {
    #[inline]
    fn from(value: &[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut Record> for &mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS] {
    #[inline]
    fn from(value: &mut Record) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]> for &mut Record {
    #[inline]
    fn from(value: &mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]) -> Self {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        // as inner array, while array of byte arrays has the same alignment as a single byte
        unsafe { mem::transmute(value) }
    }
}

impl Record {
    /// Number of chunks (scalars) within one record.
    pub const NUM_CHUNKS: usize = RawRecord::NUM_CHUNKS;
    /// Number of s-buckets contained within one record (and by extension sector).
    ///
    /// Essentially we chunk records into scalars and erasure code them.
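    ///
    /// For example, with a 1/2 erasure coding rate this doubles the chunk count:
    /// `NUM_S_BUCKETS = 2^15 * 2 / 1 = 65536`.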
    pub const NUM_S_BUCKETS: usize = Self::NUM_CHUNKS
        * RecordedHistorySegment::ERASURE_CODING_RATE.1
        / RecordedHistorySegment::ERASURE_CODING_RATE.0;
    /// Size of a record in bytes after erasure coding [`RawRecord`]; guaranteed to be a multiple
    /// of [`ScalarBytes::FULL_BYTES`].
    pub const SIZE: usize = ScalarBytes::FULL_BYTES * Self::NUM_CHUNKS;

    /// Create boxed value without hitting stack overflow
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::new_zeroed().assume_init() }
    }

    /// Create a vector filled with zeroed records without hitting stack overflow
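    ///
    /// A usage sketch:
    ///
    /// ```ignore
    /// let records = Record::new_zero_vec(2);
    /// assert_eq!(records.len(), 2);
    /// assert!(records.iter().all(|record| record.as_ref().iter().all(|&byte| byte == 0)));
    /// ```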
    #[inline]
    pub fn new_zero_vec(length: usize) -> Vec<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        let mut records = Vec::with_capacity(length);
        {
            let slice = records.spare_capacity_mut();
            // SAFETY: Same memory layout due to `#[repr(transparent)]` on `Record` and
            // `MaybeUninit<[[T; M]; N]>` is guaranteed to have the same layout as
            // `[[MaybeUninit<T>; M]; N]`
            let slice = unsafe {
                slice::from_raw_parts_mut(
                    slice.as_mut_ptr()
                        as *mut [[mem::MaybeUninit<u8>; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS],
                    length,
                )
            };
            for byte in slice.as_flattened_mut().as_flattened_mut() {
                byte.write(0);
            }
        }
        // SAFETY: All values are initialized above.
        unsafe {
            records.set_len(records.capacity());
        }

        records
    }

    /// Convenient conversion from slice of record to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to record for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of record to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(
        value: &mut [Self],
    ) -> &mut [[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to record for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(
        value: &mut [[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]],
    ) -> &mut [Self] {
        // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convert from a record to its raw bytes; assumes a source record that only stores safe
    /// bytes in its chunks.
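    ///
    /// A layout sketch (assuming 32-byte full chunks with a single leading padding byte and
    /// 31 safe bytes):
    ///
    /// ```ignore
    /// let record = Record::new_boxed();
    /// for chunk in record.to_raw_record_chunks() {
    ///     assert_eq!(chunk.len(), ScalarBytes::SAFE_BYTES);
    /// }
    /// ```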
    #[inline]
    pub fn to_raw_record_chunks(
        &self,
    ) -> impl DoubleEndedIterator<Item = &'_ [u8; ScalarBytes::SAFE_BYTES]> + '_ {
        // We have zero byte padding from [`ScalarBytes::SAFE_BYTES`] to
        // [`ScalarBytes::FULL_BYTES`] that we need to skip
        self.iter()
            .map(|bytes| bytes[1..].try_into().expect("Correct length; qed"))
    }

    /// Convert from a record to its mutable raw bytes; assumes a source record that only stores
    /// safe bytes in its chunks.
    #[inline]
    pub fn to_mut_raw_record_chunks(
        &mut self,
    ) -> impl DoubleEndedIterator<Item = &'_ mut [u8; ScalarBytes::SAFE_BYTES]> + '_ {
        self.iter_mut()
            .map(|bytes| (&mut bytes[1..]).try_into().expect("Correct length; qed"))
    }
}

/// Record commitment contained within a piece.
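///
/// With the `serde` feature, human-readable formats use hex encoding while binary formats use a
/// raw byte array; a sketch (assuming `serde_json` is available):
///
/// ```ignore
/// let commitment = RecordCommitment::default();
/// let json = serde_json::to_string(&commitment).unwrap();
/// // 96 hex characters plus surrounding quotes
/// assert_eq!(json.len(), 2 + RecordCommitment::SIZE * 2);
/// ```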
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Hash,
    Deref,
    DerefMut,
    From,
    Into,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
#[repr(transparent)]
pub struct RecordCommitment([u8; RecordCommitment::SIZE]);

impl fmt::Debug for RecordCommitment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordCommitmentBinary(#[serde(with = "BigArray")] [u8; RecordCommitment::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordCommitmentHex(#[serde(with = "hex")] [u8; RecordCommitment::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordCommitment {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordCommitmentHex(self.0).serialize(serializer)
        } else {
            RecordCommitmentBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordCommitment {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordCommitmentHex::deserialize(deserializer)?.0
        } else {
            RecordCommitmentBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordCommitment {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordCommitment {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordCommitment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordCommitment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordCommitment> for &[u8; RecordCommitment::SIZE] {
    #[inline]
    fn from(value: &RecordCommitment) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordCommitment::SIZE]> for &RecordCommitment {
    #[inline]
    fn from(value: &[u8; RecordCommitment::SIZE]) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordCommitment> for &mut [u8; RecordCommitment::SIZE] {
    #[inline]
    fn from(value: &mut RecordCommitment) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordCommitment::SIZE]> for &mut RecordCommitment {
    #[inline]
    fn from(value: &mut [u8; RecordCommitment::SIZE]) -> Self {
        // SAFETY: `RecordCommitment` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordCommitment {
    /// Size of record commitment in bytes.
    pub const SIZE: usize = 48;
}

/// Record witness contained within a piece.
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Hash,
    Deref,
    DerefMut,
    From,
    Into,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
#[repr(transparent)]
pub struct RecordWitness([u8; RecordWitness::SIZE]);

impl fmt::Debug for RecordWitness {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordWitnessBinary(#[serde(with = "BigArray")] [u8; RecordWitness::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct RecordWitnessHex(#[serde(with = "hex")] [u8; RecordWitness::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for RecordWitness {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            RecordWitnessHex(self.0).serialize(serializer)
        } else {
            RecordWitnessBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for RecordWitness {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            RecordWitnessHex::deserialize(deserializer)?.0
        } else {
            RecordWitnessBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for RecordWitness {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for RecordWitness {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for RecordWitness {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for RecordWitness {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&RecordWitness> for &[u8; RecordWitness::SIZE] {
    #[inline]
    fn from(value: &RecordWitness) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; RecordWitness::SIZE]> for &RecordWitness {
    #[inline]
    fn from(value: &[u8; RecordWitness::SIZE]) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut RecordWitness> for &mut [u8; RecordWitness::SIZE] {
    #[inline]
    fn from(value: &mut RecordWitness) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; RecordWitness::SIZE]> for &mut RecordWitness {
    #[inline]
    fn from(value: &mut [u8; RecordWitness::SIZE]) -> Self {
        // SAFETY: `RecordWitness` is `#[repr(transparent)]` and guaranteed to have the same
        // memory layout
        unsafe { mem::transmute(value) }
    }
}

impl RecordWitness {
    /// Size of record witness in bytes.
    pub const SIZE: usize = 48;
}

enum CowBytes {
    Shared(Bytes),
    Owned(BytesMut),
}

impl fmt::Debug for CowBytes {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.as_ref()))
    }
}

impl PartialEq for CowBytes {
    fn eq(&self, other: &Self) -> bool {
        self.as_ref().eq(other.as_ref())
    }
}

impl Eq for CowBytes {}

impl Hash for CowBytes {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_ref().hash(state)
    }
}

impl Clone for CowBytes {
    fn clone(&self) -> Self {
        match self {
            Self::Shared(bytes) => Self::Shared(bytes.clone()),
            // Always return shared clone
            Self::Owned(bytes) => Self::Shared(Bytes::copy_from_slice(bytes)),
        }
    }
}

impl AsRef<[u8]> for CowBytes {
    fn as_ref(&self) -> &[u8] {
        match self {
            CowBytes::Shared(bytes) => bytes.as_ref(),
            CowBytes::Owned(bytes) => bytes.as_ref(),
        }
    }
}

impl AsMut<[u8]> for CowBytes {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        match self {
            CowBytes::Shared(bytes) => {
                *self = CowBytes::Owned(BytesMut::from(mem::take(bytes)));

                let CowBytes::Owned(bytes) = self else {
                    unreachable!("Just replaced; qed");
                };

                bytes.as_mut()
            }
            CowBytes::Owned(bytes) => bytes.as_mut(),
        }
    }
}

/// A piece of archival history in Subspace Network.
///
/// This version is allocated on the heap; for a stack-allocated piece see [`PieceArray`].
///
/// Internally a piece contains a record and the corresponding witness which, together with the
/// segment commitment of the segment this piece belongs to, can be used to verify that the piece
/// belongs to the actual archival history of the blockchain.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Piece(CowBytes);

impl Encode for Piece {
    #[inline]
    fn size_hint(&self) -> usize {
        self.as_ref().size_hint()
    }

    #[inline]
    fn encode_to<O: Output + ?Sized>(&self, output: &mut O) {
        self.as_ref().encode_to(output)
    }

    #[inline]
    fn encode(&self) -> Vec<u8> {
        self.as_ref().encode()
    }

    #[inline]
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        self.as_ref().using_encoded(f)
    }
}

impl EncodeLike for Piece {}

impl Decode for Piece {
    fn decode<I: Input>(input: &mut I) -> Result<Self, parity_scale_codec::Error> {
        let bytes =
            Bytes::decode(input).map_err(|error| error.chain("Could not decode `Piece`"))?;

        if bytes.len() != Self::SIZE {
            return Err(
                parity_scale_codec::Error::from("Incorrect Piece length").chain(format!(
                    "Expected {} bytes, found {} bytes",
                    Self::SIZE,
                    bytes.len()
                )),
            );
        }

        Ok(Piece(CowBytes::Shared(bytes)))
    }
}
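
// A SCALE codec round-trip sketch for the impls above: `Piece` encodes as its length-prefixed
// raw bytes, so decoding an encoded piece yields an equal value.
#[cfg(test)]
#[test]
fn piece_scale_codec_round_trip_sketch() {
    let piece = Piece::default();
    let encoded = piece.encode();
    assert_eq!(piece, Piece::decode(&mut encoded.as_slice()).unwrap());
}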

impl TypeInfo for Piece {
    type Identity = Self;

    fn type_info() -> Type {
        Type::builder()
            .path(Path::new("Piece", module_path!()))
            .docs(&["A piece of archival history in Subspace Network"])
            .composite(
                Fields::unnamed().field(|f| f.ty::<[u8; Piece::SIZE]>().type_name("PieceArray")),
            )
    }
}

#[cfg(feature = "serde")]
impl Serialize for Piece {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let bytes = match &self.0 {
            CowBytes::Shared(bytes) => bytes.as_ref(),
            CowBytes::Owned(bytes) => bytes.as_ref(),
        };

        if serializer.is_human_readable() {
            hex::serde::serialize(bytes, serializer)
        } else {
            bytes.serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for Piece {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            hex::serde::deserialize::<_, Vec<u8>>(deserializer).and_then(|bytes| {
                if bytes.len() == Piece::SIZE {
                    Ok(Bytes::from(bytes))
                } else {
                    Err(serde::de::Error::invalid_length(
                        bytes.len(),
                        &format!("Expected {} bytes", Piece::SIZE).as_str(),
                    ))
                }
            })?
        } else {
            Bytes::deserialize(deserializer)?
        };

        Ok(Piece(CowBytes::Shared(bytes)))
    }
}

impl Default for Piece {
    #[inline]
    fn default() -> Self {
        Self(CowBytes::Owned(BytesMut::zeroed(Self::SIZE)))
    }
}

impl From<Piece> for Vec<u8> {
    #[inline]
    fn from(piece: Piece) -> Self {
        match piece.0 {
            CowBytes::Shared(bytes) => bytes.to_vec(),
            CowBytes::Owned(bytes) => Vec::from(bytes),
        }
    }
}

impl TryFrom<&[u8]> for Piece {
    type Error = ();

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        if slice.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Shared(Bytes::copy_from_slice(slice))))
    }
}

impl TryFrom<Vec<u8>> for Piece {
    type Error = ();

    #[inline]
    fn try_from(vec: Vec<u8>) -> Result<Self, Self::Error> {
        if vec.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Shared(Bytes::from(vec))))
    }
}

impl TryFrom<Bytes> for Piece {
    type Error = ();

    #[inline]
    fn try_from(bytes: Bytes) -> Result<Self, Self::Error> {
        if bytes.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Shared(bytes)))
    }
}

impl TryFrom<BytesMut> for Piece {
    type Error = ();

    #[inline]
    fn try_from(bytes: BytesMut) -> Result<Self, Self::Error> {
        if bytes.len() != Self::SIZE {
            return Err(());
        }

        Ok(Self(CowBytes::Owned(bytes)))
    }
}

impl From<&PieceArray> for Piece {
    #[inline]
    fn from(value: &PieceArray) -> Self {
        Self(CowBytes::Shared(Bytes::copy_from_slice(value.as_ref())))
    }
}

impl Deref for Piece {
    type Target = PieceArray;

    #[inline]
    fn deref(&self) -> &Self::Target {
        <&[u8; Self::SIZE]>::try_from(self.as_ref())
            .expect("Slice of memory has correct length; qed")
            .into()
    }
}

impl DerefMut for Piece {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        <&mut [u8; Self::SIZE]>::try_from(self.as_mut())
            .expect("Slice of memory has correct length; qed")
            .into()
    }
}

impl AsRef<[u8]> for Piece {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8]> for Piece {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

impl Piece {
    /// Size of a piece (in bytes).
    pub const SIZE: usize = Record::SIZE + RecordCommitment::SIZE + RecordWitness::SIZE;

    /// Ensure piece contains cheaply cloneable shared data.
    ///
    /// Internally piece uses a CoW mechanism and can store either mutable owned data or data that
    /// is cheap to clone; calling this method ensures that further clones will not result in
    /// additional memory allocations.
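    ///
    /// A sketch of the intended use:
    ///
    /// ```ignore
    /// let piece = Piece::default().to_shared();
    /// // Cloning shared data only bumps a reference count instead of copying the contents
    /// let clone = piece.clone();
    /// assert_eq!(piece, clone);
    /// ```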
    pub fn to_shared(self) -> Self {
        Self(match self.0 {
            CowBytes::Shared(bytes) => CowBytes::Shared(bytes),
            CowBytes::Owned(bytes) => CowBytes::Shared(bytes.freeze()),
        })
    }
}

/// A piece of archival history in Subspace Network.
///
/// This version is allocated on the stack; for a heap-allocated piece see [`Piece`].
///
/// Internally a piece contains a record and the corresponding witness which, together with the
/// segment commitment of the segment this piece belongs to, can be used to verify that the piece
/// belongs to the actual archival history of the blockchain.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Deref, DerefMut, AsRef, AsMut)]
#[repr(transparent)]
pub struct PieceArray([u8; Piece::SIZE]);

impl fmt::Debug for PieceArray {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

impl Default for PieceArray {
    #[inline]
    fn default() -> Self {
        Self([0u8; Piece::SIZE])
    }
}

impl AsRef<[u8]> for PieceArray {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for PieceArray {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&PieceArray> for &[u8; Piece::SIZE] {
    #[inline]
    fn from(value: &PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&[u8; Piece::SIZE]> for &PieceArray {
    #[inline]
    fn from(value: &[u8; Piece::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut PieceArray> for &mut [u8; Piece::SIZE] {
    #[inline]
    fn from(value: &mut PieceArray) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<&mut [u8; Piece::SIZE]> for &mut PieceArray {
    #[inline]
    fn from(value: &mut [u8; Piece::SIZE]) -> Self {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl PieceArray {
    /// Create boxed value without hitting stack overflow
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // TODO: Should have been just `::new()`, but https://github.com/rust-lang/rust/issues/53827
        // SAFETY: Data structure filled with zeroes is a valid invariant
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }

    /// Split piece into underlying components.
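    ///
    /// The three components are laid out back to back; a size sketch:
    ///
    /// ```ignore
    /// assert_eq!(
    ///     Piece::SIZE,
    ///     Record::SIZE + RecordCommitment::SIZE + RecordWitness::SIZE,
    /// );
    /// ```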
    #[inline]
    pub fn split(&self) -> (&Record, &RecordCommitment, &RecordWitness) {
        let (record, extra) = self.0.split_at(Record::SIZE);
        let (commitment, witness) = extra.split_at(RecordCommitment::SIZE);

        let record = <&[u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let commitment = <&[u8; RecordCommitment::SIZE]>::try_from(commitment)
            .expect("Slice of memory has correct length; qed");
        let witness = <&[u8; RecordWitness::SIZE]>::try_from(witness)
            .expect("Slice of memory has correct length; qed");

        (record.into(), commitment.into(), witness.into())
    }

    /// Split piece into underlying mutable components.
    #[inline]
    pub fn split_mut(&mut self) -> (&mut Record, &mut RecordCommitment, &mut RecordWitness) {
        let (record, extra) = self.0.split_at_mut(Record::SIZE);
        let (commitment, witness) = extra.split_at_mut(RecordCommitment::SIZE);

        let record = <&mut [u8; Record::SIZE]>::try_from(record)
            .expect("Slice of memory has correct length; qed");
        let commitment = <&mut [u8; RecordCommitment::SIZE]>::try_from(commitment)
            .expect("Slice of memory has correct length; qed");
        let witness = <&mut [u8; RecordWitness::SIZE]>::try_from(witness)
            .expect("Slice of memory has correct length; qed");

        (record.into(), commitment.into(), witness.into())
    }

    /// Record contained within a piece.
    #[inline]
    pub fn record(&self) -> &Record {
        self.split().0
    }

    /// Mutable record contained within a piece.
    #[inline]
    pub fn record_mut(&mut self) -> &mut Record {
        self.split_mut().0
    }

    /// Commitment contained within a piece.
    #[inline]
    pub fn commitment(&self) -> &RecordCommitment {
        self.split().1
    }

    /// Mutable commitment contained within a piece.
    #[inline]
    pub fn commitment_mut(&mut self) -> &mut RecordCommitment {
        self.split_mut().1
    }

    /// Witness contained within a piece.
    #[inline]
    pub fn witness(&self) -> &RecordWitness {
        self.split().2
    }

    /// Mutable witness contained within a piece.
    #[inline]
    pub fn witness_mut(&mut self) -> &mut RecordWitness {
        self.split_mut().2
    }

    /// Convenient conversion from slice of piece array to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[[u8; Piece::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to piece array for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[[u8; Piece::SIZE]]) -> &[Self] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of piece array to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [[u8; Piece::SIZE]] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to piece array for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [[u8; Piece::SIZE]]) -> &mut [Self] {
        // SAFETY: `PieceArray` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout
        unsafe { mem::transmute(value) }
    }
}

impl From<Box<PieceArray>> for Vec<u8> {
    fn from(value: Box<PieceArray>) -> Self {
        let mut value = mem::ManuallyDrop::new(value);
        // SAFETY: Always contains fixed allocation of bytes
        unsafe { Vec::from_raw_parts(value.as_mut_ptr(), Piece::SIZE, Piece::SIZE) }
    }
}

/// Flat representation of multiple pieces concatenated for more efficient processing
#[derive(Clone, PartialEq, Eq)]
pub struct FlatPieces(CowBytes);

impl fmt::Debug for FlatPieces {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("FlatPieces").finish_non_exhaustive()
    }
}

impl Deref for FlatPieces {
    type Target = [PieceArray];

    #[inline]
    fn deref(&self) -> &Self::Target {
        let bytes = self.0.as_ref();
        // SAFETY: Bytes slice has length of multiples of piece size and lifetimes of returned data
        // are preserved
        let pieces = unsafe {
            slice::from_raw_parts(
                bytes.as_ptr() as *const [u8; Piece::SIZE],
                bytes.len() / Piece::SIZE,
            )
        };
        PieceArray::slice_from_repr(pieces)
    }
}

impl DerefMut for FlatPieces {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        let bytes = self.0.as_mut();
        // SAFETY: Bytes slice has length of multiples of piece size and lifetimes of returned data
        // are preserved
        let pieces = unsafe {
            slice::from_raw_parts_mut(
                bytes.as_mut_ptr() as *mut [u8; Piece::SIZE],
                bytes.len() / Piece::SIZE,
            )
        };
        PieceArray::slice_mut_from_repr(pieces)
    }
}

impl FlatPieces {
    /// Allocate `FlatPieces` that will hold `piece_count` pieces filled with zeroes
    #[inline]
    pub fn new(piece_count: usize) -> Self {
        Self(CowBytes::Owned(BytesMut::zeroed(piece_count * Piece::SIZE)))
    }

    /// Iterate over all pieces.
    ///
    /// NOTE: Unless [`Self::to_shared`] was called first, the iterator may have to allocate each
    /// piece from scratch, which is rarely the desired behavior.
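    ///
    /// A usage sketch:
    ///
    /// ```ignore
    /// let flat_pieces = FlatPieces::new(4).to_shared();
    /// assert_eq!(flat_pieces.pieces().count(), 4);
    /// ```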
    #[inline]
    pub fn pieces(&self) -> Box<dyn ExactSizeIterator<Item = Piece> + '_> {
        match &self.0 {
            CowBytes::Shared(bytes) => Box::new(
                bytes
                    .chunks_exact(Piece::SIZE)
                    .map(|slice| Piece(CowBytes::Shared(bytes.slice_ref(slice)))),
            ),
            CowBytes::Owned(bytes) => Box::new(
                bytes
                    .chunks_exact(Piece::SIZE)
                    .map(|slice| Piece(CowBytes::Shared(Bytes::copy_from_slice(slice)))),
            ),
        }
    }

    /// Iterator over source pieces (even indices)
    #[inline]
    pub fn source_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
        self.pieces().step_by(2)
    }

    /// Iterator over source pieces (even indices)
    #[inline]
    pub fn source(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
        self.iter().step_by(2)
    }

    /// Mutable iterator over source pieces (even indices)
    #[inline]
    pub fn source_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
        self.iter_mut().step_by(2)
    }

    /// Iterator over parity pieces (odd indices)
    #[inline]
    pub fn parity_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
        self.pieces().skip(1).step_by(2)
    }

    /// Iterator over parity pieces (odd indices)
    #[inline]
    pub fn parity(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
        self.iter().skip(1).step_by(2)
    }

    /// Mutable iterator over parity pieces (odd indices)
    #[inline]
    pub fn parity_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
        self.iter_mut().skip(1).step_by(2)
    }

    /// Ensure flat pieces contain cheaply cloneable shared data.
    ///
    /// Internally flat pieces uses a CoW mechanism and can store either mutable owned data or
    /// data that is cheap to clone; calling this method ensures that further clones and returned
    /// pieces will not result in additional memory allocations.
    pub fn to_shared(self) -> Self {
        Self(match self.0 {
            CowBytes::Shared(bytes) => CowBytes::Shared(bytes),
            CowBytes::Owned(bytes) => CowBytes::Shared(bytes.freeze()),
        })
    }
}

#[cfg(feature = "parallel")]
impl FlatPieces {
    /// Parallel iterator over source pieces (even indices)
    #[inline]
    pub fn par_source(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        self.par_iter().step_by(2)
    }

    /// Mutable parallel iterator over source pieces (even indices)
    #[inline]
    pub fn par_source_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        self.par_iter_mut().step_by(2)
    }

    /// Parallel iterator over parity pieces (odd indices)
    #[inline]
    pub fn par_parity(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        self.par_iter().skip(1).step_by(2)
    }

    /// Mutable parallel iterator over parity pieces (odd indices)
    #[inline]
    pub fn par_parity_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        self.par_iter_mut().skip(1).step_by(2)
    }
}