//! Segments-related data structures.

#[cfg(not(feature = "std"))]
extern crate alloc;

use crate::BlockNumber;
use crate::hashes::{Blake3Hash, blake3_hash};
use crate::pieces::{FlatPieces, Piece, PieceIndex, RawRecord};
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
use core::array::TryFromSliceError;
use core::fmt;
use core::iter::Step;
use core::num::NonZeroU64;
use derive_more::{
    Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
    SubAssign,
};
use parity_scale_codec::{Decode, DecodeWithMemTracking, Encode, MaxEncodedLen};
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;

/// Segment index type.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Encode,
    Decode,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct SegmentIndex(u64);

impl Step for SegmentIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}

impl SegmentIndex {
    /// Segment index 0.
    pub const ZERO: SegmentIndex = SegmentIndex(0);
    /// Segment index 1.
    pub const ONE: SegmentIndex = SegmentIndex(1);

    /// Create new instance.
    #[inline]
    pub const fn new(n: u64) -> Self {
        Self(n)
    }

    /// Get the first piece index in this segment.
    #[inline]
    pub const fn first_piece_index(&self) -> PieceIndex {
        PieceIndex::new(self.0 * ArchivedHistorySegment::NUM_PIECES as u64)
    }

    /// Get the last piece index in this segment.
    #[inline]
    pub const fn last_piece_index(&self) -> PieceIndex {
        PieceIndex::new((self.0 + 1) * ArchivedHistorySegment::NUM_PIECES as u64 - 1)
    }

    /// List of piece indexes that belong to this segment.
    pub fn segment_piece_indexes(&self) -> [PieceIndex; ArchivedHistorySegment::NUM_PIECES] {
        let mut piece_indices = [PieceIndex::ZERO; ArchivedHistorySegment::NUM_PIECES];
        (self.first_piece_index()..=self.last_piece_index())
            .zip(&mut piece_indices)
            .for_each(|(input, output)| {
                *output = input;
            });

        piece_indices
    }

    /// List of piece indexes that belong to this segment, with source pieces first.
    pub fn segment_piece_indexes_source_first(
        &self,
    ) -> [PieceIndex; ArchivedHistorySegment::NUM_PIECES] {
        let mut source_first_piece_indices = [PieceIndex::ZERO; ArchivedHistorySegment::NUM_PIECES];

        let piece_indices = self.segment_piece_indexes();
        piece_indices
            .into_iter()
            .step_by(2)
            .chain(piece_indices.into_iter().skip(1).step_by(2))
            .zip(&mut source_first_piece_indices)
            .for_each(|(input, output)| {
                *output = input;
            });

        source_first_piece_indices
    }

    /// Checked integer subtraction. Computes `self - rhs`, returning `None` if overflow occurred.
    #[inline]
    pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
        match self.0.checked_sub(rhs.0) {
            Some(segment_index) => Some(Self(segment_index)),
            None => None,
        }
    }

    /// Saturating integer subtraction. Computes `self - rhs`, saturating at zero.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self(self.0.saturating_sub(rhs.0))
    }
}
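
// Illustrative compile-time sanity check (a sketch relating the constants defined further down):
// with `NUM_RAW_RECORDS == 128` source records per segment and a 1/2 erasure coding rate, every
// archived segment holds 256 pieces, so the methods above map segment `s` to piece indexes
// `s * 256 ..= s * 256 + 255`.
const _: () = assert!(
    ArchivedHistorySegment::NUM_PIECES == 2 * RecordedHistorySegment::NUM_RAW_RECORDS
);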

/// Commitment to the records of an archived segment, included in [`SegmentHeader`].
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Hash,
    Deref,
    DerefMut,
    From,
    Into,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
#[repr(transparent)]
pub struct SegmentCommitment([u8; SegmentCommitment::SIZE]);

impl fmt::Debug for SegmentCommitment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

// Helper types allowing `SegmentCommitment` to be serialized as hex in human-readable formats
// and as a raw fixed-size byte array otherwise.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentCommitmentBinary(#[serde(with = "BigArray")] [u8; SegmentCommitment::SIZE]);

#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentCommitmentHex(#[serde(with = "hex")] [u8; SegmentCommitment::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for SegmentCommitment {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            SegmentCommitmentHex(self.0).serialize(serializer)
        } else {
            SegmentCommitmentBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentCommitment {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            SegmentCommitmentHex::deserialize(deserializer)?.0
        } else {
            SegmentCommitmentBinary::deserialize(deserializer)?.0
        }))
    }
}

impl Default for SegmentCommitment {
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}

impl TryFrom<&[u8]> for SegmentCommitment {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(slice: &[u8]) -> Result<Self, Self::Error> {
        <[u8; Self::SIZE]>::try_from(slice).map(Self)
    }
}

impl AsRef<[u8]> for SegmentCommitment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for SegmentCommitment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl SegmentCommitment {
    /// Size of segment commitment in bytes.
    pub const SIZE: usize = 48;
}

/// Size of blockchain history in segments.
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(transparent)]
pub struct HistorySize(NonZeroU64);

impl From<SegmentIndex> for HistorySize {
    #[inline]
    fn from(value: SegmentIndex) -> Self {
        Self(NonZeroU64::new(value.0 + 1).expect("Not zero; qed"))
    }
}

impl HistorySize {
    /// History size of one segment.
    pub const ONE: Self = Self(NonZeroU64::new(1).expect("Not zero; qed"));

    /// Create new instance.
    pub const fn new(value: NonZeroU64) -> Self {
        Self(value)
    }

    /// Size of blockchain history in pieces.
    pub const fn in_pieces(&self) -> NonZeroU64 {
        self.0.saturating_mul(
            NonZeroU64::new(ArchivedHistorySegment::NUM_PIECES as u64).expect("Not zero; qed"),
        )
    }

    /// Segment index that corresponds to this history size.
    pub fn segment_index(&self) -> SegmentIndex {
        SegmentIndex::from(self.0.get() - 1)
    }

    /// History size at which the expiration check for a sector happens.
    ///
    /// Returns `None` on overflow.
    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
        self.0.checked_add(min_sector_lifetime.0.get()).map(Self)
    }
}
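
// Illustrative compile-time check: a history of exactly one segment measures one archived
// segment's worth of pieces. This relies only on constants and `const fn`s defined in this
// module.
const _: () = assert!(
    HistorySize::ONE.in_pieces().get() == ArchivedHistorySegment::NUM_PIECES as u64
);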

/// Progress of an archived block.
#[derive(
    Debug,
    Copy,
    Clone,
    PartialEq,
    Eq,
    Ord,
    PartialOrd,
    Hash,
    Encode,
    Decode,
    TypeInfo,
    DecodeWithMemTracking,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub enum ArchivedBlockProgress {
    /// The block has been fully archived.
    Complete,

    /// Number of partially archived bytes of the block.
    Partial(u32),
}

impl Default for ArchivedBlockProgress {
    /// A block is assumed to be complete initially; it can transition into the partial state
    /// later if it does not fully fit into the current segment.
    #[inline]
    fn default() -> Self {
        Self::Complete
    }
}

impl ArchivedBlockProgress {
    /// Returns the number of partially archived bytes if the block is not complete.
    pub fn partial(&self) -> Option<u32> {
        match self {
            Self::Complete => None,
            Self::Partial(number) => Some(*number),
        }
    }

    /// Sets the number of partially archived bytes.
    pub fn set_partial(&mut self, new_partial: u32) {
        *self = Self::Partial(new_partial);
    }
}

/// Last archived block.
#[derive(
    Debug,
    Copy,
    Clone,
    PartialEq,
    Eq,
    Ord,
    PartialOrd,
    Hash,
    Encode,
    Decode,
    TypeInfo,
    DecodeWithMemTracking,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub struct LastArchivedBlock {
    /// Block number.
    pub number: BlockNumber,
    /// Progress of the archived block.
    pub archived_progress: ArchivedBlockProgress,
}

impl LastArchivedBlock {
    /// Returns the number of partially archived bytes for the block.
    pub fn partial_archived(&self) -> Option<u32> {
        self.archived_progress.partial()
    }

    /// Sets the number of partially archived bytes.
    pub fn set_partial_archived(&mut self, new_partial: u32) {
        self.archived_progress.set_partial(new_partial);
    }

    /// Marks the block as fully archived.
    pub fn set_complete(&mut self) {
        self.archived_progress = ArchivedBlockProgress::Complete;
    }
}

/// Segment header for a specific segment.
///
/// Each segment header commits to the records of its segment and contains the hash of the
/// previous segment header, so all segment headers form a chain back to the first segment.
#[derive(
    Debug, Copy, Clone, PartialEq, Eq, Encode, Decode, TypeInfo, Hash, DecodeWithMemTracking,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
pub enum SegmentHeader {
    /// V0 of the segment header data structure.
    #[codec(index = 0)]
    #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
    V0 {
        /// Segment index.
        segment_index: SegmentIndex,
        /// Commitment to the records of the segment.
        segment_commitment: SegmentCommitment,
        /// Hash of the segment header of the previous segment.
        prev_segment_header_hash: Blake3Hash,
        /// Last archived block specified in this segment header.
        last_archived_block: LastArchivedBlock,
    },
}

impl SegmentHeader {
    /// Hash of the whole segment header (BLAKE3 over the SCALE encoding).
    pub fn hash(&self) -> Blake3Hash {
        blake3_hash(&self.encode())
    }

    /// Segment index.
    pub fn segment_index(&self) -> SegmentIndex {
        match self {
            Self::V0 { segment_index, .. } => *segment_index,
        }
    }

    /// Segment commitment.
    pub fn segment_commitment(&self) -> SegmentCommitment {
        match self {
            Self::V0 {
                segment_commitment, ..
            } => *segment_commitment,
        }
    }

    /// Hash of the segment header of the previous segment.
    pub fn prev_segment_header_hash(&self) -> Blake3Hash {
        match self {
            Self::V0 {
                prev_segment_header_hash,
                ..
            } => *prev_segment_header_hash,
        }
    }

    /// Last archived block.
    pub fn last_archived_block(&self) -> LastArchivedBlock {
        match self {
            Self::V0 {
                last_archived_block,
                ..
            } => *last_archived_block,
        }
    }
}

/// Recorded history segment before archiving is applied.
///
/// NOTE: This is a stack-allocated data structure and can cause stack overflow!
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct RecordedHistorySegment([RawRecord; Self::NUM_RAW_RECORDS]);

impl fmt::Debug for RecordedHistorySegment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}

impl Default for RecordedHistorySegment {
    #[inline]
    fn default() -> Self {
        Self([RawRecord::default(); Self::NUM_RAW_RECORDS])
    }
}

impl AsRef<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        RawRecord::slice_to_repr(&self.0)
            .as_flattened()
            .as_flattened()
    }
}

impl AsMut<[u8]> for RecordedHistorySegment {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        RawRecord::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}

impl RecordedHistorySegment {
    /// Number of raw records in one segment of recorded history.
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate for records during the archiving process.
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Size of recorded history segment in bytes (source records only, before erasure coding).
    pub const SIZE: usize = RawRecord::SIZE * Self::NUM_RAW_RECORDS;

    /// Create a boxed value directly on the heap, without constructing it on the stack first
    /// (which could overflow the stack for a value this large).
    #[inline]
    pub fn new_boxed() -> Box<Self> {
        // SAFETY: An all-zero byte pattern is a valid value of this type (an array of zeroed
        // records), so the zero-initialized allocation can be assumed initialized.
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}

/// Archived history segment after archiving is applied.
#[derive(Debug, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(transparent)]
pub struct ArchivedHistorySegment(FlatPieces);

impl Default for ArchivedHistorySegment {
    #[inline]
    fn default() -> Self {
        Self(FlatPieces::new(Self::NUM_PIECES))
    }
}

impl ArchivedHistorySegment {
    /// Number of pieces in one segment of archived history.
    pub const NUM_PIECES: usize = RecordedHistorySegment::NUM_RAW_RECORDS
        * RecordedHistorySegment::ERASURE_CODING_RATE.1
        / RecordedHistorySegment::ERASURE_CODING_RATE.0;
    /// Size of archived history segment in bytes (all pieces, source and parity).
    pub const SIZE: usize = Piece::SIZE * Self::NUM_PIECES;

    /// Ensure the internal piece data is backed by cheaply cloneable shared memory.
    pub fn to_shared(self) -> Self {
        Self(self.0.to_shared())
    }
}
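
#[cfg(test)]
mod tests {
    // Illustrative sketches of the segment arithmetic above rather than an exhaustive test
    // suite. They rely on the constants in this module (128 raw records, 1/2 erasure coding
    // rate, hence 256 pieces per archived segment) and assume `PieceIndex` exposes the usual
    // index-newtype derives (`Debug`, `PartialEq`) needed by `assert_eq!`.
    use super::*;

    #[test]
    fn segment_piece_index_ranges() {
        // Segment `s` covers piece indexes `s * 256 ..= s * 256 + 255`.
        assert_eq!(SegmentIndex::ZERO.first_piece_index(), PieceIndex::new(0));
        assert_eq!(SegmentIndex::ZERO.last_piece_index(), PieceIndex::new(255));
        assert_eq!(SegmentIndex::ONE.first_piece_index(), PieceIndex::new(256));
        assert_eq!(SegmentIndex::ONE.last_piece_index(), PieceIndex::new(511));

        let piece_indexes = SegmentIndex::ONE.segment_piece_indexes();
        assert_eq!(piece_indexes.len(), ArchivedHistorySegment::NUM_PIECES);
        assert_eq!(piece_indexes[0], PieceIndex::new(256));
        assert_eq!(piece_indexes[255], PieceIndex::new(511));
    }

    #[test]
    fn source_first_ordering_lists_even_offsets_before_odd() {
        // `segment_piece_indexes_source_first` emits pieces at even offsets within the segment
        // first, followed by pieces at odd offsets.
        let source_first = SegmentIndex::ZERO.segment_piece_indexes_source_first();
        assert_eq!(source_first[0], PieceIndex::new(0));
        assert_eq!(source_first[127], PieceIndex::new(254));
        assert_eq!(source_first[128], PieceIndex::new(1));
        assert_eq!(source_first[255], PieceIndex::new(255));
    }

    #[test]
    fn history_size_round_trips_through_segment_index() {
        // History size counts segments, so it is always one larger than the index of the most
        // recently archived segment.
        let history_size = HistorySize::from(SegmentIndex::new(41));
        assert_eq!(history_size.segment_index(), SegmentIndex::new(41));
        assert_eq!(
            history_size.in_pieces().get(),
            42 * ArchivedHistorySegment::NUM_PIECES as u64
        );
    }
}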