subspace_core_primitives/hashes.rs

use crate::ScalarBytes;
use core::array::TryFromSliceError;
use core::fmt;
use derive_more::{AsMut, AsRef, Deref, DerefMut, From, Into};
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};

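/// 32-byte BLAKE3 hash wrapped in a dedicated type.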
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
)]
pub struct Blake3Hash([u8; Blake3Hash::SIZE]);

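/// Helper struct for serializing [`Blake3Hash`] as raw bytes in binary formats.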
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct Blake3HashBinary([u8; Blake3Hash::SIZE]);

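/// Helper struct for serializing [`Blake3Hash`] as a hex string in human-readable formats.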
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct Blake3HashHex(#[serde(with = "hex")] [u8; Blake3Hash::SIZE]);

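// Serialize as a hex string for human-readable formats (e.g. JSON) and as raw bytes otherwise.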
#[cfg(feature = "serde")]
impl Serialize for Blake3Hash {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            Blake3HashHex(self.0).serialize(serializer)
        } else {
            Blake3HashBinary(self.0).serialize(serializer)
        }
    }
}

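// Mirrors the `Serialize` impl above: accepts hex from human-readable formats, raw bytes otherwise.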
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for Blake3Hash {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            Blake3HashHex::deserialize(deserializer)?.0
        } else {
            Blake3HashBinary::deserialize(deserializer)?.0
        }))
    }
}

impl fmt::Debug for Blake3Hash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

impl AsRef<[u8]> for Blake3Hash {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for Blake3Hash {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&[u8; Self::SIZE]> for Blake3Hash {
    #[inline]
    fn from(value: &[u8; Self::SIZE]) -> Self {
        Self(*value)
    }
}

impl TryFrom<&[u8]> for Blake3Hash {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        Ok(Self(value.try_into()?))
    }
}

impl Blake3Hash {
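    /// Size of BLAKE3 hash output (in bytes).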
    pub const SIZE: usize = 32;
}

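/// BLAKE3 hashing of a single value.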
pub fn blake3_hash(data: &[u8]) -> Blake3Hash {
    blake3::hash(data).as_bytes().into()
}

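/// BLAKE3 hashing of a single value across multiple threads (worthwhile only for large inputs).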
#[cfg(feature = "parallel")]
#[inline]
pub fn blake3_hash_parallel(data: &[u8]) -> Blake3Hash {
    let mut state = blake3::Hasher::new();
    state.update_rayon(data);
    state.finalize().as_bytes().into()
}

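/// BLAKE3 keyed hashing of a single value.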
#[inline]
pub fn blake3_hash_with_key(key: &[u8; 32], data: &[u8]) -> Blake3Hash {
    blake3::keyed_hash(key, data).as_bytes().into()
}

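/// BLAKE3 keyed hashing of a list of values, hashed as if they were concatenated.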
#[inline]
pub fn blake3_hash_list_with_key(key: &[u8; 32], data: &[&[u8]]) -> Blake3Hash {
    let mut state = blake3::Hasher::new_keyed(key);
    for d in data {
        state.update(d);
    }
    state.finalize().as_bytes().into()
}

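/// BLAKE3 hashing of a list of values, hashed as if they were concatenated.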
#[inline]
pub fn blake3_hash_list(data: &[&[u8]]) -> Blake3Hash {
    let mut state = blake3::Hasher::new();
    for d in data {
        state.update(d);
    }
    state.finalize().as_bytes().into()
}

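/// BLAKE3 hashing of a value, truncated to 254 bits so the result fits into [`ScalarBytes`].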
#[inline]
pub fn blake3_254_hash_to_scalar(data: &[u8]) -> ScalarBytes {
    let mut hash = blake3_hash(data);
    // Clear the top two bits of the first byte to truncate the big-endian hash to 254 bits
    hash[0] &= 0b00111111;
    ScalarBytes(*hash)
}
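
// Illustrative usage sketch (not part of the original file). Assumes `ScalarBytes` wraps a
// `[u8; 32]` whose field is visible within this crate, as implied by `ScalarBytes(*hash)` above.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn list_hashing_matches_concatenation() {
        // Feeding pieces as a list is equivalent to hashing their concatenation.
        assert_eq!(
            blake3_hash(b"hello world"),
            blake3_hash_list(&[b"hello", b" ", b"world"]),
        );

        // The same holds for the keyed variants.
        let key = [7u8; 32];
        assert_eq!(
            blake3_hash_with_key(&key, b"data"),
            blake3_hash_list_with_key(&key, &[b"da", b"ta"]),
        );
    }

    #[test]
    fn scalar_hash_is_truncated_to_254_bits() {
        // The top two bits of the first (most significant) byte must always be cleared.
        let scalar = blake3_254_hash_to_scalar(b"data");
        assert_eq!(scalar.0[0] & 0b1100_0000, 0);
    }
}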