subspace_core_primitives/hashes.rs

use crate::ScalarBytes;
use core::array::TryFromSliceError;
use core::fmt;
use derive_more::{AsMut, AsRef, Deref, DerefMut, From, Into};
use parity_scale_codec::{Decode, DecodeWithMemTracking, Encode, MaxEncodedLen};
use scale_info::TypeInfo;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "serde")]
use serde::{Deserializer, Serializer};

/// BLAKE3 hash output transparent wrapper.
#[derive(
    Default,
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    From,
    Into,
    AsRef,
    AsMut,
    Deref,
    DerefMut,
    Encode,
    Decode,
    TypeInfo,
    MaxEncodedLen,
    DecodeWithMemTracking,
)]
pub struct Blake3Hash([u8; Blake3Hash::SIZE]);

/// Serde representation used for non-human-readable (binary) formats: raw bytes.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct Blake3HashBinary([u8; Blake3Hash::SIZE]);

/// Serde representation used for human-readable formats: hex-encoded string.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct Blake3HashHex(#[serde(with = "hex")] [u8; Blake3Hash::SIZE]);

#[cfg(feature = "serde")]
impl Serialize for Blake3Hash {
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            Blake3HashHex(self.0).serialize(serializer)
        } else {
            Blake3HashBinary(self.0).serialize(serializer)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for Blake3Hash {
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(Self(if deserializer.is_human_readable() {
            Blake3HashHex::deserialize(deserializer)?.0
        } else {
            Blake3HashBinary::deserialize(deserializer)?.0
        }))
    }
}
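
// A minimal sketch of the serialization behavior above: human-readable formats
// see the hash as a hex string, binary formats get the raw 32 bytes. Assumes
// `serde_json` is available as a dev-dependency; the test module name is
// illustrative only.
#[cfg(all(test, feature = "serde"))]
mod serde_representation_example {
    use super::*;

    #[test]
    fn human_readable_hash_is_hex_encoded() {
        let hash = blake3_hash(b"example");
        // JSON is a human-readable format, so `Blake3HashHex` is used and the
        // value becomes a quoted hex string.
        let json = serde_json::to_string(&hash).unwrap();
        assert_eq!(json.trim_matches('"'), hex::encode(hash.0));
        // And it round-trips back into the same hash.
        let decoded: Blake3Hash = serde_json::from_str(&json).unwrap();
        assert_eq!(decoded, hash);
    }
}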

impl fmt::Debug for Blake3Hash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

impl AsRef<[u8]> for Blake3Hash {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl AsMut<[u8]> for Blake3Hash {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl From<&[u8; Self::SIZE]> for Blake3Hash {
    #[inline]
    fn from(value: &[u8; Self::SIZE]) -> Self {
        Self(*value)
    }
}

impl TryFrom<&[u8]> for Blake3Hash {
    type Error = TryFromSliceError;

    #[inline]
    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        Ok(Self(value.try_into()?))
    }
}

impl Blake3Hash {
    /// Size of BLAKE3 hash output (in bytes).
    pub const SIZE: usize = 32;
}

/// BLAKE3 hashing of a single value.
pub fn blake3_hash(data: &[u8]) -> Blake3Hash {
    blake3::hash(data).as_bytes().into()
}

/// BLAKE3 hashing of a single value in parallel (via Rayon).
#[cfg(feature = "parallel")]
#[inline]
pub fn blake3_hash_parallel(data: &[u8]) -> Blake3Hash {
    let mut state = blake3::Hasher::new();
    state.update_rayon(data);
    state.finalize().as_bytes().into()
}

/// BLAKE3 keyed hashing of a single value.
#[inline]
pub fn blake3_hash_with_key(key: &[u8; 32], data: &[u8]) -> Blake3Hash {
    blake3::keyed_hash(key, data).as_bytes().into()
}

/// BLAKE3 keyed hashing of a list of values.
#[inline]
pub fn blake3_hash_list_with_key(key: &[u8; 32], data: &[&[u8]]) -> Blake3Hash {
    let mut state = blake3::Hasher::new_keyed(key);
    for d in data {
        state.update(d);
    }
    state.finalize().as_bytes().into()
}

/// BLAKE3 hashing of a list of values.
#[inline]
pub fn blake3_hash_list(data: &[&[u8]]) -> Blake3Hash {
    let mut state = blake3::Hasher::new();
    for d in data {
        state.update(d);
    }
    state.finalize().as_bytes().into()
}

/// BLAKE3 hashing of a single value, truncated to 254 bits and returned as `ScalarBytes`.
#[inline]
pub fn blake3_254_hash_to_scalar(data: &[u8]) -> ScalarBytes {
    let mut hash = blake3_hash(data);
    // Clear the two most significant bits of the first byte so the value is at most 254 bits.
    hash[0] &= 0b00111111;
    ScalarBytes(*hash)
}
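
// A minimal usage sketch of the hashing helpers above. Assumes `ScalarBytes`
// exposes its inner byte array as field `.0` (as used in
// `blake3_254_hash_to_scalar`); the test module and test names are
// illustrative only.
#[cfg(test)]
mod usage_examples {
    use super::*;

    #[test]
    fn hashing_a_list_matches_hashing_the_concatenation() {
        // BLAKE3's streaming `update` means feeding slices one by one is
        // equivalent to hashing their concatenation.
        assert_eq!(
            blake3_hash_list(&[b"foo".as_slice(), b"bar".as_slice()]),
            blake3_hash(b"foobar"),
        );
        assert_eq!(
            blake3_hash_list_with_key(&[0u8; 32], &[b"foo".as_slice(), b"bar".as_slice()]),
            blake3_hash_with_key(&[0u8; 32], b"foobar"),
        );
    }

    #[test]
    fn hash_to_scalar_clears_the_two_most_significant_bits() {
        // After masking, the first byte's top two bits are zero, so the value
        // fits into 254 bits.
        let scalar = blake3_254_hash_to_scalar(b"some data");
        assert_eq!(scalar.0[0] & 0b1100_0000, 0);
    }

    #[test]
    fn fixed_size_slices_convert_into_hashes() {
        // `TryFrom<&[u8]>` succeeds only for slices of exactly `Blake3Hash::SIZE` bytes.
        let bytes = [7u8; Blake3Hash::SIZE];
        assert_eq!(
            Blake3Hash::try_from(bytes.as_slice()).unwrap(),
            Blake3Hash::from(bytes),
        );
        assert!(Blake3Hash::try_from([0u8; 31].as_slice()).is_err());
    }
}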