//! Block import for the Subspace consensus protocol: verifies Subspace-specific consensus rules
//! (solution, solution range, proof of time, segment commitments, inherents) before delegating to
//! an inner block import.

use crate::archiver::SegmentHeadersStore;
use crate::verifier::VerificationError;
use crate::{aux_schema, slot_worker, SubspaceLink};
use futures::channel::mpsc;
use futures::StreamExt;
use sc_client_api::backend::AuxStore;
use sc_client_api::BlockBackend;
use sc_consensus::block_import::{
BlockCheckParams, BlockImport, BlockImportParams, ForkChoiceStrategy, ImportResult,
};
use sc_consensus::StateAction;
use sc_proof_of_time::verifier::PotVerifier;
use sp_api::{ApiError, ApiExt, ProvideRuntimeApi};
use sp_block_builder::BlockBuilder as BlockBuilderApi;
use sp_blockchain::HeaderBackend;
use sp_consensus_slots::Slot;
use sp_consensus_subspace::digests::{
extract_pre_digest, extract_subspace_digest_items, SubspaceDigestItems,
};
use sp_consensus_subspace::{PotNextSlotInput, SubspaceApi, SubspaceJustification};
use sp_inherents::{CreateInherentDataProviders, InherentDataProvider};
use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor, One};
use sp_runtime::Justifications;
use std::marker::PhantomData;
use std::sync::Arc;
use subspace_core_primitives::sectors::SectorId;
use subspace_core_primitives::segments::{HistorySize, SegmentHeader, SegmentIndex};
use subspace_core_primitives::solutions::SolutionRange;
use subspace_core_primitives::{BlockNumber, PublicKey};
use subspace_proof_of_space::Table;
use subspace_verification::{
    calculate_block_weight, Error as VerificationPrimitiveError, PieceCheckParams,
    VerifySolutionParams,
};
use tracing::warn;

/// Notification with the number of the block that is about to be imported and an acknowledgement
/// sender. Block import waits for all acknowledgements (until every sender clone is dropped)
/// before proceeding, which lets subscribers pause imports while they catch up.
#[derive(Debug, Clone)]
pub struct BlockImportingNotification<Block>
where
    Block: BlockT,
{
    /// Number of the block being imported
    pub block_number: NumberFor<Block>,
    /// Sender used to acknowledge the notification; drop it (and all of its clones) to let block
    /// import continue
    pub acknowledgement_sender: mpsc::Sender<()>,
}

/// Errors encountered during Subspace block import
#[derive(Debug, thiserror::Error)]
pub enum Error<Header: HeaderT> {
#[error("Inner block import error: {0}")]
InnerBlockImportError(#[from] sp_consensus::Error),
#[error("Digest item error: {0}")]
DigestItemError(#[from] sp_consensus_subspace::digests::Error),
#[error("Parent ({0}) of {1} unavailable. Cannot import")]
ParentUnavailable(Header::Hash, Header::Hash),
#[error("Genesis block unavailable. Cannot import")]
GenesisUnavailable,
#[error("Slot number must increase: parent slot: {0}, this slot: {1}")]
SlotMustIncrease(Slot, Slot),
#[error("Header {0:?} has a bad seal")]
HeaderBadSeal(Header::Hash),
#[error("Header {0:?} is unsealed")]
HeaderUnsealed(Header::Hash),
#[error("Bad reward signature on {0:?}")]
BadRewardSignature(Header::Hash),
#[error("Missing Subspace justification")]
MissingSubspaceJustification,
#[error("Invalid Subspace justification: {0}")]
InvalidSubspaceJustification(codec::Error),
#[error("Invalid Subspace justification contents")]
InvalidSubspaceJustificationContents,
#[error("Invalid proof of time")]
InvalidProofOfTime,
#[error(
"Solution distance {solution_distance} is outside of solution range \
{half_solution_range} (half of actual solution range) for slot {slot}"
)]
OutsideOfSolutionRange {
slot: Slot,
half_solution_range: SolutionRange,
solution_distance: SolutionRange,
},
#[error("Invalid proof of space")]
InvalidProofOfSpace,
#[error("Invalid audit chunk offset")]
InvalidAuditChunkOffset,
#[error("Invalid chunk: {0}")]
InvalidChunk(String),
#[error("Invalid chunk witness")]
InvalidChunkWitness,
#[error("Piece verification failed")]
InvalidPieceOffset {
slot: Slot,
piece_offset: u16,
max_pieces_in_sector: u16,
},
#[error("Piece verification failed for slot {0}")]
InvalidPiece(Slot),
#[error("Invalid solution range for block {0}")]
InvalidSolutionRange(Header::Hash),
#[error("Invalid set of segment headers")]
InvalidSetOfSegmentHeaders,
#[error("Stored segment header extrinsic was not found: {0:?}")]
SegmentHeadersExtrinsicNotFound(Vec<SegmentHeader>),
#[error("Segment header for index {0} not found")]
SegmentHeaderNotFound(SegmentIndex),
#[error(
"Different segment commitment for segment index {0} was found in storage, likely fork \
below archiving point"
)]
DifferentSegmentCommitment(SegmentIndex),
#[error("Segment commitment for segment index {0} not found")]
SegmentCommitmentNotFound(SegmentIndex),
#[error("Sector expired")]
SectorExpired {
expiration_history_size: HistorySize,
current_history_size: HistorySize,
},
#[error("Invalid history size")]
InvalidHistorySize,
#[error("Only root plot public key is allowed")]
OnlyRootPlotPublicKeyAllowed,
#[error("Checking inherents failed: {0}")]
CheckInherents(sp_inherents::Error),
#[error("Checking inherents unhandled error: {}", String::from_utf8_lossy(.0))]
CheckInherentsUnhandled(sp_inherents::InherentIdentifier),
#[error("Creating inherents failed: {0}")]
CreateInherents(sp_inherents::Error),
#[error(transparent)]
Client(#[from] sp_blockchain::Error),
#[error(transparent)]
RuntimeApi(#[from] ApiError),
}

impl<Header> From<VerificationError<Header>> for Error<Header>
where
Header: HeaderT,
{
#[inline]
fn from(error: VerificationError<Header>) -> Self {
match error {
VerificationError::HeaderBadSeal(block_hash) => Error::HeaderBadSeal(block_hash),
VerificationError::HeaderUnsealed(block_hash) => Error::HeaderUnsealed(block_hash),
VerificationError::BadRewardSignature(block_hash) => {
Error::BadRewardSignature(block_hash)
}
VerificationError::MissingSubspaceJustification => Error::MissingSubspaceJustification,
VerificationError::InvalidSubspaceJustification(error) => {
Error::InvalidSubspaceJustification(error)
}
VerificationError::InvalidSubspaceJustificationContents => {
Error::InvalidSubspaceJustificationContents
}
VerificationError::InvalidProofOfTime => Error::InvalidProofOfTime,
VerificationError::VerificationError(slot, error) => match error {
VerificationPrimitiveError::InvalidPieceOffset {
piece_offset,
max_pieces_in_sector,
} => Error::InvalidPieceOffset {
slot,
piece_offset,
max_pieces_in_sector,
},
VerificationPrimitiveError::InvalidPiece => Error::InvalidPiece(slot),
VerificationPrimitiveError::OutsideSolutionRange {
half_solution_range,
solution_distance,
} => Error::OutsideOfSolutionRange {
slot,
half_solution_range,
solution_distance,
},
VerificationPrimitiveError::InvalidProofOfSpace => Error::InvalidProofOfSpace,
VerificationPrimitiveError::InvalidAuditChunkOffset => {
Error::InvalidAuditChunkOffset
}
VerificationPrimitiveError::InvalidChunk(error) => Error::InvalidChunk(error),
VerificationPrimitiveError::InvalidChunkWitness => Error::InvalidChunkWitness,
VerificationPrimitiveError::SectorExpired {
expiration_history_size,
current_history_size,
} => Error::SectorExpired {
expiration_history_size,
current_history_size,
},
VerificationPrimitiveError::InvalidHistorySize => Error::InvalidHistorySize,
},
}
}
}

impl<Header> From<Error<Header>> for String
where
Header: HeaderT,
{
#[inline]
fn from(error: Error<Header>) -> String {
error.to_string()
}
}

/// A block import for Subspace. Wraps an inner [`BlockImport`] and performs Subspace-specific
/// verification (solution and solution range, proof of time, segment commitments, inherents) as
/// well as fork-choice bookkeeping before delegating to the inner import.
pub struct SubspaceBlockImport<PosTable, Block, Client, I, CIDP, AS>
where
    Block: BlockT,
{
    /// Inner block import that verified blocks are delegated to
    inner: I,
    /// Client used to query chain state and the runtime API
    client: Arc<Client>,
    /// Shared Subspace consensus state (chain constants, notification senders, KZG)
    subspace_link: SubspaceLink<Block>,
    /// Factory for the inherent data providers used when checking inherents
    create_inherent_data_providers: CIDP,
    /// Store of segment headers used to look up segment commitments
    segment_headers_store: SegmentHeadersStore<AS>,
    /// Proof-of-time verifier
    pot_verifier: PotVerifier,
    _pos_table: PhantomData<PosTable>,
}

impl<PosTable, Block, I, Client, CIDP, AS> Clone
for SubspaceBlockImport<PosTable, Block, Client, I, CIDP, AS>
where
Block: BlockT,
I: Clone,
CIDP: Clone,
{
fn clone(&self) -> Self {
SubspaceBlockImport {
inner: self.inner.clone(),
client: self.client.clone(),
subspace_link: self.subspace_link.clone(),
create_inherent_data_providers: self.create_inherent_data_providers.clone(),
segment_headers_store: self.segment_headers_store.clone(),
pot_verifier: self.pot_verifier.clone(),
_pos_table: PhantomData,
}
}
}

impl<PosTable, Block, Client, I, CIDP, AS> SubspaceBlockImport<PosTable, Block, Client, I, CIDP, AS>
where
PosTable: Table,
Block: BlockT,
Client: ProvideRuntimeApi<Block> + BlockBackend<Block> + HeaderBackend<Block> + AuxStore,
Client::Api: BlockBuilderApi<Block> + SubspaceApi<Block, PublicKey> + ApiExt<Block>,
CIDP: CreateInherentDataProviders<Block, ()> + Send + Sync + 'static,
AS: AuxStore + Send + Sync + 'static,
BlockNumber: From<<Block::Header as HeaderT>::Number>,
{
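    /// Create a new `SubspaceBlockImport` that wraps an inner block import and runs
    /// Subspace-specific verification before delegating to it.
    ///
    /// A rough wiring sketch; the surrounding setup is elided and the variable names (`client`,
    /// `subspace_link`, `cidp`, `segment_headers_store`, `pot_verifier`) are placeholders for
    /// values constructed elsewhere in the node service:
    ///
    /// ```ignore
    /// let block_import = SubspaceBlockImport::<PosTable, _, _, _, _, _>::new(
    ///     client.clone(),
    ///     client.clone(), // inner block import, commonly the client itself
    ///     subspace_link.clone(),
    ///     cidp, // impl CreateInherentDataProviders<Block, ()>
    ///     segment_headers_store.clone(),
    ///     pot_verifier.clone(),
    /// );
    /// ```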
pub fn new(
client: Arc<Client>,
block_import: I,
subspace_link: SubspaceLink<Block>,
create_inherent_data_providers: CIDP,
segment_headers_store: SegmentHeadersStore<AS>,
pot_verifier: PotVerifier,
) -> Self {
Self {
client,
inner: block_import,
subspace_link,
create_inherent_data_providers,
segment_headers_store,
pot_verifier,
_pos_table: PhantomData,
}
}

    /// Run Subspace-specific verification of a block that is being imported: enforce the root
    /// plot public key restriction (if any), slot progression, the expected solution range, the
    /// proof-of-time justification, the solution itself and, when the block body is available,
    /// the inherents.
    async fn block_import_verification(
&self,
block_hash: Block::Hash,
header: Block::Header,
extrinsics: Option<Vec<Block::Extrinsic>>,
root_plot_public_key: &Option<PublicKey>,
subspace_digest_items: &SubspaceDigestItems<PublicKey>,
justifications: &Option<Justifications>,
) -> Result<(), Error<Block::Header>> {
let block_number = *header.number();
let parent_hash = *header.parent_hash();
let pre_digest = &subspace_digest_items.pre_digest;
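        // When the chain restricts block authoring to a single root plot public key, reject
        // solutions produced with any other key.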
if let Some(root_plot_public_key) = root_plot_public_key {
if &pre_digest.solution().public_key != root_plot_public_key {
return Err(Error::OnlyRootPlotPublicKeyAllowed);
}
}
let parent_header = self
.client
.header(parent_hash)?
.ok_or(Error::ParentUnavailable(parent_hash, block_hash))?;
let parent_slot = extract_pre_digest(&parent_header).map(|d| d.slot())?;
if pre_digest.slot() <= parent_slot {
return Err(Error::SlotMustIncrease(parent_slot, pre_digest.slot()));
}
let parent_subspace_digest_items = if block_number.is_one() {
None
} else {
Some(extract_subspace_digest_items::<_, PublicKey>(
&parent_header,
)?)
};
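        // The block must use the solution range implied by its parent: the parent's announced
        // "next" solution range if present, otherwise the parent's own solution range. Block #1
        // takes the solution range from the genesis state instead.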
let correct_solution_range = if block_number.is_one() {
slot_worker::extract_solution_ranges_for_block(self.client.as_ref(), parent_hash)?.0
} else {
let parent_subspace_digest_items = parent_subspace_digest_items
.as_ref()
.expect("Always Some for non-first block; qed");
match parent_subspace_digest_items.next_solution_range {
Some(solution_range) => solution_range,
None => parent_subspace_digest_items.solution_range,
}
};
if subspace_digest_items.solution_range != correct_solution_range {
return Err(Error::InvalidSolutionRange(block_hash));
}
let chain_constants = self.subspace_link.chain_constants();
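        // A Subspace justification with proof-of-time checkpoints is mandatory: it must start
        // from the correct seed and contain exactly one checkpoint per slot between the parent's
        // future slot (or the genesis slot for block #1) and this block's future slot.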
{
let Some(subspace_justification) = justifications
.as_ref()
.and_then(|justifications| {
justifications
.iter()
.find_map(SubspaceJustification::try_from_justification)
})
.transpose()
.map_err(Error::InvalidSubspaceJustification)?
else {
return Err(Error::MissingSubspaceJustification);
};
let SubspaceJustification::PotCheckpoints { seed, checkpoints } =
subspace_justification;
let future_slot = pre_digest.slot() + chain_constants.block_authoring_delay();
if block_number.is_one() {
if seed != self.pot_verifier.genesis_seed() {
return Err(Error::InvalidSubspaceJustificationContents);
}
if checkpoints.len() as u64 != *future_slot {
return Err(Error::InvalidSubspaceJustificationContents);
}
} else {
let parent_subspace_digest_items = parent_subspace_digest_items
.as_ref()
.expect("Always Some for non-first block; qed");
let parent_future_slot = parent_slot + chain_constants.block_authoring_delay();
let correct_input_parameters = PotNextSlotInput::derive(
subspace_digest_items.pot_slot_iterations,
parent_future_slot,
parent_subspace_digest_items
.pre_digest
.pot_info()
.future_proof_of_time(),
&subspace_digest_items.pot_parameters_change,
);
if seed != correct_input_parameters.seed {
return Err(Error::InvalidSubspaceJustificationContents);
}
if checkpoints.len() as u64 != (*future_slot - *parent_future_slot) {
return Err(Error::InvalidSubspaceJustificationContents);
}
}
}
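        // Verify the solution itself: derive the audited sector and piece from the solution and
        // check them against the segment commitment recorded in the segment headers store.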
let sector_id = SectorId::new(
pre_digest.solution().public_key.hash(),
pre_digest.solution().sector_index,
pre_digest.solution().history_size,
);
let max_pieces_in_sector = self
.client
.runtime_api()
.max_pieces_in_sector(parent_hash)?;
let piece_index = sector_id.derive_piece_index(
pre_digest.solution().piece_offset,
pre_digest.solution().history_size,
max_pieces_in_sector,
chain_constants.recent_segments(),
chain_constants.recent_history_fraction(),
);
let segment_index = piece_index.segment_index();
let segment_commitment = self
.segment_headers_store
.get_segment_header(segment_index)
.map(|segment_header| segment_header.segment_commitment())
.ok_or(Error::SegmentCommitmentNotFound(segment_index))?;
let sector_expiration_check_segment_commitment = self
.segment_headers_store
.get_segment_header(
subspace_digest_items
.pre_digest
.solution()
.history_size
.sector_expiration_check(chain_constants.min_sector_lifetime())
.ok_or(Error::InvalidHistorySize)?
.segment_index(),
)
.map(|segment_header| segment_header.segment_commitment());
subspace_verification::verify_solution::<PosTable, _>(
pre_digest.solution(),
pre_digest.slot().into(),
&VerifySolutionParams {
proof_of_time: subspace_digest_items.pre_digest.pot_info().proof_of_time(),
solution_range: subspace_digest_items.solution_range,
piece_check_params: Some(PieceCheckParams {
max_pieces_in_sector,
segment_commitment,
recent_segments: chain_constants.recent_segments(),
recent_history_fraction: chain_constants.recent_history_fraction(),
min_sector_lifetime: chain_constants.min_sector_lifetime(),
current_history_size: self.client.runtime_api().history_size(parent_hash)?,
sector_expiration_check_segment_commitment,
}),
},
&self.subspace_link.kzg,
)
.map_err(|error| VerificationError::VerificationError(pre_digest.slot(), error))?;
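        // When the block body is available, check inherents using inherent data providers
        // created for the parent block.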
if let Some(extrinsics) = extrinsics {
let create_inherent_data_providers = self
.create_inherent_data_providers
.create_inherent_data_providers(parent_hash, ())
.await
.map_err(|error| Error::Client(sp_blockchain::Error::from(error)))?;
let inherent_data = create_inherent_data_providers
.create_inherent_data()
.await
.map_err(Error::CreateInherents)?;
let inherent_res = self.client.runtime_api().check_inherents(
parent_hash,
Block::new(header, extrinsics),
inherent_data,
)?;
if !inherent_res.ok() {
for (i, e) in inherent_res.into_errors() {
match create_inherent_data_providers
.try_handle_error(&i, &e)
.await
{
Some(res) => res.map_err(Error::CheckInherents)?,
None => return Err(Error::CheckInherentsUnhandled(i)),
}
}
}
}
Ok(())
}
}

#[async_trait::async_trait]
impl<PosTable, Block, Client, Inner, CIDP, AS> BlockImport<Block>
for SubspaceBlockImport<PosTable, Block, Client, Inner, CIDP, AS>
where
PosTable: Table,
Block: BlockT,
Inner: BlockImport<Block, Error = sp_consensus::Error> + Send + Sync,
Client: ProvideRuntimeApi<Block>
+ BlockBackend<Block>
+ HeaderBackend<Block>
+ AuxStore
+ Send
+ Sync,
Client::Api: BlockBuilderApi<Block> + SubspaceApi<Block, PublicKey> + ApiExt<Block>,
CIDP: CreateInherentDataProviders<Block, ()> + Send + Sync + 'static,
AS: AuxStore + Send + Sync + 'static,
BlockNumber: From<<Block::Header as HeaderT>::Number>,
{
type Error = Error<Block::Header>;

    async fn import_block(
&self,
mut block: BlockImportParams<Block>,
) -> Result<ImportResult, Self::Error> {
let block_hash = block.post_hash();
let block_number = *block.header.number();
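        // Blocks already in the chain are passed straight to the inner block import (explicitly
        // not becoming the new best block); unknown blocks go through full Subspace handling
        // below.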
match self.client.status(block_hash)? {
sp_blockchain::BlockStatus::InChain => {
block.fork_choice = Some(ForkChoiceStrategy::Custom(false));
return self
.inner
.import_block(block)
.await
.map_err(Error::InnerBlockImportError);
}
sp_blockchain::BlockStatus::Unknown => {}
}
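        // Extract Subspace digest items and, unless storage changes are already supplied with
        // the block (e.g. a locally authored block or state sync), run full Subspace verification
        // against the parent.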
let subspace_digest_items = extract_subspace_digest_items(&block.header)?;
if !matches!(block.state_action, StateAction::ApplyChanges(_)) {
let root_plot_public_key = self
.client
.runtime_api()
.root_plot_public_key(*block.header.parent_hash())?;
self.block_import_verification(
block_hash,
block.header.clone(),
block.body.clone(),
&root_plot_public_key,
&subspace_digest_items,
&block.justifications,
)
.await?;
}
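        // Compute the cumulative block weight (parent weight plus the weight contributed by this
        // block's solution range) and stage it for auxiliary storage alongside the block.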
let parent_weight = if block_number.is_one() {
0
} else {
aux_schema::load_block_weight(self.client.as_ref(), block.header.parent_hash())?
.unwrap_or_default()
};
let added_weight = calculate_block_weight(subspace_digest_items.solution_range);
let total_weight = parent_weight + added_weight;
aux_schema::write_block_weight(block_hash, total_weight, |values| {
block
.auxiliary
.extend(values.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec()))))
});
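        // Segment commitments in the block's digest must match the locally stored segment
        // headers; a mismatch most likely indicates a fork below the archiving point.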
for (&segment_index, segment_commitment) in &subspace_digest_items.segment_commitments {
let found_segment_commitment = self
.segment_headers_store
.get_segment_header(segment_index)
.ok_or_else(|| Error::SegmentHeaderNotFound(segment_index))?
.segment_commitment();
if &found_segment_commitment != segment_commitment {
warn!(
"Different segment commitment for segment index {} was found in storage, \
likely fork below archiving point. expected {:?}, found {:?}",
segment_index, segment_commitment, found_segment_commitment
);
return Err(Error::DifferentSegmentCommitment(segment_index));
}
}
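        // The imported block becomes the new best block only if its total weight exceeds that of
        // the current best block.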
let fork_choice = {
let info = self.client.info();
let last_best_weight = if &info.best_hash == block.header.parent_hash() {
parent_weight
} else {
aux_schema::load_block_weight(&*self.client, info.best_hash)?.unwrap_or_default()
};
ForkChoiceStrategy::Custom(total_weight > last_best_weight)
};
block.fork_choice = Some(fork_choice);
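        // Notify block-importing subscribers and wait for all acknowledgements (the loop ends
        // once every sender clone has been dropped) before handing the block to the inner import.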
let (acknowledgement_sender, mut acknowledgement_receiver) = mpsc::channel(0);
self.subspace_link
.block_importing_notification_sender
.notify(move || BlockImportingNotification {
block_number,
acknowledgement_sender,
});
        while acknowledgement_receiver.next().await.is_some() {}
self.inner
.import_block(block)
.await
.map_err(Error::InnerBlockImportError)
}

    async fn check_block(
&self,
block: BlockCheckParams<Block>,
) -> Result<ImportResult, Self::Error> {
self.inner.check_block(block).await.map_err(Into::into)
}
}