feat(collator): add export PoV on slot-based collator #7585

Open · wants to merge 6 commits into master · Changes from 5 commits
51 changes: 5 additions & 46 deletions cumulus/client/consensus/aura/src/collators/lookahead.rs
@@ -40,14 +40,14 @@ use cumulus_primitives_aura::AuraUnincludedSegmentApi;
use cumulus_primitives_core::{ClaimQueueOffset, CollectCollationInfo, PersistedValidationData};
use cumulus_relay_chain_interface::RelayChainInterface;

use polkadot_node_primitives::{PoV, SubmitCollationParams};
use polkadot_node_primitives::SubmitCollationParams;
use polkadot_node_subsystem::messages::CollationGenerationMessage;
use polkadot_overseer::Handle as OverseerHandle;
use polkadot_primitives::{
vstaging::DEFAULT_CLAIM_QUEUE_OFFSET, BlockNumber as RBlockNumber, CollatorPair, Hash as RHash,
HeadData, Id as ParaId, OccupiedCoreAssumption,
vstaging::DEFAULT_CLAIM_QUEUE_OFFSET, CollatorPair, Id as ParaId, OccupiedCoreAssumption,
};

use crate::{collator as collator_util, export_pov_to_path};
use futures::prelude::*;
use sc_client_api::{backend::AuxStore, BlockBackend, BlockOf};
use sc_consensus::BlockImport;
@@ -58,49 +58,8 @@ use sp_consensus_aura::{AuraApi, Slot};
use sp_core::crypto::Pair;
use sp_inherents::CreateInherentDataProviders;
use sp_keystore::KeystorePtr;
use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Member, NumberFor};
use std::{
fs::{self, File},
path::PathBuf,
sync::Arc,
time::Duration,
};

use crate::{collator as collator_util, LOG_TARGET};

/// Export the given `pov` to the file system at `path`.
///
/// The file will be named `block_hash_block_number.pov`.
///
/// The `parent_header`, `relay_parent_storage_root` and `relay_parent_number` will also be
/// stored in the file alongside the `pov`. This enables stateless validation of the `pov`.
fn export_pov_to_path<Block: BlockT>(
path: PathBuf,
pov: PoV,
block_hash: Block::Hash,
block_number: NumberFor<Block>,
parent_header: Block::Header,
relay_parent_storage_root: RHash,
relay_parent_number: RBlockNumber,
) {
if let Err(error) = fs::create_dir_all(&path) {
tracing::error!(target: LOG_TARGET, %error, path = %path.display(), "Failed to create PoV export directory");
return
}

let mut file = match File::create(path.join(format!("{block_hash:?}_{block_number}.pov"))) {
Ok(f) => f,
Err(error) => {
tracing::error!(target: LOG_TARGET, %error, "Failed to export PoV.");
return
},
};

pov.encode_to(&mut file);
HeadData(parent_header.encode()).encode_to(&mut file);
relay_parent_storage_root.encode_to(&mut file);
relay_parent_number.encode_to(&mut file);
}
use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Member};
use std::{path::PathBuf, sync::Arc, time::Duration};

/// Parameters for [`run`].
pub struct Params<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS> {
cumulus/client/consensus/aura/src/collators/slot_based/collation_task.rs
@@ -16,6 +16,7 @@
// along with Cumulus. If not, see <https://www.gnu.org/licenses/>.

use codec::Encode;
use std::path::PathBuf;

use cumulus_client_collator::service::ServiceInterface as CollatorServiceInterface;
use cumulus_relay_chain_interface::RelayChainInterface;
@@ -25,8 +26,10 @@
use polkadot_overseer::Handle as OverseerHandle;
use polkadot_primitives::{CollatorPair, Id as ParaId};

use cumulus_primitives_core::relay_chain::BlockId;
use futures::prelude::*;

use crate::export_pov_to_path;
use sc_utils::mpsc::TracingUnboundedReceiver;
use sp_runtime::traits::{Block as BlockT, Header};

@@ -50,6 +53,8 @@
pub collator_receiver: TracingUnboundedReceiver<CollatorMessage<Block>>,
/// The handle from the special slot based block import.
pub block_import_handle: super::SlotBasedBlockImportHandle<Block>,
/// When set, the collator will export every produced `PoV` to this folder.
pub export_pov: Option<PathBuf>,
}

/// Asynchronously executes the collation task for a parachain.
@@ -67,6 +72,7 @@
collator_service,
mut collator_receiver,
mut block_import_handle,
export_pov,
}: Params<Block, RClient, CS>,
) where
Block: BlockT,
@@ -93,7 +99,7 @@
return;
};

handle_collation_message(message, &collator_service, &mut overseer_handle).await;
handle_collation_message(message, &collator_service, &mut overseer_handle, relay_client.clone(), export_pov.clone()).await;
},
block_import_msg = block_import_handle.next().fuse() => {
// TODO: Implement me.
@@ -107,10 +113,12 @@
/// Handle an incoming collation message from the block builder task.
/// This builds the collation from the [`CollatorMessage`] and submits it to
/// the collation-generation subsystem of the relay chain.
async fn handle_collation_message<Block: BlockT>(
async fn handle_collation_message<Block: BlockT, RClient: RelayChainInterface + Clone + 'static>(
message: CollatorMessage<Block>,
collator_service: &impl CollatorServiceInterface<Block>,
overseer_handle: &mut OverseerHandle,
relay_client: RClient,
export_pov: Option<PathBuf>,
) {
let CollatorMessage {
parent_header,
@@ -140,6 +148,24 @@
);

if let MaybeCompressedPoV::Compressed(ref pov) = collation.proof_of_validity {
if let Some(pov_path) = export_pov {
if let Ok(Some(relay_parent_header)) =
relay_client.header(BlockId::Hash(relay_parent)).await
{
export_pov_to_path::<Block>(
pov_path.clone(),
pov.clone(),
block_data.header().hash(),
*block_data.header().number(),
parent_header.clone(),
relay_parent_header.state_root,
relay_parent_header.number,
);
} else {
tracing::error!(target: LOG_TARGET, "Failed to get relay parent header from hash: {relay_parent:?}");

}
}

tracing::info!(
target: LOG_TARGET,
"Compressed PoV size: {}kb",
44 changes: 25 additions & 19 deletions cumulus/client/consensus/aura/src/collators/slot_based/mod.rs
@@ -54,7 +54,7 @@ use sp_core::{crypto::Pair, traits::SpawnNamed, U256};
use sp_inherents::CreateInherentDataProviders;
use sp_keystore::KeystorePtr;
use sp_runtime::traits::{Block as BlockT, Member, NumberFor, One};
use std::{sync::Arc, time::Duration};
use std::{path::PathBuf, sync::Arc, time::Duration};

pub use block_import::{SlotBasedBlockImport, SlotBasedBlockImportHandle};

@@ -100,28 +100,13 @@ pub struct Params<Block, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS,
pub block_import_handle: SlotBasedBlockImportHandle<Block>,
/// Spawner for spawning futures.
pub spawner: Spawner,
/// When set, the collator will export every produced `PoV` to this folder.
pub export_pov: Option<PathBuf>,
}

/// Run aura-based block building and collation task.
pub fn run<Block, P, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS, Spawner>(
Params {
create_inherent_data_providers,
block_import,
para_client,
para_backend,
relay_client,
code_hash_provider,
keystore,
collator_key,
para_id,
proposer,
collator_service,
authoring_duration,
reinitialize,
slot_drift,
block_import_handle,
spawner,
}: Params<Block, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS, Spawner>,
params: Params<Block, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS, Spawner>,
) where
Block: BlockT,
Client: ProvideRuntimeApi<Block>
@@ -148,6 +133,26 @@ pub fn run<Block, P, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS, Spaw
P::Signature: TryFrom<Vec<u8>> + Member + Codec,
Spawner: SpawnNamed,
{
let Params {
create_inherent_data_providers,
block_import,
para_client,
para_backend,
relay_client,
code_hash_provider,
keystore,
collator_key,
para_id,
proposer,
collator_service,
authoring_duration,
reinitialize,
slot_drift,
block_import_handle,
spawner,
export_pov,
} = params;

let (tx, rx) = tracing_unbounded("mpsc_builder_to_collator", 100);
let collator_task_params = collation_task::Params {
relay_client: relay_client.clone(),
Expand All @@ -157,6 +162,7 @@ pub fn run<Block, P, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS, Spaw
collator_service: collator_service.clone(),
collator_receiver: rx,
block_import_handle,
export_pov,
};

let collation_task_fut = run_collation_task::<Block, _, _>(collator_task_params);
42 changes: 41 additions & 1 deletion cumulus/client/consensus/aura/src/lib.rs
@@ -23,13 +23,15 @@
//!
//! For more information about AuRa, the Substrate crate should be checked.

use codec::Codec;
use codec::{Codec, Encode};
use cumulus_client_consensus_common::{
ParachainBlockImportMarker, ParachainCandidate, ParachainConsensus,
};
use cumulus_primitives_core::{relay_chain::Hash as PHash, PersistedValidationData};

use cumulus_primitives_core::relay_chain::HeadData;
use futures::lock::Mutex;
use polkadot_primitives::{BlockNumber as RBlockNumber, Hash as RHash};
use sc_client_api::{backend::AuxStore, BlockOf};
use sc_consensus::BlockImport;
use sc_consensus_slots::{BackoffAuthoringBlocksStrategy, SimpleSlotWorker, SlotInfo};
@@ -45,7 +47,10 @@ use sp_keystore::KeystorePtr;
use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Member, NumberFor};
use std::{
convert::TryFrom,
fs,
fs::File,
marker::PhantomData,
path::PathBuf,
sync::{
atomic::{AtomicU64, Ordering},
Arc,
@@ -55,6 +60,7 @@ use std::{
mod import_queue;

pub use import_queue::{build_verifier, import_queue, BuildVerifierParams, ImportQueueParams};
use polkadot_node_primitives::PoV;
pub use sc_consensus_aura::{
slot_duration, standalone::slot_duration_at, AuraVerifier, BuildAuraWorkerParams,
SlotProportion,
@@ -252,3 +258,37 @@ where
Some(ParachainCandidate { block: res.block, proof: res.storage_proof })
}
}

/// Export the given `pov` to the file system at `path`.
///
/// The file will be named `block_hash_block_number.pov`.
///
/// The `parent_header`, `relay_parent_storage_root` and `relay_parent_number` will also be
/// stored in the file alongside the `pov`. This enables stateless validation of the `pov`.
pub(crate) fn export_pov_to_path<Block: BlockT>(
path: PathBuf,
pov: PoV,
block_hash: Block::Hash,
block_number: NumberFor<Block>,
parent_header: Block::Header,
relay_parent_storage_root: RHash,
relay_parent_number: RBlockNumber,
) {
if let Err(error) = fs::create_dir_all(&path) {
tracing::error!(target: LOG_TARGET, %error, path = %path.display(), "Failed to create PoV export directory");
return
}

let mut file = match File::create(path.join(format!("{block_hash:?}_{block_number}.pov"))) {
Ok(f) => f,
Err(error) => {
tracing::error!(target: LOG_TARGET, %error, "Failed to export PoV.");
return
},
};

pov.encode_to(&mut file);
HeadData(parent_header.encode()).encode_to(&mut file);
relay_parent_storage_root.encode_to(&mut file);
relay_parent_number.encode_to(&mut file);
}
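
For context on the file layout described above, here is a minimal read-back sketch (hypothetical, not part of this diff): it decodes the SCALE-encoded values in the same order `export_pov_to_path` writes them. The helper name `read_exported_pov` is illustrative only.

use codec::Decode;
use cumulus_primitives_core::relay_chain::HeadData;
use polkadot_node_primitives::PoV;
use polkadot_primitives::{BlockNumber as RBlockNumber, Hash as RHash};
use std::{fs, path::Path};

/// Hypothetical helper: decode a `.pov` file produced by `export_pov_to_path`,
/// returning the PoV together with the data needed for stateless validation.
fn read_exported_pov(path: &Path) -> Result<(PoV, HeadData, RHash, RBlockNumber), codec::Error> {
    let bytes = fs::read(path).expect("failed to read exported PoV file");
    let mut input = &bytes[..];
    // Decode in the exact order the values were appended to the file.
    let pov = PoV::decode(&mut input)?;
    let parent_head = HeadData::decode(&mut input)?;
    let relay_parent_storage_root = RHash::decode(&mut input)?;
    let relay_parent_number = RBlockNumber::decode(&mut input)?;
    Ok((pov, parent_head, relay_parent_storage_root, relay_parent_number))
}
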
10 changes: 7 additions & 3 deletions cumulus/polkadot-omni-node/lib/src/nodes/aura.rs
@@ -250,7 +250,7 @@ where
{
#[docify::export_content]
fn launch_slot_based_collator<CIDP, CHP, Proposer, CS, Spawner>(
params: SlotBasedParams<
params_with_export: SlotBasedParams<
Block,
ParachainBlockImport<
Block,
@@ -277,7 +277,9 @@
CS: CollatorServiceInterface<Block> + Send + Sync + Clone + 'static,
Spawner: SpawnNamed,
{
slot_based::run::<Block, <AuraId as AppCrypto>::Pair, _, _, _, _, _, _, _, _, _>(params);
slot_based::run::<Block, <AuraId as AppCrypto>::Pair, _, _, _, _, _, _, _, _, _>(
params_with_export,
);
}
}

@@ -319,7 +321,7 @@
_overseer_handle: OverseerHandle,
announce_block: Arc<dyn Fn(Hash, Option<Vec<u8>>) + Send + Sync>,
backend: Arc<ParachainBackend<Block>>,
_node_extra_args: NodeExtraArgs,
node_extra_args: NodeExtraArgs,
block_import_handle: SlotBasedBlockImportHandle<Block>,
) -> Result<(), Error> {
let proposer_factory = sc_basic_authorship::ProposerFactory::with_proof_recording(
@@ -358,10 +360,12 @@
slot_drift: Duration::from_secs(1),
block_import_handle,
spawner: task_manager.spawn_handle(),
export_pov: node_extra_args.export_pov,
};

// We have a separate function only to be able to use `docify::export` on this piece of
// code.

Self::launch_slot_based_collator(params);

Ok(())
1 change: 1 addition & 0 deletions cumulus/test/service/src/lib.rs
@@ -506,6 +506,7 @@ where
slot_drift: Duration::from_secs(1),
block_import_handle: slot_based_handle,
spawner: task_manager.spawn_handle(),
export_pov: None,
};

slot_based::run::<Block, AuthorityPair, _, _, _, _, _, _, _, _, _>(params);
11 changes: 11 additions & 0 deletions prdoc/pr_7585.prdoc
@@ -0,0 +1,11 @@
title: 'Add export PoV on slot-based collator'
doc:
- audience: [Node Dev, Node Operator]
description: Add functionality to export the Proof of Validity (PoV) when the slot-based collator is used.
crates:
- name: cumulus-test-service
bump: major
- name: cumulus-client-consensus-aura
bump: major
- name: polkadot-omni-node
bump: major
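
As a small usage illustration (hypothetical, not part of this diff), the sketch below mirrors the `{block_hash:?}_{block_number}.pov` naming used by `export_pov_to_path`, so an operator can locate the exported PoV of a specific block in the configured export directory. It uses `sp_core::H256` as a stand-in for the concrete parachain block hash type and assumes a `u32` block number.

use sp_core::H256;
use std::path::{Path, PathBuf};

/// Hypothetical helper: the path under which `export_pov_to_path` stores the
/// PoV of the given block, relative to the configured export directory.
fn exported_pov_file(export_dir: &Path, block_hash: H256, block_number: u32) -> PathBuf {
    export_dir.join(format!("{block_hash:?}_{block_number}.pov"))
}

fn main() {
    // E.g. the PoV of block number 42, exported to `/tmp/povs`.
    let path = exported_pov_file(Path::new("/tmp/povs"), H256::repeat_byte(0xab), 42);
    println!("{}", path.display());
}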