diff --git a/Cargo.lock b/Cargo.lock index 2c5306b37..643d15298 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4251,6 +4251,7 @@ dependencies = [ "sp-runtime", "sp-timestamp", "substrate-build-script-utils", + "tuxedo-core", "tuxedo-template-runtime", ] @@ -7900,6 +7901,23 @@ dependencies = [ "time-core", ] +[[package]] +name = "timestamp" +version = "0.1.0" +dependencies = [ + "log", + "parity-scale-codec", + "scale-info", + "serde", + "sp-api", + "sp-core", + "sp-inherents", + "sp-runtime", + "sp-std", + "sp-timestamp", + "tuxedo-core", +] + [[package]] name = "tiny-bip39" version = "1.0.0" @@ -8291,6 +8309,7 @@ version = "1.0.0-dev" dependencies = [ "aggregator", "array-bytes", + "async-trait", "derive-no-bound", "log", "parity-scale-codec", @@ -8300,6 +8319,7 @@ dependencies = [ "sp-api", "sp-core", "sp-debug-derive", + "sp-inherents", "sp-io", "sp-runtime", "sp-std", @@ -8339,6 +8359,7 @@ dependencies = [ "sp-transaction-pool", "sp-version", "substrate-wasm-builder", + "timestamp", "tuxedo-core", ] diff --git a/Cargo.toml b/Cargo.toml index a841fe45b..b934eec2e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ members = [ "wardrobe/amoeba", "wardrobe/money", "wardrobe/poe", + "wardrobe/timestamp", "wardrobe/kitties", "wardrobe/runtime_upgrade", ] @@ -19,6 +20,7 @@ resolver = "2" [workspace.dependencies] # Generic dependencies +async-trait = "0.1.73" clap = "4.3.0" hex-literal = "0.4.1" jsonrpsee = "0.16.2" diff --git a/node/Cargo.toml b/node/Cargo.toml index c8059e37e..1d3b22412 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -45,6 +45,7 @@ sp-blockchain = { workspace = true } # Local Dependencies node-template-runtime = { package = "tuxedo-template-runtime", path = "../tuxedo-template-runtime" } +tuxedo-core = { path = "../tuxedo-core" } [[bin]] name = "node-template" diff --git a/node/src/service.rs b/node/src/service.rs index d4fc72fbc..139c4a728 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -234,6 +234,7 @@ pub fn 
new_full(config: Configuration) -> Result { ); let slot_duration = sc_consensus_aura::slot_duration(&*client)?; + let client_for_cidp = client.clone(); let aura = sc_consensus_aura::start_aura::( StartAuraParams { @@ -242,16 +243,27 @@ pub fn new_full(config: Configuration) -> Result { select_chain, block_import, proposer_factory, - create_inherent_data_providers: move |_, ()| async move { - let timestamp = sp_timestamp::InherentDataProvider::from_system_time(); - - let slot = - sp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_slot_duration( - *timestamp, - slot_duration, - ); - - Ok((slot, timestamp)) + create_inherent_data_providers: move |parent_hash, ()| { + let parent_block = client_for_cidp + .clone() + .block(parent_hash) + .expect("Block backend should not error.") + .expect("Parent block should be found in database for authoring to work.") + .block; + + async move { + let parent_idp = + tuxedo_core::inherents::ParentBlockInherentDataProvider(parent_block); + let timestamp = sp_timestamp::InherentDataProvider::from_system_time(); + + let slot = + sp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_slot_duration( + *timestamp, + slot_duration, + ); + + Ok((slot, parent_idp, timestamp)) + } }, force_authoring, backoff_authoring_blocks, diff --git a/tuxedo-core/Cargo.toml b/tuxedo-core/Cargo.toml index 8fb42ca37..bf27aee09 100644 --- a/tuxedo-core/Cargo.toml +++ b/tuxedo-core/Cargo.toml @@ -7,6 +7,7 @@ repository = "https://github.com/Off-Narrative-Labs/Tuxedo" version = "1.0.0-dev" [dependencies] +async-trait = { optional = true, workspace = true } log = { workspace = true } parity-scale-codec = { features = [ "derive" ], workspace = true } parity-util-mem = { optional = true, workspace = true } @@ -19,6 +20,7 @@ derive-no-bound = { path = "no_bound" } sp-api = { default_features = false, workspace = true } sp-core = { default_features = false, workspace = true } sp-debug-derive = { features = [ "force-debug" ], 
default_features = false, workspace = true } +sp-inherents = { default_features = false, workspace = true } sp-io = { features = [ "with-tracing" ], default_features = false, workspace = true } sp-runtime = { default_features = false, workspace = true } sp-std = { default_features = false, workspace = true } @@ -30,12 +32,14 @@ array-bytes = { workspace = true } [features] default = [ "std" ] std = [ + "async-trait", "sp-debug-derive/std", "parity-scale-codec/std", "sp-core/std", "sp-std/std", "serde", "sp-api/std", + "sp-inherents/std", "sp-io/std", "sp-runtime/std", "parity-util-mem", diff --git a/tuxedo-core/aggregator/src/lib.rs b/tuxedo-core/aggregator/src/lib.rs index b7fafb073..90a685cc1 100644 --- a/tuxedo-core/aggregator/src/lib.rs +++ b/tuxedo-core/aggregator/src/lib.rs @@ -32,13 +32,15 @@ pub fn aggregate(_: TokenStream, body: TokenStream) -> TokenStream { ) }); let variants = variant_type_pairs.clone().map(|(v, _t)| v); + let variants2 = variants.clone(); let inner_types = variant_type_pairs.map(|(_v, t)| t); + let inner_types2 = inner_types.clone(); let output = quote! { // First keep the original code in tact #original_code - // Now write all the From impls + // Now write all the wrapping From impls #( impl From<#inner_types> for #outer_type { fn from(b: #inner_types) -> Self { @@ -46,6 +48,19 @@ pub fn aggregate(_: TokenStream, body: TokenStream) -> TokenStream { } } )* + + // Finally write all the un-wrapping From impls + #( + impl From<#outer_type> for #inner_types2 { + fn from(a: #outer_type) -> Self { + if let #outer_type::#variants2(b) = a { + b + } else { + panic!("wrong type or something...") + } + } + } + )* }; output.into() @@ -115,12 +130,28 @@ pub fn tuxedo_constraint_checker(attrs: TokenStream, body: TokenStream) -> Token let variants = variant_type_pairs.clone().map(|(v, _t)| v); let inner_types = variant_type_pairs.map(|(_v, t)| t); - let vis = ast.vis; + // Set up the names of the new associated types. 
let mut error_type_name = outer_type.to_string(); error_type_name.push_str("Error"); let error_type = Ident::new(&error_type_name, outer_type.span()); - let inner_types = inner_types.clone(); + + let mut inherent_hooks_name = outer_type.to_string(); + inherent_hooks_name.push_str("InherentHooks"); + let inherent_hooks = Ident::new(&inherent_hooks_name, outer_type.span()); + + let vis = ast.vis; + + // TODO there must be a better way to do this, right? + let inner_types2 = inner_types.clone(); + let inner_types3 = inner_types.clone(); + let inner_types4 = inner_types.clone(); + let inner_types6 = inner_types.clone(); let variants2 = variants.clone(); + let variants3 = variants.clone(); + let variants4 = variants.clone(); + let variants5 = variants.clone(); + let variants6 = variants.clone(); + let output = quote! { // Preserve the original enum, and write the From impls #[tuxedo_core::aggregate] @@ -138,9 +169,84 @@ pub fn tuxedo_constraint_checker(attrs: TokenStream, body: TokenStream) -> Token )* } + /// This type is generated by the `#[tuxedo_constraint_checker]` macro. + /// It is a combined set of inherent hooks for the inherent hooks of each individual checker. 
+ /// + /// This type is accessible downstream as `::InherentHooks` + #[derive(Debug, scale_info::TypeInfo)] + #vis enum #inherent_hooks { + #( + #variants2(<#inner_types2 as tuxedo_core::ConstraintChecker<#verifier>>::InherentHooks), + )* + } + + impl tuxedo_core::inherents::InherentInternal<#verifier, #outer_type> for #inherent_hooks { + + fn create_inherents( + authoring_inherent_data: &InherentData, + previous_inherents: Vec<(tuxedo_core::types::Transaction<#verifier, #outer_type>, sp_core::H256)>, + ) -> Vec> { + + let mut all_inherents = Vec::new(); + + #( + { + // Filter the previous inherents down to just the ones that came from this piece + let previous_inherents = previous_inherents + .iter() + .filter_map(|(tx, hash)| { + match tx.checker { + #outer_type::#variants3(ref inner_checker) => Some((tx.transform::<#inner_types3>(), *hash )), + _ => None, + } + }) + .collect(); + + let inherents = <#inner_types3 as tuxedo_core::ConstraintChecker<#verifier>>::InherentHooks::create_inherents(authoring_inherent_data, previous_inherents) + .iter() + .map(|tx| tx.transform::<#outer_type>()) + .collect::>(); + all_inherents.extend(inherents); + } + )* + + // Return the aggregate of all inherent extrinsics from all constituent constraint checkers. 
+ all_inherents + } + + fn check_inherents( + importing_inherent_data: &sp_inherents::InherentData, + inherents: Vec>, + result: &mut sp_inherents::CheckInherentsResult, + ) { + #( + let relevant_inherents: Vec> = inherents + .iter() + .filter_map(|tx| { + match tx.checker { + #outer_type::#variants4(ref inner_checker) => Some(tx.transform::<#inner_types4>()), + _ => None, + } + }) + .collect(); + + <#inner_types4 as tuxedo_core::ConstraintChecker<#verifier>>::InherentHooks::check_inherents(importing_inherent_data, relevant_inherents, result); + + // According to https://paritytech.github.io/polkadot-sdk/master/sp_inherents/struct.CheckInherentsResult.html + // "When a fatal error occurs, all other errors are removed and the implementation needs to abort checking inherents." + if result.fatal_error() { + return; + } + )* + } + + } + impl tuxedo_core::ConstraintChecker<#verifier> for #outer_type { type Error = #error_type; + type InherentHooks = #inherent_hooks; + fn check ( &self, inputs: &[tuxedo_core::types::Output<#verifier>], @@ -149,10 +255,20 @@ pub fn tuxedo_constraint_checker(attrs: TokenStream, body: TokenStream) -> Token ) -> Result { match self { #( - Self::#variants2(inner) => inner.check(inputs, peeks, outputs).map_err(|e| Self::Error::#variants2(e)), + Self::#variants5(inner) => inner.check(inputs, peeks, outputs).map_err(|e| Self::Error::#variants5(e)), + )* + } + } + + fn is_inherent(&self) -> bool { + match self { + #( + Self::#variants6(inner) => <#inner_types6 as tuxedo_core::ConstraintChecker<#verifier>>::is_inherent(inner), )* } + } + } }; diff --git a/tuxedo-core/src/constraint_checker.rs b/tuxedo-core/src/constraint_checker.rs index 81a82f970..498160bfc 100644 --- a/tuxedo-core/src/constraint_checker.rs +++ b/tuxedo-core/src/constraint_checker.rs @@ -5,15 +5,12 @@ use sp_std::{fmt::Debug, vec::Vec}; -use crate::{dynamic_typing::DynamicallyTypedData, types::Output, Verifier}; +use crate::{dynamic_typing::DynamicallyTypedData, 
inherents::InherentInternal, types::Output}; use parity_scale_codec::{Decode, Encode}; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; use sp_runtime::transaction_validity::TransactionPriority; -/// A simplified constraint checker that a transaction can choose to call. Checks whether the input -/// and output data from a transaction meets the codified constraints. +/// A simplified constraint checker that a transaction can choose to call. +/// Checks whether the input and output data from a transaction meets the codified constraints. /// /// Additional transient information may be passed to the constraint checker by including it in the fields /// of the constraint checker struct itself. Information passed in this way does not come from state, nor @@ -34,16 +31,21 @@ pub trait SimpleConstraintChecker: Debug + Encode + Decode + Clone { /// A single constraint checker that a transaction can choose to call. Checks whether the input /// and output data from a transaction meets the codified constraints. /// -/// This full ConstraintChecker should only be used if there is more that a piece wants to do such -/// as check the verifier information in some unique way. +/// This full ConstraintChecker should only be implemented if the piece logic cannot be expressed with +/// the SimpleConstraintChecker. For example, if you need to enforce the verifier is a particular type +/// or contains a certain value. Another reason would be if you need to implement an inherent. /// /// Additional transient information may be passed to the constraint checker by including it in the fields /// of the constraint checker struct itself. Information passed in this way does not come from state, nor /// is it stored in state. 
-pub trait ConstraintChecker: Debug + Encode + Decode + Clone { - /// the error type that this constraint checker may return +pub trait ConstraintChecker: Debug + Encode + Decode + Clone { + /// The error type that this constraint checker may return type Error: Debug; + /// Optional Associated Inherent processing logic. If this transaction type is not an inherent, use (). + /// If it is an inherent, use Self, and implement the TuxedoInherent trait. + type InherentHooks: InherentInternal; + /// The actual check validation logic fn check( &self, @@ -51,15 +53,21 @@ pub trait ConstraintChecker: Debug + Encode + Decode + Clone { peeks: &[Output], outputs: &[Output], ) -> Result; + + /// Tells whether this extrinsic is an inherent or not. + /// If you return true here, you must provide the correct inherent hooks above. + fn is_inherent(&self) -> bool; } // This blanket implementation makes it so that any type that chooses to -// implement the Simple trait also implements the more Powerful trait. This way -// the executive can always just call the more Powerful trait. -impl ConstraintChecker for T { +// implement the Simple trait also implements the more Powerful trait. +// This way the executive can always just call the more Powerful trait. +impl ConstraintChecker for T { // Use the same error type used in the simple implementation. 
type Error = ::Error; + type InherentHooks = (); + fn check( &self, inputs: &[Output], @@ -81,12 +89,20 @@ impl ConstraintChecker for T { // Call the simple constraint checker SimpleConstraintChecker::check(self, &input_data, &peek_data, &output_data) } + + fn is_inherent(&self) -> bool { + false + } } /// Utilities for writing constraint-checker-related unit tests -#[cfg(feature = "std")] +#[cfg(test)] pub mod testing { + use scale_info::TypeInfo; + use serde::{Deserialize, Serialize}; + use super::*; + use crate::{types::Output, verifier::TestVerifier}; /// A testing checker that passes (with zero priority) or not depending on /// the boolean value enclosed. @@ -94,16 +110,19 @@ pub mod testing { pub struct TestConstraintChecker { /// Whether the checker should pass. pub checks: bool, + /// Whether this constraint checker is an inherent. + pub inherent: bool, } - impl SimpleConstraintChecker for TestConstraintChecker { + impl ConstraintChecker for TestConstraintChecker { type Error = (); + type InherentHooks = (); fn check( &self, - _input_data: &[DynamicallyTypedData], - _peek_data: &[DynamicallyTypedData], - _output_data: &[DynamicallyTypedData], + _input_data: &[Output], + _peek_data: &[Output], + _output_data: &[Output], ) -> Result { if self.checks { Ok(0) @@ -111,19 +130,29 @@ pub mod testing { Err(()) } } + + fn is_inherent(&self) -> bool { + self.inherent + } } #[test] fn test_checker_passes() { - let result = - SimpleConstraintChecker::check(&TestConstraintChecker { checks: true }, &[], &[], &[]); + let result = TestConstraintChecker { + checks: true, + inherent: false, + } + .check(&[], &[], &[]); assert_eq!(result, Ok(0)); } #[test] fn test_checker_fails() { - let result = - SimpleConstraintChecker::check(&TestConstraintChecker { checks: false }, &[], &[], &[]); + let result = TestConstraintChecker { + checks: false, + inherent: false, + } + .check(&[], &[], &[]); assert_eq!(result, Err(())); } } diff --git a/tuxedo-core/src/executive.rs 
b/tuxedo-core/src/executive.rs index 8924f1dd6..f2897f60d 100644 --- a/tuxedo-core/src/executive.rs +++ b/tuxedo-core/src/executive.rs @@ -9,6 +9,7 @@ use crate::{ constraint_checker::ConstraintChecker, ensure, + inherents::{InherentInternal, PARENT_INHERENT_IDENTIFIER}, types::{DispatchResult, OutputRef, Transaction, UtxoError}, utxo_set::TransparentUtxoSet, verifier::Verifier, @@ -18,6 +19,8 @@ use log::debug; use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; use sp_api::{BlockT, HashT, HeaderT, TransactionValidity}; +use sp_core::H256; +use sp_inherents::{CheckInherentsResult, InherentData}; use sp_runtime::{ traits::BlakeTwo256, transaction_validity::{ @@ -46,6 +49,11 @@ impl< pub fn validate_tuxedo_transaction( transaction: &Transaction, ) -> Result> { + debug!( + target: LOG_TARGET, + "validating tuxedo transaction", + ); + // Make sure there are no duplicate inputs // Duplicate peeks are allowed, although they are inefficient and wallets should not create such transactions { @@ -129,6 +137,10 @@ impl< // If any of the inputs are missing, we cannot make any more progress // If they are all present, we may proceed to call the constraint checker if !missing_inputs.is_empty() { + debug!( + target: LOG_TARGET, + "Transaction is valid but still has missing inputs. Returning early.", + ); return Ok(ValidTransaction { requires: missing_inputs, provides, @@ -291,8 +303,22 @@ impl< // be cleared before the end of the block sp_io::storage::set(HEADER_KEY, &block.header().encode()); + // Tuxedo requires that inherents are at the beginning (and soon end) of the + // block and not scattered throughout. We use this flag to enforce that. 
+ let mut finished_with_opening_inherents = false; + // Apply each extrinsic for extrinsic in block.extrinsics() { + // Enforce that inherents are in the right place + let current_tx_is_inherent = extrinsic.checker.is_inherent(); + if current_tx_is_inherent && finished_with_opening_inherents { + panic!("Tried to execute opening inherent after switching to non-inherents."); + } + if !current_tx_is_inherent && !finished_with_opening_inherents { + // This is the first non-inherent, so we update our flag and continue. + finished_with_opening_inherents = true; + } + match Self::apply_tuxedo_transaction(extrinsic.clone()) { Ok(()) => debug!( target: LOG_TARGET, @@ -360,14 +386,86 @@ impl< block_hash ); - // TODO, we need a good way to map our UtxoError into the supposedly generic InvalidTransaction - // https://paritytech.github.io/substrate/master/sp_runtime/transaction_validity/enum.InvalidTransaction.html - // For now, I just make them all custom zero - let r = Self::validate_tuxedo_transaction(&tx); + // Inherents are not permitted in the pool. They only come from the block author. + // We perform this check here rather than in the `validate_tuxedo_transaction` helper, + // because that helper is called again during on-chain execution. Inherents are valid + // during execution, so we do not want this check repeated. 
+ let r = if tx.checker.is_inherent() { + Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) + } else { + // TODO, we need a good way to map our UtxoError into the supposedly generic InvalidTransaction + // https://paritytech.github.io/substrate/master/sp_runtime/transaction_validity/enum.InvalidTransaction.html + // For now, I just make them all custom zero + Self::validate_tuxedo_transaction(&tx) + .map_err(|_| TransactionValidityError::Invalid(InvalidTransaction::Custom(0))) + }; debug!(target: LOG_TARGET, "Validation result: {:?}", r); - r.map_err(|_| TransactionValidityError::Invalid(InvalidTransaction::Custom(0))) + r + } + + // These next two functions are for the standard beginning-of-block inherent extrinsics. + pub fn inherent_extrinsics(data: sp_inherents::InherentData) -> Vec<::Extrinsic> { + debug!( + target: LOG_TARGET, + "Entering `inherent_extrinsics`." + ); + + // Extract the complete parent block from the inherent data + let parent: B = data + .get_data(&PARENT_INHERENT_IDENTIFIER) + .expect("Parent block inherent data should be able to decode.") + .expect("Parent block should be present among authoring inherent data."); + + // Extract the inherents from the previous block, which can be found at the beginning of the extrinsics list. + // The parent is already imported, so we know it is valid and we know its inherents came first. + // We also annotate each transaction with its original hash for purposes of constructing output refs later. + // This is necessary because the transaction hash changes as we unwrap layers of aggregation, + // and we need an original universal transaction id. 
+ let previous_blocks_inherents: Vec<(::Extrinsic, H256)> = parent + .extrinsics() + .iter() + .cloned() + .take_while(|tx| tx.checker.is_inherent()) + .map(|tx| { + let id = BlakeTwo256::hash_of(&tx.encode()); + (tx, id) + }) + .collect(); + + debug!( + target: LOG_TARGET, + "The previous block had {} extrinsics ({} inherents).", parent.extrinsics().len(), previous_blocks_inherents.len() + ); + + // Call into constraint checker's own inherent hooks to create the actual transactions + C::InherentHooks::create_inherents(&data, previous_blocks_inherents) + } + + pub fn check_inherents(block: B, data: InherentData) -> sp_inherents::CheckInherentsResult { + debug!( + target: LOG_TARGET, + "Entering `check_inherents`" + ); + + let mut result = CheckInherentsResult::new(); + + // Tuxedo requires that all inherents come at the beginning of the block. + // (Soon we will also allow them at the end, but never throughout the body.) + // (TODO revise this logic once that is implemented.) + // At this off-chain pre-check stage, we assume that requirement is upheld. + // It will be verified later once we are executing on-chain. 
+ let inherents: Vec> = block + .extrinsics() + .iter() + .cloned() + .take_while(|tx| tx.checker.is_inherent()) + .collect(); + + C::InherentHooks::check_inherents(&data, inherents, &mut result); + + result } } @@ -429,14 +527,12 @@ mod tests { self } - fn build(self, should_check: bool) -> TestTransaction { + fn build(self, checks: bool, inherent: bool) -> TestTransaction { TestTransaction { inputs: self.inputs, peeks: self.peeks, outputs: self.outputs, - checker: TestConstraintChecker { - checks: should_check, - }, + checker: TestConstraintChecker { checks, inherent }, } } } @@ -535,7 +631,7 @@ mod tests { #[test] fn validate_empty_works() { - let tx = TestTransactionBuilder::default().build(true); + let tx = TestTransactionBuilder::default().build(true, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx).unwrap(); @@ -559,7 +655,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_input(input) - .build(true); + .build(true, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx).unwrap(); @@ -579,7 +675,7 @@ mod tests { .execute_with(|| { let tx = TestTransactionBuilder::default() .with_peek(output_ref) - .build(true); + .build(true, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx).unwrap(); @@ -598,7 +694,7 @@ mod tests { }; let tx = TestTransactionBuilder::default() .with_output(output) - .build(true); + .build(true, false); // This is a real transaction, so we need to calculate a real OutputRef let tx_hash = BlakeTwo256::hash_of(&tx.encode()); @@ -625,7 +721,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_input(input) - .build(true); + .build(true, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx).unwrap(); @@ -644,7 +740,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_peek(output_ref.clone()) - .build(true); + .build(true, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx).unwrap(); @@ -672,7 +768,7 @@ mod tests { let tx = 
TestTransactionBuilder::default() .with_input(input.clone()) .with_input(input) - .build(true); + .build(true, false); let result = TestExecutive::validate_tuxedo_transaction(&tx); @@ -694,7 +790,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_peek(output_ref.clone()) .with_peek(output_ref) - .build(true); + .build(true, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx).unwrap(); @@ -719,7 +815,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_input(input) - .build(true); + .build(true, false); let result = TestExecutive::validate_tuxedo_transaction(&tx); @@ -741,7 +837,7 @@ mod tests { }; let tx = TestTransactionBuilder::default() .with_output(output) - .build(true); + .build(true, false); // Now calculate the output ref that the transaction creates so we can pre-populate the state. let tx_hash = BlakeTwo256::hash_of(&tx.encode()); @@ -759,7 +855,7 @@ mod tests { #[test] fn validate_with_constraint_error_fails() { ExternalityBuilder::default().build().execute_with(|| { - let tx = TestTransactionBuilder::default().build(false); + let tx = TestTransactionBuilder::default().build(false, false); let vt = TestExecutive::validate_tuxedo_transaction(&tx); @@ -770,7 +866,7 @@ mod tests { #[test] fn apply_empty_works() { ExternalityBuilder::default().build().execute_with(|| { - let tx = TestTransactionBuilder::default().build(true); + let tx = TestTransactionBuilder::default().build(true, false); let vt = TestExecutive::apply_tuxedo_transaction(tx); @@ -789,7 +885,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_input(input) - .build(true); + .build(true, false); let vt = TestExecutive::apply_tuxedo_transaction(tx); @@ -804,7 +900,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_peek(output_ref) - .build(true); + .build(true, false); let vt = TestExecutive::apply_tuxedo_transaction(tx); @@ -827,7 +923,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_input(input) - 
.build(true); + .build(true, false); // Commit the tx to storage TestExecutive::update_storage(tx); @@ -847,7 +943,7 @@ mod tests { let tx = TestTransactionBuilder::default() .with_output(output.clone()) - .build(true); + .build(true, false); let tx_hash = BlakeTwo256::hash_of(&tx.encode()); let output_ref = OutputRef { tx_hash, index: 0 }; @@ -889,7 +985,7 @@ mod tests { #[test] fn apply_valid_extrinsic_work() { ExternalityBuilder::default().build().execute_with(|| { - let tx = TestTransactionBuilder::default().build(true); + let tx = TestTransactionBuilder::default().build(true, false); let apply_result = TestExecutive::apply_extrinsic(tx.clone()); @@ -908,7 +1004,7 @@ mod tests { #[test] fn apply_invalid_extrinsic_rejects() { ExternalityBuilder::default().build().execute_with(|| { - let tx = TestTransactionBuilder::default().build(false); + let tx = TestTransactionBuilder::default().build(false, false); let apply_result = TestExecutive::apply_extrinsic(tx.clone()); @@ -992,11 +1088,11 @@ mod tests { "858174d563f845dbb4959ea64816bd8409e48cc7e65db8aa455bc98d61d24071", ), extrinsics_root: array_bytes::hex_n_into_unchecked( - "4680cf9e9383d16183adcc1bf1ab6ce66133af3b5e650405ebe94a943849f4d4", + "d609af1c51521f5891054014cf667619067a93f4bca518b398f5a39aeb270cca", ), digest: Default::default(), }, - extrinsics: vec![TestTransactionBuilder::default().build(true)], + extrinsics: vec![TestTransactionBuilder::default().build(true, false)], }; TestExecutive::execute_block(b); @@ -1019,7 +1115,7 @@ mod tests { ), digest: Default::default(), }, - extrinsics: vec![TestTransactionBuilder::default().build(false)], + extrinsics: vec![TestTransactionBuilder::default().build(false, false)], }; TestExecutive::execute_block(b); @@ -1067,4 +1163,108 @@ mod tests { TestExecutive::execute_block(b); }); } + + #[test] + fn execute_block_inherent_only_works() { + ExternalityBuilder::default().build().execute_with(|| { + let b = TestBlock { + header: TestHeader { + parent_hash: 
H256::zero(), + number: 6, + state_root: array_bytes::hex_n_into_unchecked( + "858174d563f845dbb4959ea64816bd8409e48cc7e65db8aa455bc98d61d24071", + ), + extrinsics_root: array_bytes::hex_n_into_unchecked( + "799fc6d36f68fc83ae3408de607006e02836181e91701aa3a8021960b1f3507c", + ), + digest: Default::default(), + }, + extrinsics: vec![TestTransactionBuilder::default().build(true, true)], + }; + + TestExecutive::execute_block(b); + }); + } + + #[test] + fn execute_block_inherent_first_works() { + ExternalityBuilder::default().build().execute_with(|| { + let b = TestBlock { + header: TestHeader { + parent_hash: H256::zero(), + number: 6, + state_root: array_bytes::hex_n_into_unchecked( + "858174d563f845dbb4959ea64816bd8409e48cc7e65db8aa455bc98d61d24071", + ), + extrinsics_root: array_bytes::hex_n_into_unchecked( + "bf3e98799022bee8f0a55659af5f498717736ae012d2aff6274cdb7c2b0d78e9", + ), + digest: Default::default(), + }, + extrinsics: vec![ + TestTransactionBuilder::default().build(true, true), + TestTransactionBuilder::default().build(true, false), + ], + }; + + TestExecutive::execute_block(b); + }); + } + + #[test] + #[should_panic( + expected = "Tried to execute opening inherent after switching to non-inherents." 
+ )] + fn execute_block_inherents_must_be_first() { + ExternalityBuilder::default().build().execute_with(|| { + let b = TestBlock { + header: TestHeader { + parent_hash: H256::zero(), + number: 6, + state_root: array_bytes::hex_n_into_unchecked( + "858174d563f845dbb4959ea64816bd8409e48cc7e65db8aa455bc98d61d24071", + ), + extrinsics_root: array_bytes::hex_n_into_unchecked( + "df64890515cd8ef5a8e736248394f7c72a1df197bd400a4e31affcaf6e051984", + ), + digest: Default::default(), + }, + extrinsics: vec![ + TestTransactionBuilder::default().build(true, false), + TestTransactionBuilder::default().build(true, true), + ], + }; + + TestExecutive::execute_block(b); + }); + } + + #[test] + #[should_panic( + expected = "Tried to execute opening inherent after switching to non-inherents." + )] + fn execute_block_inherents_must_all_be_first() { + ExternalityBuilder::default().build().execute_with(|| { + let b = TestBlock { + header: TestHeader { + parent_hash: H256::zero(), + number: 6, + state_root: array_bytes::hex_n_into_unchecked( + "858174d563f845dbb4959ea64816bd8409e48cc7e65db8aa455bc98d61d24071", + ), + extrinsics_root: array_bytes::hex_n_into_unchecked( + "0x36601deae36de127b974e8498e118e348a50aa4aa94bc5713e29c56e0d37e44f", + ), + digest: Default::default(), + }, + extrinsics: vec![ + TestTransactionBuilder::default().build(true, true), + TestTransactionBuilder::default().build(true, false), + TestTransactionBuilder::default().build(true, true), + ], + }; + + TestExecutive::execute_block(b); + }); + } } diff --git a/tuxedo-core/src/inherents.rs b/tuxedo-core/src/inherents.rs new file mode 100644 index 000000000..f67fe50e8 --- /dev/null +++ b/tuxedo-core/src/inherents.rs @@ -0,0 +1,222 @@ +//! APIs and utilities for working with Substrate's Inherents in Tuxedo based chains. +//! +//! # Substrate inherents +//! +//! Inherents are a Substrate feature that allows block authors to insert some transactions directly +//! into the body of the block. 
Inherents are similar to pre-runtime digests which allow authors to +//! insert info into the block header. However inherents go in the block body and therefore must be transactions. +//! +//! Classic use cases for inherents are injecting and updating environmental information such as a block timestamp, +//! information about the relay chain (if the current chain is a parachain), or information about who should receive the block reward. +//! +//! In order to allow the runtime to construct such transactions while keeping the client opaque, there are special APIs +//! for creating inherents and performing off-chain validation of inherents. That's right, inherents also offer +//! a special API to have their environmental data checked off-chain before the block is executed. +//! +//! # Complexities in UTXO chains +//! +//! In account based systems, the classic way to use an inherent is that the block author inserts a transaction providing some data like a timestamp. +//! When the extrinsic executes, it overwrites the previously stored timestamp in a dedicated storage item. +//! +//! In UTXO chains, there are no storage items, and all state is local to a UTXO. This is the case with, for example, the timestamp as well. +//! This means that when the author calls into the runtime with a timestamp, the transaction that is returned must include the correct reference +//! to the UTXO that contained the previous best timestamp. This is the crux of the problem: there is no easy way to know the location of +//! the previous timestamp in the utxo-space from inside the runtime. +//! +//! # Scraping the Parent Block +//! +//! The solution is to provide the entirety of the previous block to the runtime when asking it to construct inherents. +//! This module provides an inherent data provider that does just this. Any Tuxedo runtime that uses inherents (at least ones +//! that update environmental data), needs to include this foundational previous block inherent data provider +//! 
so that the Tuxedo executive can scrape it to find the output references of the previous inherent transactions. + +use parity_scale_codec::{Decode, Encode}; +use scale_info::TypeInfo; +use sp_core::H256; +use sp_inherents::{ + CheckInherentsResult, InherentData, InherentIdentifier, IsFatalError, MakeFatalError, +}; +use sp_runtime::traits::Block as BlockT; +use sp_std::{vec, vec::Vec}; + +use crate::{types::Transaction, ConstraintChecker, Verifier}; + +/// An inherent identifier for the Tuxedo parent block inherent +pub const PARENT_INHERENT_IDENTIFIER: InherentIdentifier = *b"prnt_blk"; + +/// An inherent data provider that inserts the previous block into the inherent data. +/// This data does NOT go into an extrinsic. +#[cfg(feature = "std")] +pub struct ParentBlockInherentDataProvider(pub Block); + +#[cfg(feature = "std")] +impl sp_std::ops::Deref for ParentBlockInherentDataProvider { + type Target = B; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "std")] +#[async_trait::async_trait] +impl sp_inherents::InherentDataProvider for ParentBlockInherentDataProvider { + async fn provide_inherent_data( + &self, + inherent_data: &mut InherentData, + ) -> Result<(), sp_inherents::Error> { + inherent_data.put_data(PARENT_INHERENT_IDENTIFIER, &self.0) + } + + async fn try_handle_error( + &self, + identifier: &InherentIdentifier, + error: &[u8], + ) -> Option> { + if identifier == &PARENT_INHERENT_IDENTIFIER { + println!("UH OH! INHERENT ERROR!!!!!!!!!!!!!!!!!!!!!!"); + Some(Err(sp_inherents::Error::Application(Box::from( + String::decode(&mut &error[..]).ok()?, + )))) + } else { + None + } + } +} + +/// Tuxedo's controlled interface around Substrate's concept of inherents. +/// +/// This interface assumes that each inherent will appear exactly once in each block. +/// This will be verified off-chain by nodes before block execution begins. +/// +/// This interface is stricter and more structured, and therefore simpler than FRAME's. 
+/// If you need to do something more powerful (which you probably don't) and you +/// understand exactly how Substrate's block authoring and Tuxedo's piece aggregation works +/// (which you probably don't) you can directly implement the `InherentInternal` trait +/// which is more powerful (and dangerous). +pub trait TuxedoInherent>: Sized { + type Error: Encode + IsFatalError; + + const INHERENT_IDENTIFIER: InherentIdentifier; + + /// Create the inherent extrinsic to insert into a block that is being authored locally. + /// The inherent data is supplied by the authoring node. + fn create_inherent( + authoring_inherent_data: &InherentData, + // The option represents the so-called "first block hack". + // We need a way to initialize the chain with a first inherent on block one + // where there is no previous inherent. Once we introduce genesis extrinsics, this can be removed. + previous_inherent: Option<(Transaction, H256)>, + ) -> Transaction; + + /// Perform off-chain pre-execution checks on the inherent. + /// The inherent data is supplied by the importing node. + /// The inherent data available here is not guaranteed to be the + /// same as what is available at authoring time. + fn check_inherent( + importing_inherent_data: &InherentData, + inherent: Transaction, + results: &mut CheckInherentsResult, + ); +} + +/// Almost identical to TuxedoInherent, but allows returning multiple extrinsics +/// (as aggregate runtimes will need to) and removes the requirement that the generic +/// outer constraint checker be buildable from `Self` so we can implement it for (). +/// +/// If you are trying to implement some complex inherent logic that requires the interaction of +/// multiple inherents, or features a variable number of inherents in each block, you might be +/// able to express it by implementing this trait, but such designs are probably too complicated. +/// Think long and hard before implementing this trait directly. 
+pub trait InherentInternal>: Sized { + /// Create the inherent extrinsic to insert into a block that is being authored locally. + /// The inherent data is supplied by the authoring node. + fn create_inherents( + authoring_inherent_data: &InherentData, + previous_inherents: Vec<(Transaction, H256)>, + ) -> Vec>; + + /// Perform off-chain pre-execution checks on the inherents. + /// The inherent data is supplied by the importing node. + /// The inherent data available here is not guaranteed to be the + /// same as what is available at authoring time. + fn check_inherents( + importing_inherent_data: &InherentData, + inherents: Vec>, + results: &mut CheckInherentsResult, + ); +} + +/// An adapter to transform structured Tuxedo inherents into the more general and powerful +/// InherentInternal trait. +#[derive(Debug, Default, TypeInfo, Clone, Copy)] +pub struct TuxedoInherentAdapter(T); + +impl, T: TuxedoInherent + 'static> InherentInternal + for TuxedoInherentAdapter +{ + fn create_inherents( + authoring_inherent_data: &InherentData, + previous_inherents: Vec<(Transaction, H256)>, + ) -> Vec> { + if previous_inherents.len() > 1 { + panic!("Authoring a leaf inherent constraint checker, but multiple previous inherents were supplied.") + } + + let previous_inherent = previous_inherents.get(0).cloned(); + + vec![>::create_inherent( + authoring_inherent_data, + previous_inherent, + )] + } + + fn check_inherents( + importing_inherent_data: &InherentData, + inherents: Vec>, + results: &mut CheckInherentsResult, + ) { + if inherents.is_empty() { + results + .put_error( + *b"12345678", + &MakeFatalError::from( + "Tuxedo inherent expected exactly one inherent extrinsic but found zero", + ), + ) + .expect("Should be able to put an error."); + return; + } else if inherents.len() > 1 { + results + .put_error(*b"12345678", &MakeFatalError::from("Tuxedo inherent expected exactly one inherent extrinsic but found multiple")) + .expect("Should be able to put an error."); + return; + } 
+ let inherent = inherents + .get(0) + .expect("We already checked the bounds.") + .clone(); + >::check_inherent(importing_inherent_data, inherent, results) + } +} + +impl> InherentInternal for () { + fn create_inherents( + _: &InherentData, + _: Vec<(Transaction, H256)>, + ) -> Vec> { + Vec::new() + } + + fn check_inherents( + _: &InherentData, + inherents: Vec>, + _: &mut CheckInherentsResult, + ) { + // Inherents should always be empty for this stub implementation. Not just in valid blocks, but as an invariant. + // The way we determined which inherents got here is by matching on the constraint checker. + assert!( + inherents.is_empty(), + "inherent extrinsic was passed to check inherents stub implementation." + ) + } +} diff --git a/tuxedo-core/src/lib.rs b/tuxedo-core/src/lib.rs index d4bb98805..6b702797b 100644 --- a/tuxedo-core/src/lib.rs +++ b/tuxedo-core/src/lib.rs @@ -10,6 +10,7 @@ pub mod dynamic_typing; mod executive; pub mod constraint_checker; +pub mod inherents; pub mod support_macros; pub mod traits; pub mod types; diff --git a/tuxedo-core/src/types.rs b/tuxedo-core/src/types.rs index de0c28831..c41860433 100644 --- a/tuxedo-core/src/types.rs +++ b/tuxedo-core/src/types.rs @@ -1,6 +1,6 @@ //! The common types that will be used across a Tuxedo runtime, and not specific to any one piece -use crate::dynamic_typing::DynamicallyTypedData; +use crate::{dynamic_typing::DynamicallyTypedData, ConstraintChecker, Verifier}; use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] @@ -35,8 +35,8 @@ pub struct OutputRef { /// and evictions (inputs that are forcefully consumed.) 
/// Existing state to be read and consumed from storage #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Debug, PartialEq, Eq, Clone, TypeInfo)] -pub struct Transaction { +#[derive(Default, Debug, PartialEq, Eq, Clone, TypeInfo)] +pub struct Transaction { /// Existing pieces of state to be read and consumed from storage pub inputs: Vec, /// Existing state to be read, but not consumed, from storage @@ -47,6 +47,20 @@ pub struct Transaction { pub checker: C, } +impl Transaction { + /// A helper function for transforming a transaction generic over one + /// kind of constraint checker into a transaction generic over another type + /// of constraint checker. This is useful when moving up and down the aggregation tree. + pub fn transform>(&self) -> Transaction { + Transaction { + inputs: self.inputs.clone(), + peeks: self.peeks.clone(), + outputs: self.outputs.clone(), + checker: self.checker.clone().into(), + } + } +} + // Manually implement Encode and Decode for the Transaction type // so that its encoding is the same as an opaque Vec. impl Encode for Transaction { @@ -94,7 +108,11 @@ impl Decode for Transaction { // This trait's design has a preference for transactions that will have a single signature over the // entire block, so it is not very useful for us. We still need to implement it to satisfy the bound, // so we do a minimal implementation. 
-impl Extrinsic for Transaction { +impl Extrinsic for Transaction +where + C: TypeInfo + ConstraintChecker + 'static, + V: TypeInfo + Verifier + 'static, +{ type Call = Self; type SignaturePayload = (); @@ -102,10 +120,24 @@ impl Extrinsic for Transaction Option { - Some(false) + if self.checker.is_inherent() { + Some(false) + } else { + None + } } } @@ -149,17 +181,29 @@ pub struct Output { pub verifier: V, } +impl From for Output { + fn from(payload: DynamicallyTypedData) -> Self { + Self { + payload, + verifier: Default::default(), + } + } +} + #[cfg(test)] pub mod tests { - use crate::{constraint_checker::testing::TestConstraintChecker, verifier::UpForGrabs}; + use crate::{constraint_checker::testing::TestConstraintChecker, verifier::TestVerifier}; use super::*; #[test] fn extrinsic_no_signed_payload() { - let checker = TestConstraintChecker { checks: true }; - let tx: Transaction = Transaction { + let checker = TestConstraintChecker { + checks: true, + inherent: false, + }; + let tx: Transaction = Transaction { inputs: Vec::new(), peeks: Vec::new(), outputs: Vec::new(), @@ -168,13 +212,34 @@ pub mod tests { let e = Transaction::new(tx.clone(), None).unwrap(); assert_eq!(e, tx); - assert_eq!(e.is_signed(), Some(false)); + assert_eq!(e.is_signed(), None); } #[test] fn extrinsic_is_signed_works() { - let checker = TestConstraintChecker { checks: true }; - let tx: Transaction = Transaction { + let checker = TestConstraintChecker { + checks: true, + inherent: false, + }; + let tx: Transaction = Transaction { + inputs: Vec::new(), + peeks: Vec::new(), + outputs: Vec::new(), + checker, + }; + let e = Transaction::new(tx.clone(), Some(())).unwrap(); + + assert_eq!(e, tx); + assert_eq!(e.is_signed(), None); + } + + #[test] + fn extrinsic_is_signed_works_for_inherents() { + let checker = TestConstraintChecker { + checks: true, + inherent: true, + }; + let tx: Transaction = Transaction { inputs: Vec::new(), peeks: Vec::new(), outputs: Vec::new(), diff --git 
a/tuxedo-core/src/verifier.rs b/tuxedo-core/src/verifier.rs index 6baf4f254..9a6bfb2d7 100644 --- a/tuxedo-core/src/verifier.rs +++ b/tuxedo-core/src/verifier.rs @@ -44,7 +44,7 @@ impl Verifier for SigCheck { /// A simple verifier that allows anyone to consume an output at any time #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] +#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo, Default)] pub struct UpForGrabs; impl Verifier for UpForGrabs { diff --git a/tuxedo-template-runtime/Cargo.toml b/tuxedo-template-runtime/Cargo.toml index 5c160ee89..d2a5f54ca 100644 --- a/tuxedo-template-runtime/Cargo.toml +++ b/tuxedo-template-runtime/Cargo.toml @@ -39,6 +39,7 @@ kitties = { default-features = false, path = "../wardrobe/kitties" } money = { default-features = false, path = "../wardrobe/money" } poe = { default-features = false, path = "../wardrobe/poe" } runtime-upgrade = { default-features = false, path = "../wardrobe/runtime_upgrade" } +timestamp = { default-features = false, path = "../wardrobe/timestamp" } tuxedo-core = { default-features = false, path = "../tuxedo-core" } [build-dependencies] @@ -73,5 +74,6 @@ std = [ "money/std", "poe/std", "kitties/std", + "timestamp/std", "runtime-upgrade/std", ] diff --git a/tuxedo-template-runtime/src/lib.rs b/tuxedo-template-runtime/src/lib.rs index bb6ea57b1..a76f88443 100644 --- a/tuxedo-template-runtime/src/lib.rs +++ b/tuxedo-template-runtime/src/lib.rs @@ -15,6 +15,7 @@ use sp_consensus_aura::sr25519::AuthorityId as AuraId; use sp_consensus_grandpa::AuthorityId as GrandpaId; use sp_api::impl_runtime_apis; +use sp_inherents::InherentData; use sp_runtime::{ create_runtime_str, impl_opaque_keys, traits::{BlakeTwo256, Block as BlockT}, @@ -206,6 +207,12 @@ impl poe::PoeConfig for Runtime { } } +impl timestamp::TimestampConfig for Runtime { + fn block_height() -> u32 { + Executive::block_height() + } +} + // Observation: For some 
applications, it will be invalid to simply delete // a UTXO without any further processing. Therefore, we explicitly include // AmoebaDeath and PoeRevoke on an application-specific basis @@ -234,6 +241,8 @@ pub enum OuterConstraintChecker { /// Checks that one winning claim came earlier than all the other claims, and thus /// the losing claims can be removed from storage. PoeDispute(poe::PoeDispute), + /// Set the block's timestamp via an inherent extrinsic. + SetTimestamp(timestamp::SetTimestamp), /// Upgrade the Wasm Runtime RuntimeUpgrade(runtime_upgrade::RuntimeUpgrade), } @@ -328,17 +337,15 @@ impl_runtime_apis! { Executive::close_block() } - fn inherent_extrinsics(_data: sp_inherents::InherentData) -> Vec<::Extrinsic> { - // Tuxedo does not yet support inherents - Default::default() + fn inherent_extrinsics(data: sp_inherents::InherentData) -> Vec<::Extrinsic> { + Executive::inherent_extrinsics(data) } fn check_inherents( - _block: Block, - _data: sp_inherents::InherentData + block: Block, + data: InherentData ) -> sp_inherents::CheckInherentsResult { - // Tuxedo does not yet support inherents - Default::default() + Executive::check_inherents(block, data) } } diff --git a/wallet/src/rpc.rs b/wallet/src/rpc.rs index f65d6cd08..f9b90af23 100644 --- a/wallet/src/rpc.rs +++ b/wallet/src/rpc.rs @@ -26,7 +26,7 @@ pub async fn node_get_block(hash: H256, client: &HttpClient) -> anyhow::Result u32 { + 1_000_000 + } +} + +#[test] +fn cleanup_timestamp_happy_path() { + let old = Timestamp::new(1, 1); + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![old.into()]; + let peek = vec![newer.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[],), + Ok(0), + ); +} + +#[test] +fn cleanup_timestamp_no_peek() { + let old = Timestamp::new(1, 1); + let inp = vec![old.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &[], &[]), + 
Err(CleanupRequiresOneReference) + ); +} + +#[test] +fn cleanup_timestamp_input_newer_than_reference() { + let old = Timestamp::new(1, 1); + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![newer.into()]; + let peek = vec![old.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[]), + Err(DontBeSoHasty) + ); +} + +#[test] +fn cleanup_timestamp_input_not_yet_ripe_for_cleaning() { + let old = Timestamp::new(1, 1); + let newer = Timestamp::new( + AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP / 2, + AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![old.into()]; + let peek = vec![newer.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[]), + Err(DontBeSoHasty) + ); +} + +#[test] +fn cleanup_timestamp_multiple_happy_path() { + let old1 = Timestamp::new(1 * AlwaysBlockMillion::MINIMUM_TIME_INTERVAL, 1); + let old2 = Timestamp::new(2 * AlwaysBlockMillion::MINIMUM_TIME_INTERVAL, 2); + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![old1.into(), old2.into()]; + let peek = vec![newer.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[]), + Ok(0), + ); +} + +#[test] +fn cleanup_timestamp_missing_input() { + // The logic allows cleaning up "multiple", or more precisely, zero or more, + // stale inputs. This test ensures that cleaning up zero is considered valid. + // Of course there is little reason to do this in real life; it only wastes resources. 
+ + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let peek = vec![newer.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&[], &peek, &[]), + Ok(0), + ); +} + +#[test] +fn cleanup_timestamp_multiple_first_valid_second_invalid() { + let old = Timestamp::new(1 * AlwaysBlockMillion::MINIMUM_TIME_INTERVAL, 1); + let supposedly_old = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![old.into(), supposedly_old.into()]; + let peek = vec![newer.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[]), + Err(DontBeSoHasty) + ); +} + +#[test] +fn cleanup_timestamp_input_is_wrong_type() { + let old = Bogus; + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![old.into()]; + let peek = vec![newer.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[]), + Err(BadlyTyped) + ); +} + +#[test] +fn cleanup_timestamp_reference_is_wrong_type() { + let old = Timestamp::new(1, 1); + + let inp = vec![old.into()]; + let peek = vec![Bogus.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &[]), + Err(BadlyTyped) + ); +} + +#[test] +fn cleanup_timestamp_cannot_create_state() { + let old = Timestamp::new(1, 1); + let newer = Timestamp::new( + 2 * AlwaysBlockMillion::MIN_TIME_BEFORE_CLEANUP, + 2 * AlwaysBlockMillion::MIN_BLOCKS_BEFORE_CLEANUP, + ); + + let inp = vec![old.into()]; + let peek = vec![newer.into()]; + let out = vec![Bogus.into()]; + + assert_eq!( + CleanUpTimestamp::::default().check(&inp, &peek, &out,), + Err(CleanupCannotCreateState) + ); +} diff --git 
a/wardrobe/timestamp/src/first_block_special_case_tests.rs b/wardrobe/timestamp/src/first_block_special_case_tests.rs new file mode 100644 index 000000000..cb19dc0d5 --- /dev/null +++ b/wardrobe/timestamp/src/first_block_special_case_tests.rs @@ -0,0 +1,28 @@ +//! Unit tests for the Timestamp piece. +//! This module tests the "hack / workaround" where we allow setting a timestamp in block #1 +//! without consuming any previous one. I hope to remove this hack by including a timestamp extrinsic +//! in the genesis block. I've asked for some background about that in +//! https://substrate.stackexchange.com/questions/10105/extrinsics-in-genesis-block +//! And also sketched a path toward a timestamp in the genesis block in +//! https://github.com/Off-Narrative-Labs/Tuxedo/issues/107 + +use super::*; + +/// The mock config always says the block number is one. +pub struct AlwaysBlockOne; + +impl TimestampConfig for AlwaysBlockOne { + fn block_height() -> u32 { + 1 + } +} + +#[test] +fn set_timestamp_first_block_happy_path() { + let checker = SetTimestamp::(Default::default()); + + let new: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!(checker.check(&[], &[], &out), Ok(0)); +} diff --git a/wardrobe/timestamp/src/lib.rs b/wardrobe/timestamp/src/lib.rs new file mode 100644 index 000000000..ae1ab38bc --- /dev/null +++ b/wardrobe/timestamp/src/lib.rs @@ -0,0 +1,380 @@ +//! Allow block authors to include a timestamp via an inherent transaction. +//! +//! This is roughly analogous to FRAME's pallet timestamp. It relies on the same client-side inherent data provider, +//! as well as Tuxedo's own previous block inehrent data provider. +//! +//! In each block, the block author must include a single `SetTimestamp` transaction that peeks at the +//! Timestamp UTXO that was created in the previous block, and creates a new one with an updated timestamp. +//! +//! 
This piece currently features two prominent hacks which will need to be cleaned up in due course. +//! 1. It abuses the UpForGrabs verifier. This should be replaced with an Unspendable verifier and an eviction workflow. +//! 2. In block #1 it allows creating a new best timestamp without consuming a previous one. +//! This should be removed once we are able to include a timestamp in the genesis block. + +#![cfg_attr(not(feature = "std"), no_std)] + +use core::marker::PhantomData; + +use parity_scale_codec::{Decode, Encode}; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_core::H256; +use sp_inherents::{CheckInherentsResult, InherentData}; +use sp_runtime::transaction_validity::TransactionPriority; +use sp_std::{vec, vec::Vec}; +use sp_timestamp::InherentError::TooFarInFuture; +use tuxedo_core::{ + dynamic_typing::{DynamicallyTypedData, UtxoData}, + ensure, + inherents::{TuxedoInherent, TuxedoInherentAdapter}, + support_macros::{CloneNoBound, DebugNoBound, DefaultNoBound}, + types::{Output, OutputRef, Transaction}, + verifier::UpForGrabs, + ConstraintChecker, SimpleConstraintChecker, Verifier, +}; + +#[cfg(test)] +mod cleanup_tests; +#[cfg(test)] +mod first_block_special_case_tests; +#[cfg(test)] +mod update_timestamp_tests; + +/// A piece-wide target for logging +const LOG_TARGET: &str = "timestamp-piece"; + +/// A timestamp, since the unix epoch, noted at some point in the history of the chain. +/// It also records the block height in which it was included. +#[derive(Debug, Encode, Decode, PartialEq, Eq, Clone, Copy, Default, PartialOrd, Ord)] +pub struct Timestamp { + /// The time, in milliseconds, since the unix epoch. + pub time: u64, + /// The block number in which this timestamp was noted.
+ pub block: u32, +} + +impl UtxoData for Timestamp { + const TYPE_ID: [u8; 4] = *b"time"; +} + +impl Timestamp { + pub fn new(time: u64, block: u32) -> Self { + Self { time, block } + } +} + +/// Options to configure the timestamp piece in your runtime. +/// Currently we only need access to a block number. +pub trait TimestampConfig { + /// A means of getting the current block height. + /// Probably this will be the Tuxedo Executive + fn block_height() -> u32; + + /// The minimum amount of time by which the timestamp may be updated. + /// + /// The default is 2 seconds which should be slightly lower than most chains' block times. + const MINIMUM_TIME_INTERVAL: u64 = 2_000; + + /// The maximum amount by which a valid block's timestamp may be ahead of an importing + /// node's current local time. + /// + /// Default is 1 minute. + const MAX_DRIFT: u64 = 60_000; + + /// The minimum amount of time that must have passed before an old timestamp + /// may be cleaned up. + /// + /// Default is 1 day. + const MIN_TIME_BEFORE_CLEANUP: u64 = 1000 * 60 * 60 * 24; + + /// The minimum number of blocks that must have passed before an old timestamp + /// may be cleaned up. + /// + /// Default is 15 thousand which is roughly equivalent to 1 day with 6 second + /// block times which is a common default in Substrate chains because of Polkadot. + const MIN_BLOCKS_BEFORE_CLEANUP: u32 = 15_000; +} + +/// Reasons that setting or cleaning up the timestamp may go wrong. +#[derive(Debug, Eq, PartialEq)] +pub enum TimestampError { + /// UTXO data has an unexpected type + BadlyTyped, + /// When attempting to set a new best timestamp, you have not included a new timestamp output. + MissingNewTimestamp, + /// The block height reported in the new timestamp does not match the block into which it was inserted. + NewTimestampWrongHeight, + /// Multiple outputs were specified while setting the timestamp, but exactly one is required. 
+ TooManyOutputsWhileSettingTimestamp, + /// The previous timestamp that is peeked at must be from the immediate ancestor block, but this one is not. + PreviousTimestampWrongHeight, + /// No previous timestamp was peeked at in this transaction, but at least one peek is required. + MissingPreviousTimestamp, + /// Inputs were specified while setting the timestamp, but none are allowed. + InputsWhileSettingTimestamp, + /// The new timestamp is not sufficiently far after the previous (or may even be before it). + TimestampTooOld, + /// When cleaning up old timestamps, you must supply exactly one peek input which is the "new time reference". + /// All the timestamps that will be cleaned up must be at least the CLEANUP_AGE older than this reference. + CleanupRequiresOneReference, + /// When cleaning up old timestamps, you may not create any new state at all. + /// However, you have supplied some new outputs in this transaction. + CleanupCannotCreateState, + /// You may not clean up old timestamps until they are at least the CLEANUP_AGE older than another + /// noted timestamp on-chain. + DontBeSoHasty, +} + +/// A constraint checker for the simple act of setting a new best timestamp. +/// +/// This is expected to be performed through an inherent, and to happen exactly once per block. +/// +/// This transaction consumes no inputs. Instead, it peeks at the previous block's timestamp +/// and creates exactly one new output containing the updated timestamp, which will in turn +/// be peeked at by the next block's inherent. This chaining guarantees +/// that the timestamp is always increasing by at least the minimum. +/// Old timestamps stick around in storage for a while so that other +/// transactions that need to peek at them are not immediately invalidated; they +/// can be voluntarily cleaned up later by another transaction.
+#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Encode, Decode, DebugNoBound, DefaultNoBound, PartialEq, Eq, CloneNoBound, TypeInfo)] +#[scale_info(skip_type_params(T))] +pub struct SetTimestamp(PhantomData); + +impl> ConstraintChecker + for SetTimestamp +{ + type Error = TimestampError; + type InherentHooks = TuxedoInherentAdapter; + + fn check( + &self, + input_data: &[tuxedo_core::types::Output], + peek_data: &[tuxedo_core::types::Output], + output_data: &[tuxedo_core::types::Output], + ) -> Result { + log::debug!( + target: LOG_TARGET, + "🕰️🖴 Checking constraints for SetTimestamp." + ); + + // Make sure there are no inputs. Setting a new timestamp does not consume anything. + ensure!( + input_data.is_empty(), + Self::Error::InputsWhileSettingTimestamp + ); + + // Make sure the only output is a new best timestamp + ensure!(!output_data.is_empty(), Self::Error::MissingNewTimestamp); + let new_timestamp = output_data[0] + .payload + .extract::() + .map_err(|_| Self::Error::BadlyTyped)?; + ensure!( + output_data.len() == 1, + Self::Error::TooManyOutputsWhileSettingTimestamp + ); + + // Make sure the block height from this timestamp matches the current block height. + ensure!( + new_timestamp.block == T::block_height(), + Self::Error::NewTimestampWrongHeight, + ); + + // Next we need to check peeks, but there is a special case for block 1. + // We need to initialize the timestamp in block 1, so there are no requirements on + // the peeks at that height. + if T::block_height() == 1 { + // If this special case remains for a while, we should do some checks here like + // making sure there are no inputs at all. For now, We'll just leave it as is. + log::debug!( + target: LOG_TARGET, + "🕰️🖴 Executing timestamp inherent. Triggering first-block special case." + ); + return Ok(0); + } + + // Make sure there at least one peek that is the previous block's timestamp. + // We don't expect any additional peeks typically, but they are harmless. 
+ ensure!(!peek_data.is_empty(), Self::Error::MissingPreviousTimestamp); + let old_timestamp = peek_data[0] + .payload + .extract::() + .map_err(|_| Self::Error::BadlyTyped)?; + + // Compare the new timestamp to the previous timestamp + ensure!( + new_timestamp.time >= old_timestamp.time + T::MINIMUM_TIME_INTERVAL, + Self::Error::TimestampTooOld + ); + + // Make sure the block height from the previous timestamp matches the previous block height. + ensure!( + new_timestamp.block == old_timestamp.block + 1, + Self::Error::PreviousTimestampWrongHeight, + ); + + Ok(0) + } + + fn is_inherent(&self) -> bool { + true + } +} + +impl, T: TimestampConfig + 'static> TuxedoInherent + for SetTimestamp +{ + type Error = sp_timestamp::InherentError; + const INHERENT_IDENTIFIER: sp_inherents::InherentIdentifier = sp_timestamp::INHERENT_IDENTIFIER; + + fn create_inherent( + authoring_inherent_data: &InherentData, + previous_inherent: Option<(Transaction, H256)>, + ) -> tuxedo_core::types::Transaction { + let current_timestamp: u64 = authoring_inherent_data + .get_data(&sp_timestamp::INHERENT_IDENTIFIER) + .expect("Inherent data should decode properly") + .expect("Timestamp inherent data should be present."); + let new_timestamp = Timestamp { + time: current_timestamp, + block: T::block_height(), + }; + + log::debug!( + target: LOG_TARGET, + "🕰️🖴 Local timestamp while creating inherent i:: {current_timestamp}" + ); + + let mut peeks = Vec::new(); + match (previous_inherent, T::block_height()) { + (None, 1) => { + // This is the first block hack case. + // We don't need any inputs, so just do nothing. + } + (None, _) => panic!("Attemping to construct timestamp inherent with no previous inherent (and not block 1)."), + (Some((_previous_inherent, previous_id)), _) => { + // This is the the normal case. We create a full previous to peek at. + + // We are given the entire previous inherent in case we need data from it or need to scrape the outputs. 
// NOTE(review): this chunk is a git diff (leading '+' markers; an embedded "diff --git"
// header for wardrobe/timestamp/src/update_timestamp_tests.rs) whose newlines were
// collapsed during extraction. Generic parameters were also stripped — e.g.
// "SetTimestamp::(Default::default())" was presumably "SetTimestamp::<AlwaysBlockTwo>(...)"
// and "CleanUpTimestamp(PhantomData)" presumably carried a <T: TimestampConfig> parameter;
// TODO confirm against the original patch. Do not treat this text as compilable Rust.
// Visible pieces: tail of the SetTimestamp inherent-transaction builder, the off-chain
// check_inherent MAX_DRIFT check, the CleanUpTimestamp constraint checker, and unit tests.
+ // But our transactions are simple enough that we know we just need the one and only output. + peeks.push(OutputRef { + tx_hash: previous_id, + // There is always 1 output, so we know right where to find it. + index: 0, + }); + } + } + + let new_output = Output { + payload: new_timestamp.into(), + verifier: UpForGrabs.into(), + }; + + Transaction { + inputs: Vec::new(), + peeks, + outputs: vec![new_output], + checker: Self::default(), + } + } + + fn check_inherent( + importing_inherent_data: &InherentData, + inherent: Transaction, + result: &mut CheckInherentsResult, + ) { + let local_time: u64 = importing_inherent_data + .get_data(&sp_timestamp::INHERENT_IDENTIFIER) + .expect("Inherent data should decode properly") + .expect("Timestamp inherent data should be present."); + + log::debug!( + target: LOG_TARGET, + "🕰️🖴 Local timestamp while checking inherent is: {:#?}", local_time + ); + + let on_chain_timestamp = inherent.outputs[0].payload.extract::().expect( + "SetTimestamp extrinsic should have an output that decodes as a StorableTimestamp.", + ); + + log::debug!( + target: LOG_TARGET, + "🕰️🖴 In-block timestamp is: {:#?}", on_chain_timestamp + ); + + // Although FRAME makes the check for the minimum interval here, we don't. + // We make that check in the on-chain constraint checker. + // That is a deterministic check that all nodes should agree upon and thus it belongs onchain. + // FRAME's checks: github.com/paritytech/polkadot-sdk/blob/945ebbbc/substrate/frame/timestamp/src/lib.rs#L299-L306 + + // Make the comparison for too far in future + if on_chain_timestamp.time > local_time + T::MAX_DRIFT { + log::debug!( + target: LOG_TARGET, + "🕰️🖴 Block timestamp is too far in future. 
About to push an error" + ); + + result + .put_error(sp_timestamp::INHERENT_IDENTIFIER, &TooFarInFuture) + .expect("Should be able to push some error"); + } + } +} + +/// Allows users to voluntarily clean up old timestamps by showing that there +/// exists another timestamp that is at least the CLEANUP_AGE newer. +/// +/// You can clean up multiple timestamps at once, but you only peek at a single +/// new reference. Although it is useless to do so, it is valid for a transaction +/// to clean up zero timestamps. +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Encode, Decode, DebugNoBound, DefaultNoBound, PartialEq, Eq, CloneNoBound, TypeInfo)] +pub struct CleanUpTimestamp(PhantomData); + +impl SimpleConstraintChecker for CleanUpTimestamp { + type Error = TimestampError; + + fn check( + &self, + input_data: &[DynamicallyTypedData], + peek_data: &[DynamicallyTypedData], + output_data: &[DynamicallyTypedData], + ) -> Result { + // Make sure there at least one peek that is the new reference time. + // We don't expect any additional peeks typically, but as above, they are harmless. + ensure!( + !peek_data.is_empty(), + Self::Error::CleanupRequiresOneReference + ); + let new_reference_timestamp = peek_data[0] + .extract::() + .map_err(|_| Self::Error::BadlyTyped)?; + + // Make sure there are no outputs + ensure!( + output_data.is_empty(), + Self::Error::CleanupCannotCreateState + ); + + // Make sure each input is old enough to be cleaned up + // in terms of both time and block height. 
// NOTE(review): the loop below enforces BOTH age conditions for every consumed input:
// the old timestamp must precede the peeked reference time by more than
// T::MIN_TIME_BEFORE_CLEANUP, and its recorded block must precede the current
// T::block_height() by more than T::MIN_BLOCKS_BEFORE_CLEANUP; either failure
// rejects the whole transaction with DontBeSoHasty.
+ for input_datum in input_data { + let old_timestamp = input_datum + .extract::() + .map_err(|_| Self::Error::BadlyTyped)?; + + ensure!( + old_timestamp.time + T::MIN_TIME_BEFORE_CLEANUP < new_reference_timestamp.time, + Self::Error::DontBeSoHasty + ); + ensure!( + old_timestamp.block + T::MIN_BLOCKS_BEFORE_CLEANUP < T::block_height(), + Self::Error::DontBeSoHasty + ); + } + + Ok(0) + } +} diff --git a/wardrobe/timestamp/src/update_timestamp_tests.rs b/wardrobe/timestamp/src/update_timestamp_tests.rs new file mode 100644 index 000000000..e254c1d00 --- /dev/null +++ b/wardrobe/timestamp/src/update_timestamp_tests.rs @@ -0,0 +1,146 @@ +//! Unit tests for the Timestamp piece. +//! This module tests the primary flow of updating the timestamp via an inherent after it has been initialized. + +use super::*; +use tuxedo_core::dynamic_typing::testing::Bogus; +use TimestampError::*; + +/// The mock config always says the block number is two. +pub struct AlwaysBlockTwo; + +impl TimestampConfig for AlwaysBlockTwo { + fn block_height() -> u32 { + 2 + } +} + +#[test] +fn update_timestamp_happy_path() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(3_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!(checker.check(&[], &peek, &out), Ok(0)); +} + +#[test] +fn update_timestamp_with_input() { + let checker = SetTimestamp::(Default::default()); + + let bogus: DynamicallyTypedData = Bogus.into(); + let inp: Vec> = vec![bogus.into()]; + let old: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(3_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!( + checker.check(&inp, &peek, &out), + Err(InputsWhileSettingTimestamp) + ); +} + +#[test] +fn update_timestamp_bogus_peek() { + let checker = 
SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Bogus.into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(3_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!(checker.check(&[], &peek, &out), Err(BadlyTyped)); +} + +#[test] +fn update_timestamp_no_peek() { + let checker = SetTimestamp::(Default::default()); + + let new: DynamicallyTypedData = Timestamp::new(3_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!(checker.check(&[], &[], &out), Err(MissingPreviousTimestamp)); +} + +#[test] +fn update_timestamp_no_output() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let peek: Vec> = vec![old.into()]; + + assert_eq!(checker.check(&[], &peek, &[]), Err(MissingNewTimestamp)); +} + +#[test] +fn update_timestamp_too_many_outputs() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(3_000, 2).into(); + let bogus: DynamicallyTypedData = Bogus.into(); + let out: Vec> = vec![new.into(), bogus.into()]; + + assert_eq!( + checker.check(&[], &peek, &out), + Err(TooManyOutputsWhileSettingTimestamp) + ); +} + +#[test] +fn update_timestamp_wrong_height() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(5_000, 3).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!( + checker.check(&[], &peek, &out), + Err(NewTimestampWrongHeight) + ); +} + +#[test] +fn update_timestamp_output_earlier_than_input() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(2_000, 1).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = 
Timestamp::new(1_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!(checker.check(&[], &peek, &out), Err(TimestampTooOld)); +} + +#[test] +fn update_timestamp_output_newer_than_previous_best_nut_not_enough_to_meet_threshold() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(1_000, 1).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(2_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!(checker.check(&[], &peek, &out), Err(TimestampTooOld)); +} + +#[test] +fn update_timestamp_previous_timestamp_wrong_height() { + let checker = SetTimestamp::(Default::default()); + + let old: DynamicallyTypedData = Timestamp::new(0, 0).into(); + let peek: Vec> = vec![old.into()]; + let new: DynamicallyTypedData = Timestamp::new(2_000, 2).into(); + let out: Vec> = vec![new.into()]; + + assert_eq!( + checker.check(&[], &peek, &out), + Err(PreviousTimestampWrongHeight) + ); +}