diff --git a/crates/sui-core/src/authority.rs b/crates/sui-core/src/authority.rs index 58c6c9d27ce80..24d20cc77322a 100644 --- a/crates/sui-core/src/authority.rs +++ b/crates/sui-core/src/authority.rs @@ -860,7 +860,7 @@ impl AuthorityState { .skip_to(&next_expected_tx) .expect("Seeking batches should never fail at this point") { - let transactions: Vec<(TxSequenceNumber, TransactionDigest)> = state + let transactions: Vec<(TxSequenceNumber, ExecutionDigests)> = state .database .executed_sequence .iter() diff --git a/crates/sui-core/src/authority/authority_notifier.rs b/crates/sui-core/src/authority/authority_notifier.rs index 19868b525dd25..12cb7a713be9c 100644 --- a/crates/sui-core/src/authority/authority_notifier.rs +++ b/crates/sui-core/src/authority/authority_notifier.rs @@ -77,7 +77,7 @@ impl TransactionNotifier { pub fn iter_from( self: &Arc<Self>, next_seq: u64, - ) -> SuiResult<impl Stream<Item = (TxSequenceNumber, TransactionDigest)> + Unpin> { + ) -> SuiResult<impl Stream<Item = (TxSequenceNumber, ExecutionDigests)> + Unpin> { if self .has_stream .compare_exchange( @@ -93,7 +93,7 @@ impl TransactionNotifier { // The state we inject in the async stream let transaction_notifier = self.clone(); - let temp_buffer: VecDeque<(TxSequenceNumber, TransactionDigest)> = VecDeque::new(); + let temp_buffer: VecDeque<(TxSequenceNumber, ExecutionDigests)> = VecDeque::new(); let uniquess_guard = IterUniquenessGuard(transaction_notifier.clone()); let initial_state = (transaction_notifier, temp_buffer, next_seq, uniquess_guard); @@ -238,17 +238,17 @@ mod tests { { let t0 = &notifier.ticket().expect("ok"); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } { let t0 = &notifier.ticket().expect("ok"); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } { let t0 = &notifier.ticket().expect("ok"); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } let mut iter = notifier.iter_from(0).unwrap(); @@ -276,7 +276,7 @@ mod tests { { let t0 = &notifier.ticket().expect("ok"); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } let x = iter.next().await; @@ -293,15 +293,15 @@ mod tests { let t7 = notifier.ticket().expect("ok"); let t8 = notifier.ticket().expect("ok"); - store.side_sequence(t6.seq(), &TransactionDigest::random()); + store.side_sequence(t6.seq(), &ExecutionDigests::random()); drop(t6); - store.side_sequence(t5.seq(), &TransactionDigest::random()); + store.side_sequence(t5.seq(), &ExecutionDigests::random()); drop(t5); drop(t7); - store.side_sequence(t8.seq(), &TransactionDigest::random()); + store.side_sequence(t8.seq(), &ExecutionDigests::random()); drop(t8); assert!(matches!(iter.next().await, Some((5, _)))); diff --git a/crates/sui-core/src/authority/authority_store.rs b/crates/sui-core/src/authority/authority_store.rs index 356c45b5b46fe..7de942d9f0b62 100644 --- a/crates/sui-core/src/authority/authority_store.rs +++ b/crates/sui-core/src/authority/authority_store.rs @@ -100,7 +100,7 @@ pub struct SuiDataStore { // Tables used for authority batch structure /// A sequence on all executed certificates and effects. - pub executed_sequence: DBMap<TxSequenceNumber, TransactionDigest>, + pub executed_sequence: DBMap<TxSequenceNumber, ExecutionDigests>, /// A sequence of batches indexing into the sequence of executed transactions.
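The type at the heart of this change, `ExecutionDigests`, is introduced further down in `base_types.rs`: every entry in the batch stream now names both the executed transaction and the effects it produced. A minimal, self-contained sketch of the pairing, with plain 32-byte arrays standing in for the real digest newtypes:

```rust
// Minimal sketch of the pairing introduced by this change; plain 32-byte arrays
// stand in for the TransactionDigest / TransactionEffectsDigest newtypes.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct ExecutionDigests {
    pub transaction: [u8; 32],
    pub effects: [u8; 32],
}

impl ExecutionDigests {
    pub fn new(transaction: [u8; 32], effects: [u8; 32]) -> Self {
        Self { transaction, effects }
    }
}

fn main() {
    // The batch/checkpoint machinery now carries both digests per executed
    // certificate, so a follower can compare the effects it computed against
    // the effects the authority sequenced, without a second database lookup.
    let entry = ExecutionDigests::new([1; 32], [2; 32]);
    println!("tx = {:?}, fx = {:?}", &entry.transaction[..4], &entry.effects[..4]);
}
```

The real struct derives the same ordering traits, which is what lets it serve as a map and set key throughout the batch and checkpoint stores below.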
pub batches: DBMap, @@ -252,7 +252,7 @@ impl< } #[cfg(test)] - pub fn side_sequence(&self, seq: TxSequenceNumber, digest: &TransactionDigest) { + pub fn side_sequence(&self, seq: TxSequenceNumber, digest: &ExecutionDigests) { self.executed_sequence.insert(&seq, digest).unwrap(); } @@ -561,6 +561,7 @@ impl< )?; // Store the signed effects of the transaction + let effects_digest = effects.effects.digest(); write_batch = write_batch.insert_batch( &self.effects, std::iter::once((transaction_digest, effects)), @@ -575,7 +576,7 @@ impl< write_batch, temporary_store, *transaction_digest, - sequence_number, + sequence_number.map(|seq| (seq, effects_digest)), ) .await } @@ -625,6 +626,7 @@ impl< )?; // Store the unsigned effects of the transaction + let effects_digest = effects.effects.digest(); write_batch = write_batch.insert_batch( &self.effects, std::iter::once((transaction_digest, effects)), @@ -639,7 +641,7 @@ impl< write_batch, temporary_store, *transaction_digest, - Some(sequence_number), + Some((sequence_number, effects_digest)), ) .await } @@ -650,7 +652,7 @@ impl< mut write_batch: DBBatch, temporary_store: AuthorityTemporaryStore, transaction_digest: TransactionDigest, - seq_opt: Option, + seq_opt: Option<(TxSequenceNumber, TransactionEffectsDigest)>, ) -> Result<(), SuiError> { let (objects, active_inputs, written, deleted, _events) = temporary_store.into_inner(); trace!(written =? written.values().map(|((obj_id, ver, _), _)| (obj_id, ver)).collect::>(), @@ -771,7 +773,7 @@ impl< } } - if let Some(next_seq) = seq_opt { + if let Some((next_seq, effects_digest)) = seq_opt { // Now we are sure we are going to execute, add to the sequence // number and insert into authority sequence. // @@ -781,7 +783,10 @@ impl< // full sequence, and the batching logic needs to deal with this. write_batch = write_batch.insert_batch( &self.executed_sequence, - std::iter::once((next_seq, transaction_digest)), + std::iter::once(( + next_seq, + ExecutionDigests::new(transaction_digest, effects_digest), + )), )?; } @@ -922,6 +927,7 @@ impl< .iter() .skip_to(&start)? .take_while(|(seq, _tx)| *seq < end) + .map(|(seq, exec)| (seq, exec.transaction)) .collect()) } @@ -942,7 +948,7 @@ impl< &self, start: u64, end: u64, - ) -> Result<(Vec, Vec<(TxSequenceNumber, TransactionDigest)>), SuiError> { + ) -> Result<(Vec, Vec<(TxSequenceNumber, ExecutionDigests)>), SuiError> { /* Get all batches that include requested transactions. This includes the signed batch prior to the first requested transaction, the batch including the last requested @@ -1002,7 +1008,7 @@ impl< sequence misses items. This will confuse calling logic, so we filter them out and allow callers to use the subscription API to catch the latest items in order. */ - let transactions: Vec<(TxSequenceNumber, TransactionDigest)> = self + let transactions: Vec<(TxSequenceNumber, ExecutionDigests)> = self .executed_sequence .iter() .skip_to(&first_seq)? 
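On the write path above, the effects digest is computed with `effects.effects.digest()` before the effects are stored, then threaded through as an optional `(TxSequenceNumber, TransactionEffectsDigest)` pair so the `executed_sequence` entry can be built inside the same write batch. A rough sketch of that shape, with simplified types and a `Vec` standing in for the typed-store `DBBatch`:

```rust
type TxSequenceNumber = u64;
type TransactionDigest = [u8; 32];
type TransactionEffectsDigest = [u8; 32];

#[derive(Clone, Copy)]
struct ExecutionDigests {
    transaction: TransactionDigest,
    effects: TransactionEffectsDigest,
}

// Shape of the sequencing step at the end of the batched write; a Vec of
// pending inserts stands in for the DBBatch against executed_sequence.
fn sequence_executed(
    pending: &mut Vec<(TxSequenceNumber, ExecutionDigests)>,
    transaction_digest: TransactionDigest,
    seq_opt: Option<(TxSequenceNumber, TransactionEffectsDigest)>,
) {
    if let Some((next_seq, effects_digest)) = seq_opt {
        // Both digests land in one sequence entry; readers that only need the
        // transaction half project it out, as transactions_in_seq_range now
        // does with `.map(|(seq, exec)| (seq, exec.transaction))`.
        pending.push((
            next_seq,
            ExecutionDigests { transaction: transaction_digest, effects: effects_digest },
        ));
    }
    // With None, nothing is appended to the authority's local sequence.
}

fn main() {
    let mut pending = Vec::new();
    sequence_executed(&mut pending, [1; 32], Some((0, [2; 32])));
    sequence_executed(&mut pending, [3; 32], None);
    assert_eq!(pending.len(), 1);
}
```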
diff --git a/crates/sui-core/src/authority_active/checkpoint_driver/mod.rs b/crates/sui-core/src/authority_active/checkpoint_driver/mod.rs index 8b00e3c6a3187..6beed08f30c61 100644 --- a/crates/sui-core/src/authority_active/checkpoint_driver/mod.rs +++ b/crates/sui-core/src/authority_active/checkpoint_driver/mod.rs @@ -9,7 +9,7 @@ use std::{ use parking_lot::Mutex; use sui_types::{ - base_types::{AuthorityName, TransactionDigest}, + base_types::{AuthorityName, ExecutionDigests, TransactionDigest}, error::SuiError, messages::{CertifiedTransaction, ConfirmationTransaction, TransactionInfoRequest}, messages_checkpoint::{ @@ -685,17 +685,17 @@ pub async fn augment_fragment_with_diff_transactions( where A: AuthorityAPI + Send + Sync + 'static + Clone, { - let mut diff_certs: BTreeMap = BTreeMap::new(); + let mut diff_certs: BTreeMap = BTreeMap::new(); // These are the trasnactions that we have that the other validator does not // have, so we can read them from our local database. for tx_digest in &fragment.diff.second.items { let cert = active_authority .state - .read_certificate(tx_digest) + .read_certificate(&tx_digest.transaction) .await? .ok_or(SuiError::CertificateNotfound { - certificate_digest: *tx_digest, + certificate_digest: tx_digest.transaction, })?; diff_certs.insert(*tx_digest, cert); } @@ -707,12 +707,12 @@ where .clone_client(&fragment.other.0.authority); for tx_digest in &fragment.diff.first.items { let response = client - .handle_transaction_info_request(TransactionInfoRequest::from(*tx_digest)) + .handle_transaction_info_request(TransactionInfoRequest::from(tx_digest.transaction)) .await?; let cert = response .certified_transaction .ok_or(SuiError::CertificateNotfound { - certificate_digest: *tx_digest, + certificate_digest: tx_digest.transaction, })?; diff_certs.insert(*tx_digest, cert); } @@ -772,7 +772,11 @@ where for digest in &unprocessed_digests { // If we have processed this continue with the next cert, nothing to do - if active_authority.state.database.effects_exists(digest)? { + if active_authority + .state + .database + .effects_exists(&digest.transaction)? + { continue; } @@ -781,7 +785,7 @@ where if let Err(err) = sync_digest( active_authority.state.name, active_authority.net.clone(), - *digest, + digest.transaction, per_other_authority_delay, ) .await diff --git a/crates/sui-core/src/authority_active/gossip/configurable_batch_action_client.rs b/crates/sui-core/src/authority_active/gossip/configurable_batch_action_client.rs index df1e5a7c4f7f3..8b6d4056cecf6 100644 --- a/crates/sui-core/src/authority_active/gossip/configurable_batch_action_client.rs +++ b/crates/sui-core/src/authority_active/gossip/configurable_batch_action_client.rs @@ -38,7 +38,7 @@ fn fix() { #[derive(Clone)] pub struct TestBatch { - pub digests: Vec, + pub digests: Vec, } #[derive(Clone)] @@ -210,7 +210,7 @@ pub async fn init_configurable_authorities( ) -> ( BTreeMap, Vec>, - Vec, + Vec, ) { let authority_count = 4; let (addr1, key1) = get_key_pair(); @@ -248,7 +248,7 @@ pub async fn init_configurable_authorities( // Execute transactions for every EmitUpdateItem Action, use the digest of the transaction to // create a batch action internal sequence. - let mut executed_digests = Vec::new(); + let mut to_be_executed_digests = Vec::new(); let mut batch_action_internal = Vec::new(); let framework_obj_ref = genesis::get_framework_object_ref(); @@ -269,10 +269,14 @@ pub async fn init_configurable_authorities( } // Add the digest and number to the internal actions. 
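In the fragment exchange above, the per-item key becomes the transaction/effects pair while certificates stay indexed by transaction digest alone, hence the `.transaction` projections when reading or requesting a certificate. A sketch of that lookup shape, with a `String` standing in for `CertifiedTransaction` and a `BTreeMap` for the local certificate table:

```rust
use std::collections::BTreeMap;

type TransactionDigest = [u8; 32];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct ExecutionDigests {
    transaction: TransactionDigest,
    effects: [u8; 32],
}

// Certificates are still looked up by transaction digest only; the diff map
// keeps the full pair as its key so the fragment stays in ExecutionDigests terms.
fn collect_diff_certs(
    local_certs: &BTreeMap<TransactionDigest, String>,
    missing_on_other_side: &[ExecutionDigests],
) -> BTreeMap<ExecutionDigests, String> {
    let mut diff_certs = BTreeMap::new();
    for digests in missing_on_other_side {
        if let Some(cert) = local_certs.get(&digests.transaction) {
            diff_certs.insert(*digests, cert.clone());
        }
    }
    diff_certs
}

fn main() {
    let mut local = BTreeMap::new();
    local.insert([1u8; 32], "cert-1".to_string());
    let wanted = [ExecutionDigests { transaction: [1; 32], effects: [9; 32] }];
    assert_eq!(collect_diff_certs(&local, &wanted).len(), 1);
}
```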
let t_b = TestBatch { - digests: vec![*transaction.digest()], + // TODO: need to put in here the real effects digest + digests: vec![ExecutionDigests::new( + *transaction.digest(), + TransactionEffectsDigest::random(), + )], }; batch_action_internal.push(BatchActionInternal::EmitUpdateItem(t_b)); - executed_digests.push(*transaction.digest()); + to_be_executed_digests.push(*transaction.digest()); } if let BatchAction::EmitError() = action { batch_action_internal.push(BatchActionInternal::EmitError()); @@ -285,8 +289,9 @@ pub async fn init_configurable_authorities( authority_clients.insert(name, client); } + let mut executed_digests = Vec::new(); // Execute certificate for each digest, and register the action sequence on the authorities who executed the certificates. - for digest in executed_digests.clone() { + for digest in to_be_executed_digests.clone() { // Get a cert let authority_clients_ref: Vec<_> = authority_clients.values().collect(); let authority_clients_slice = authority_clients_ref.as_slice(); @@ -298,7 +303,8 @@ pub async fn init_configurable_authorities( // TODO: This only works when every validator has equal stake .take(committee.quorum_threshold() as usize) { - _ = do_cert(cert_client, &cert1).await; + let effects = do_cert(cert_client, &cert1).await; + executed_digests.push(ExecutionDigests::new(digest, effects.digest())); // Register the internal actions to client cert_client diff --git a/crates/sui-core/src/authority_active/gossip/mod.rs b/crates/sui-core/src/authority_active/gossip/mod.rs index 6ebffb18632c6..e36b5206f2857 100644 --- a/crates/sui-core/src/authority_active/gossip/mod.rs +++ b/crates/sui-core/src/authority_active/gossip/mod.rs @@ -238,7 +238,7 @@ where // Upon receiving a transaction digest, store it if it is not processed already. Some(Ok(BatchInfoResponseItem(UpdateItem::Transaction((seq, digest))))) => { - if !self.state.database.effects_exists(&digest)? { + if !self.state.database.effects_exists(&digest.transaction)? { queue.push(async move { tokio::time::sleep(Duration::from_millis(EACH_ITEM_DELAY_MS)).await; digest @@ -267,9 +267,9 @@ where }, digest = &mut queue.next() , if !queue.is_empty() => { let digest = digest.unwrap(); - if !self.state.database.effects_exists(&digest)? { + if !self.state.database.effects_exists(&digest.transaction)? 
{ // Download the certificate - let response = self.client.handle_transaction_info_request(TransactionInfoRequest::from(digest)).await?; + let response = self.client.handle_transaction_info_request(TransactionInfoRequest::from(digest.transaction)).await?; self.process_response(response).await?; } } diff --git a/crates/sui-core/src/authority_active/gossip/tests.rs b/crates/sui-core/src/authority_active/gossip/tests.rs index 43ba79ba1d735..cc11993d94c67 100644 --- a/crates/sui-core/src/authority_active/gossip/tests.rs +++ b/crates/sui-core/src/authority_active/gossip/tests.rs @@ -39,7 +39,7 @@ pub async fn test_gossip() { for digest in &digests { let result1 = client .handle_transaction_info_request(TransactionInfoRequest { - transaction_digest: *digest, + transaction_digest: digest.transaction, }) .await; @@ -78,7 +78,7 @@ pub async fn test_gossip_error() { for digest in &digests { let result1 = client .handle_transaction_info_request(TransactionInfoRequest { - transaction_digest: *digest, + transaction_digest: digest.transaction, }) .await; diff --git a/crates/sui-core/src/authority_batch.rs b/crates/sui-core/src/authority_batch.rs index 5ad76cc425977..fc3a8f15567df 100644 --- a/crates/sui-core/src/authority_batch.rs +++ b/crates/sui-core/src/authority_batch.rs @@ -146,7 +146,7 @@ impl crate::authority::AuthorityState { // The structures we use to build the next batch. The current_batch holds the sequence // of transactions in order, following the last batch. The loose transactions holds // transactions we may have received out of order. - let mut current_batch: Vec<(TxSequenceNumber, TransactionDigest)> = Vec::new(); + let mut current_batch: Vec<(TxSequenceNumber, ExecutionDigests)> = Vec::new(); while !exit { // Reset the flags. diff --git a/crates/sui-core/src/checkpoints/mod.rs b/crates/sui-core/src/checkpoints/mod.rs index 8480e6ccc8179..dcc9d0b58d4d5 100644 --- a/crates/sui-core/src/checkpoints/mod.rs +++ b/crates/sui-core/src/checkpoints/mod.rs @@ -14,7 +14,7 @@ use serde::{Deserialize, Serialize}; use std::{collections::HashSet, path::Path, sync::Arc}; use sui_storage::default_db_options; use sui_types::{ - base_types::{AuthorityName, TransactionDigest}, + base_types::{AuthorityName, ExecutionDigests}, batch::TxSequenceNumber, committee::Committee, error::SuiError, @@ -82,30 +82,30 @@ pub struct CheckpointStore { /// The signature key of the authority. pub secret: StableSyncAuthoritySigner, - /// The list of all transactions that are checkpointed mapping to the checkpoint + /// The list of all transaction/effects that are checkpointed mapping to the checkpoint /// sequence number they were assigned to. pub transactions_to_checkpoint: - DBMap, + DBMap, - /// The mapping from checkpoint to transactions contained within the checkpoint. + /// The mapping from checkpoint to transaction/effects contained within the checkpoint. /// The second part of the key is the local sequence number if the transaction was /// processed or Max(u64) / 2 + offset if not. It allows the authority to store and serve /// checkpoints in a causal order that can be processed in order. (Note the set /// of transactions in the checkpoint is global but not the order.) 
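The checkpoint tables documented above all switch their digest keys to the pair. Below is a toy in-memory model of two of them; the value types here are simplifications, since the real columns are typed-store `DBMap`s whose exact value types are defined in `checkpoints/mod.rs`:

```rust
use std::collections::BTreeMap;

type TxSequenceNumber = u64;
type CheckpointSequenceNumber = u64;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct ExecutionDigests {
    transaction: [u8; 32],
    effects: [u8; 32],
}

#[derive(Default)]
struct CheckpointTables {
    // Pairs already assigned to a checkpoint (value type simplified here).
    transactions_to_checkpoint: BTreeMap<ExecutionDigests, CheckpointSequenceNumber>,
    // Pairs this authority has executed but not yet checkpointed, keyed to
    // their position in the local sequence.
    extra_transactions: BTreeMap<ExecutionDigests, TxSequenceNumber>,
}

impl CheckpointTables {
    // Rough shape of update_processed_transactions: anything not already in a
    // checkpoint is parked in extra_transactions until a proposal picks it up.
    fn update_processed(&mut self, batch: &[(TxSequenceNumber, ExecutionDigests)]) {
        for (seq, digests) in batch {
            if !self.transactions_to_checkpoint.contains_key(digests) {
                self.extra_transactions.insert(*digests, *seq);
            }
        }
    }
}

fn main() {
    let mut tables = CheckpointTables::default();
    let d = ExecutionDigests { transaction: [1; 32], effects: [2; 32] };
    tables.update_processed(&[(7, d)]);
    assert_eq!(tables.extra_transactions.get(&d), Some(&7));
}
```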
- pub checkpoint_contents: DBMap<(CheckpointSequenceNumber, TxSequenceNumber), TransactionDigest>, + pub checkpoint_contents: DBMap<(CheckpointSequenceNumber, TxSequenceNumber), ExecutionDigests>, - /// The set of pending transactions that were included in the last checkpoint + /// The set of pending transaction/effects that were included in the last checkpoint /// but that this authority has not yet processed. - pub unprocessed_transactions: DBMap, - /// The content of transactions we have received through the checkpoint creation process, + pub unprocessed_transactions: DBMap, + /// The content of transaction/effects we have received through the checkpoint creation process, /// that we should process before we move on to make a new proposal. This may be a only /// a subset of the digests contained in `unprocessed_transactions.` - pub unprocessed_contents: DBMap, + pub unprocessed_contents: DBMap, - /// The set of transactions this authority has processed but have not yet been + /// The set of transaction/effects this authority has processed but have not yet been /// included in a checkpoint, and their sequence number in the local sequence /// of this authority. - pub extra_transactions: DBMap, + pub extra_transactions: DBMap, /// The list of checkpoint, along with their authentication information pub checkpoints: DBMap, @@ -244,11 +244,11 @@ impl CheckpointStore { locals, ) = reopen! ( &db, - "transactions_to_checkpoint";, - "checkpoint_contents";<(CheckpointSequenceNumber,TxSequenceNumber),TransactionDigest>, - "unprocessed_transactions";, - "unprocessed_contents";, - "extra_transactions";, + "transactions_to_checkpoint";, + "checkpoint_contents";<(CheckpointSequenceNumber,TxSequenceNumber),ExecutionDigests>, + "unprocessed_transactions";, + "unprocessed_contents";, + "extra_transactions";, "checkpoints";, "local_fragments";, "fragments";, @@ -436,7 +436,7 @@ impl CheckpointStore { pub fn handle_internal_batch( &mut self, next_sequence_number: TxSequenceNumber, - transactions: &[(TxSequenceNumber, TransactionDigest)], + transactions: &[(TxSequenceNumber, ExecutionDigests)], ) -> Result<(), SuiError> { self.update_processed_transactions(transactions)?; @@ -905,7 +905,7 @@ impl CheckpointStore { pub fn update_new_checkpoint( &mut self, seq: CheckpointSequenceNumber, - transactions: &[TransactionDigest], + transactions: &[ExecutionDigests], ) -> Result<(), SuiError> { let batch = self.transactions_to_checkpoint.batch(); self.update_new_checkpoint_inner(seq, transactions, batch)?; @@ -917,7 +917,7 @@ impl CheckpointStore { fn update_new_checkpoint_inner( &mut self, seq: CheckpointSequenceNumber, - transactions: &[TransactionDigest], + transactions: &[ExecutionDigests], batch: DBBatch, ) -> Result<(), SuiError> { // Check that this checkpoint seq is new, and directly follows the last @@ -1019,7 +1019,7 @@ impl CheckpointStore { /// unprocessed transactions (this is the low watermark). fn update_processed_transactions( &mut self, // We take by &mut to prevent concurrent access. 
- transactions: &[(TxSequenceNumber, TransactionDigest)], + transactions: &[(TxSequenceNumber, ExecutionDigests)], ) -> Result { let in_checkpoint = self .transactions_to_checkpoint diff --git a/crates/sui-core/src/checkpoints/proposal.rs b/crates/sui-core/src/checkpoints/proposal.rs index e8726b04cf9bb..ec1efd5d04b76 100644 --- a/crates/sui-core/src/checkpoints/proposal.rs +++ b/crates/sui-core/src/checkpoints/proposal.rs @@ -5,7 +5,7 @@ use std::collections::{BTreeMap, HashSet}; use serde::{Deserialize, Serialize}; use sui_types::{ - base_types::{AuthorityName, TransactionDigest}, + base_types::{AuthorityName, ExecutionDigests}, messages_checkpoint::{ CheckpointContents, CheckpointFragment, CheckpointSequenceNumber, CheckpointSummary, SignedCheckpointProposal, @@ -40,8 +40,8 @@ impl CheckpointProposal { self.proposal.0.checkpoint.sequence_number() } - // Iterate over all transactions - pub fn transactions(&self) -> impl Iterator { + // Iterate over all transaction/effects + pub fn transactions(&self) -> impl Iterator { self.transactions.transactions.iter() } diff --git a/crates/sui-core/src/checkpoints/reconstruction.rs b/crates/sui-core/src/checkpoints/reconstruction.rs index 79bf121184d16..bbe912298d245 100644 --- a/crates/sui-core/src/checkpoints/reconstruction.rs +++ b/crates/sui-core/src/checkpoints/reconstruction.rs @@ -3,9 +3,10 @@ use std::collections::{BTreeMap, HashMap, VecDeque}; +use sui_types::base_types::ExecutionDigests; use sui_types::committee::StakeUnit; use sui_types::{ - base_types::{AuthorityName, TransactionDigest}, + base_types::AuthorityName, committee::Committee, error::SuiError, messages::CertifiedTransaction, @@ -15,8 +16,8 @@ use sui_types::{ pub struct FragmentReconstruction { pub committee: Committee, - pub global: GlobalCheckpoint, - pub extra_transactions: BTreeMap, + pub global: GlobalCheckpoint, + pub extra_transactions: BTreeMap, } impl FragmentReconstruction { diff --git a/crates/sui-core/src/checkpoints/tests/checkpoint_tests.rs b/crates/sui-core/src/checkpoints/tests/checkpoint_tests.rs index 6cb91b6068f75..a4b1c5c8871a7 100644 --- a/crates/sui-core/src/checkpoints/tests/checkpoint_tests.rs +++ b/crates/sui-core/src/checkpoints/tests/checkpoint_tests.rs @@ -89,12 +89,12 @@ fn crash_recovery() { // Do stuff - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); - let t6 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); + let t6 = ExecutionDigests::random(); cps.handle_internal_batch(4, &[(1, t1), (2, t2), (3, t3)]) .unwrap(); @@ -141,12 +141,12 @@ fn make_checkpoint_db() { let (_committee, _keys, mut stores) = random_ckpoint_store(); let (_, mut cps) = stores.pop().unwrap(); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); - let t6 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); + let t6 = ExecutionDigests::random(); cps.update_processed_transactions(&[(1, t1), (2, t2), (3, t3)]) .unwrap(); @@ -188,11 +188,11 @@ 
fn make_proposals() { let (_, mut cps3) = stores.pop().unwrap(); let (_, mut cps4) = stores.pop().unwrap(); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); cps1.update_processed_transactions(&[(1, t2), (2, t3)]) @@ -242,11 +242,11 @@ fn make_diffs() { let (_, mut cps3) = stores.pop().unwrap(); let (_, mut cps4) = stores.pop().unwrap(); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); cps1.update_processed_transactions(&[(1, t2), (2, t3)]) @@ -269,7 +269,7 @@ fn make_diffs() { let diff12 = p1.fragment_with(&p2); let diff23 = p2.fragment_with(&p3); - let mut global = GlobalCheckpoint::::new(); + let mut global = GlobalCheckpoint::::new(); global.insert(diff12.diff.clone()).unwrap(); global.insert(diff23.diff).unwrap(); @@ -296,12 +296,12 @@ fn latest_proposal() { let (_, mut cps3) = stores.pop().unwrap(); let (_, mut cps4) = stores.pop().unwrap(); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); - let t6 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); + let t6 = ExecutionDigests::random(); cps1.update_processed_transactions(&[(1, t2), (2, t3)]) .unwrap(); @@ -479,11 +479,11 @@ fn set_get_checkpoint() { let (_, mut cps3) = stores.pop().unwrap(); let (_, mut cps4) = stores.pop().unwrap(); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); cps1.update_processed_transactions(&[(1, t2), (2, t3)]) @@ -634,7 +634,7 @@ fn checkpoint_integration() { let old_checkpoint = cps.get_locals().next_checkpoint; let some_fresh_transactions: Vec<_> = (0..7) - .map(|_| TransactionDigest::random()) + .map(|_| ExecutionDigests::random()) .chain(unprocessed.clone().into_iter()) .enumerate() .map(|(i, d)| (i as u64 + next_tx_num, d)) @@ -655,7 +655,7 @@ fn checkpoint_integration() { // Step 2. Continue to process transactions while a proposal is out. let some_fresh_transactions: Vec<_> = (0..7) - .map(|_| TransactionDigest::random()) + .map(|_| ExecutionDigests::random()) .enumerate() .map(|(i, d)| (i as u64 + next_tx_num, d)) .collect(); @@ -668,7 +668,7 @@ fn checkpoint_integration() { // Step 3. 
Receive a Checkpoint unprocessed = (0..5) - .map(|_| TransactionDigest::random()) + .map(|_| ExecutionDigests::random()) .into_iter() .chain(some_fresh_transactions.iter().cloned().map(|(_, d)| d)) .collect(); @@ -745,10 +745,10 @@ async fn test_batch_to_checkpointing() { let t2 = &authority_state.batch_notifier.ticket().expect("ok"); let t3 = &authority_state.batch_notifier.ticket().expect("ok"); - store.side_sequence(t1.seq(), &TransactionDigest::random()); - store.side_sequence(t3.seq(), &TransactionDigest::random()); - store.side_sequence(t2.seq(), &TransactionDigest::random()); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t1.seq(), &ExecutionDigests::random()); + store.side_sequence(t3.seq(), &ExecutionDigests::random()); + store.side_sequence(t2.seq(), &ExecutionDigests::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } // Get transactions in order then batch. @@ -837,10 +837,10 @@ async fn test_batch_to_checkpointing_init_crash() { let t2 = &authority_state.batch_notifier.ticket().expect("ok"); let t3 = &authority_state.batch_notifier.ticket().expect("ok"); - store.side_sequence(t1.seq(), &TransactionDigest::random()); - store.side_sequence(t3.seq(), &TransactionDigest::random()); - store.side_sequence(t2.seq(), &TransactionDigest::random()); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t1.seq(), &ExecutionDigests::random()); + store.side_sequence(t3.seq(), &ExecutionDigests::random()); + store.side_sequence(t2.seq(), &ExecutionDigests::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } // Get transactions in order then batch. @@ -924,11 +924,11 @@ fn set_fragment_external() { cps4.set_consensus(Box::new(test_tx)) .expect("No issues setting the consensus"); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); cps1.update_processed_transactions(&[(1, t2), (2, t3)]) @@ -972,11 +972,11 @@ fn set_fragment_reconstruct() { let (_, mut cps3) = test_objects.pop().unwrap(); let (_, mut cps4) = test_objects.pop().unwrap(); - let t1 = TransactionDigest::random(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); - let t4 = TransactionDigest::random(); - let t5 = TransactionDigest::random(); + let t1 = ExecutionDigests::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); + let t4 = ExecutionDigests::random(); + let t5 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); cps1.update_processed_transactions(&[(1, t2), (2, t3)]) @@ -1019,8 +1019,8 @@ fn set_fragment_reconstruct() { fn set_fragment_reconstruct_two_components() { let (committee, _keys, mut test_objects) = random_ckpoint_store_num(2 * 3 + 1); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); for (_, cps) in &mut test_objects { @@ -1071,8 +1071,8 @@ fn set_fragment_reconstruct_two_components() { fn set_fragment_reconstruct_two_mutual() { let (committee, _, mut test_objects) = random_ckpoint_store_num(4); 
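The tests above build proposals whose contents are now sets of `ExecutionDigests`; a fragment between two proposals is, at its core, the pair of one-sided set differences between their item sets (the real `fragment_with` also carries the accumulator waypoints and the two signed proposals). A sketch of that idea with plain `BTreeSet`s:

```rust
use std::collections::BTreeSet;

type ExecutionDigests = ([u8; 32], [u8; 32]); // (transaction, effects), simplified

// The two one-sided differences between proposals: what `first` lacks that
// `second` has, and vice versa. WaypointDiff stores exactly these item sets,
// plus the accumulator waypoints that authenticate them.
fn fragment_items(
    first: &BTreeSet<ExecutionDigests>,
    second: &BTreeSet<ExecutionDigests>,
) -> (BTreeSet<ExecutionDigests>, BTreeSet<ExecutionDigests>) {
    let missing_from_first = second.difference(first).cloned().collect();
    let missing_from_second = first.difference(second).cloned().collect();
    (missing_from_first, missing_from_second)
}

fn main() {
    let a: BTreeSet<_> = [([1; 32], [1; 32]), ([2; 32], [2; 32])].into_iter().collect();
    let b: BTreeSet<_> = [([2; 32], [2; 32]), ([3; 32], [3; 32])].into_iter().collect();
    let (a_needs, b_needs) = fragment_items(&a, &b);
    assert_eq!(a_needs.len(), 1); // a is missing ([3;32], [3;32])
    assert_eq!(b_needs.len(), 1); // b is missing ([1;32], [1;32])
}
```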
- let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); for (_, cps) in &mut test_objects { @@ -1130,8 +1130,8 @@ fn test_fragment_full_flow() { let (test_tx, rx) = TestConsensus::new(); - let t2 = TransactionDigest::random(); - let t3 = TransactionDigest::random(); + let t2 = ExecutionDigests::random(); + let t3 = ExecutionDigests::random(); // let t6 = TransactionDigest::random(); for (_, cps) in &mut test_objects { diff --git a/crates/sui-core/src/generate_format.rs b/crates/sui-core/src/generate_format.rs index 89960b77eaeea..67abd3038e33e 100644 --- a/crates/sui-core/src/generate_format.rs +++ b/crates/sui-core/src/generate_format.rs @@ -11,7 +11,7 @@ use serde_reflection::{Registry, Result, Samples, Tracer, TracerConfig}; use signature::Signer; use std::{fs::File, io::Write}; use sui_types::{ - base_types::{self, ObjectDigest, ObjectID, TransactionDigest}, + base_types::{self, ObjectDigest, ObjectID, TransactionDigest, TransactionEffectsDigest}, batch::UpdateItem, crypto::{get_key_pair, AuthoritySignature, Signature}, error::SuiError, @@ -54,6 +54,9 @@ fn get_registry() -> Result { tracer.trace_value(&mut samples, &od)?; tracer.trace_value(&mut samples, &td)?; + let teff = TransactionEffectsDigest::random(); + tracer.trace_value(&mut samples, &teff)?; + // 2. Trace the main entry point(s) + every enum separately. tracer.trace_type::(&samples)?; tracer.trace_type::(&samples)?; diff --git a/crates/sui-core/src/safe_client.rs b/crates/sui-core/src/safe_client.rs index ca43ffab4fcd3..36eef1efd4b99 100644 --- a/crates/sui-core/src/safe_client.rs +++ b/crates/sui-core/src/safe_client.rs @@ -183,7 +183,7 @@ impl SafeClient { _request: BatchInfoRequest, signed_batch: &SignedBatch, transactions_and_last_batch: &Option<( - Vec<(TxSequenceNumber, TransactionDigest)>, + Vec<(TxSequenceNumber, ExecutionDigests)>, AuthorityBatch, )>, ) -> SuiResult { @@ -263,7 +263,8 @@ where Ok(BatchInfoResponseItem(UpdateItem::Batch(_signed_batch))) => None, Ok(BatchInfoResponseItem(UpdateItem::Transaction((seq, digest)))) => { // Download the full transaction info - let transaction_info_request = TransactionInfoRequest::from(*digest); + let transaction_info_request = + TransactionInfoRequest::from(digest.transaction); let res = _client .handle_transaction_info_request(transaction_info_request) .await diff --git a/crates/sui-core/src/unit_tests/batch_tests.rs b/crates/sui-core/src/unit_tests/batch_tests.rs index bd481baa023a9..b70bee58a977b 100644 --- a/crates/sui-core/src/unit_tests/batch_tests.rs +++ b/crates/sui-core/src/unit_tests/batch_tests.rs @@ -91,7 +91,7 @@ async fn test_open_manager() { store .executed_sequence - .insert(&0, &TransactionDigest::new([0; 32])) + .insert(&0, &ExecutionDigests::random()) .expect("no error on write"); drop(store); drop(authority_state); @@ -113,7 +113,7 @@ async fn test_open_manager() { // TEST 3: If the database contains out of order transactions we just make a block with gaps store .executed_sequence - .insert(&2, &TransactionDigest::new([0; 32])) + .insert(&2, &ExecutionDigests::random()) .expect("no error on write"); drop(store); drop(authority_state); @@ -164,7 +164,7 @@ async fn test_batch_manager_happy_path() { // Send a transaction. 
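Because the new digest rides inside `UpdateItem`, `generate_format.rs` seeds the serde-reflection tracer with a sample value so the type shows up in the staged format file. A minimal sketch of that mechanism; note that the real newtype serializes its 32 bytes as a byte string via `serde_as`, which is why `sui.yaml` records it as `NEWTYPESTRUCT: BYTES` rather than a fixed array:

```rust
use serde::Serialize;
use serde_reflection::{Samples, Tracer, TracerConfig};

// Stand-in for the real newtype; the real one serializes as raw bytes.
#[derive(Serialize)]
struct TransactionEffectsDigest([u8; 32]);

fn main() -> serde_reflection::Result<()> {
    let mut tracer = Tracer::new(TracerConfig::default());
    let mut samples = Samples::new();
    // Tracing a concrete value is enough for the registry to learn the shape
    // of the type; this mirrors the trace_value call added in generate_format.rs.
    tracer.trace_value(&mut samples, &TransactionEffectsDigest([7u8; 32]))?;
    let registry = tracer.registry()?;
    println!("{:#?}", registry.get("TransactionEffectsDigest"));
    Ok(())
}
```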
{ let t0 = &authority_state.batch_notifier.ticket().expect("ok"); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } // First we get a transaction update @@ -179,7 +179,7 @@ async fn test_batch_manager_happy_path() { { let t0 = &authority_state.batch_notifier.ticket().expect("ok"); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } // When we close the sending channel we also also end the service task @@ -226,10 +226,10 @@ async fn test_batch_manager_out_of_order() { let t2 = &authority_state.batch_notifier.ticket().expect("ok"); let t3 = &authority_state.batch_notifier.ticket().expect("ok"); - store.side_sequence(t1.seq(), &TransactionDigest::random()); - store.side_sequence(t3.seq(), &TransactionDigest::random()); - store.side_sequence(t2.seq(), &TransactionDigest::random()); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t1.seq(), &ExecutionDigests::random()); + store.side_sequence(t3.seq(), &ExecutionDigests::random()); + store.side_sequence(t2.seq(), &ExecutionDigests::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); } // Get transactions in order then batch. @@ -292,11 +292,11 @@ async fn test_batch_manager_drop_out_of_order() { let t2 = authority_state.batch_notifier.ticket().expect("ok"); let t3 = authority_state.batch_notifier.ticket().expect("ok"); - store.side_sequence(t1.seq(), &TransactionDigest::random()); + store.side_sequence(t1.seq(), &ExecutionDigests::random()); drop(t1); - store.side_sequence(t3.seq(), &TransactionDigest::random()); + store.side_sequence(t3.seq(), &ExecutionDigests::random()); drop(t3); - store.side_sequence(t2.seq(), &TransactionDigest::random()); + store.side_sequence(t2.seq(), &ExecutionDigests::random()); drop(t2); // Give a chance to send signals @@ -304,7 +304,7 @@ async fn test_batch_manager_drop_out_of_order() { // Still nothing has arrived out of order assert_eq!(rx.len(), 0); - store.side_sequence(t0.seq(), &TransactionDigest::random()); + store.side_sequence(t0.seq(), &ExecutionDigests::random()); drop(t0); // Get transactions in order then batch. 
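The batch tests above depend on the ticketing rule: sequence numbers are reserved up front, writes may land out of order, and only the contiguous prefix is released downstream, while dropped tickets leave gaps the batching logic tolerates. A toy model of that ordering rule, not the real notifier:

```rust
use std::collections::BTreeMap;

type TxSequenceNumber = u64;
type ExecutionDigests = ([u8; 32], [u8; 32]); // simplified (transaction, effects)

struct Notifier {
    next_ticket: TxSequenceNumber,
    low_watermark: TxSequenceNumber, // next sequence number to release
    pending: BTreeMap<TxSequenceNumber, ExecutionDigests>, // written, possibly out of order
}

impl Notifier {
    fn ticket(&mut self) -> TxSequenceNumber {
        let t = self.next_ticket;
        self.next_ticket += 1;
        t
    }

    fn side_sequence(&mut self, seq: TxSequenceNumber, digests: ExecutionDigests) {
        self.pending.insert(seq, digests);
    }

    // Release only the contiguous prefix; out-of-order writes wait until the
    // earlier sequence numbers arrive. (In the real notifier a dropped ticket
    // also advances the watermark, which is why batches may contain gaps.)
    fn drain_ready(&mut self) -> Vec<(TxSequenceNumber, ExecutionDigests)> {
        let mut out = Vec::new();
        while let Some(d) = self.pending.remove(&self.low_watermark) {
            out.push((self.low_watermark, d));
            self.low_watermark += 1;
        }
        out
    }
}

fn main() {
    let mut n = Notifier { next_ticket: 0, low_watermark: 0, pending: BTreeMap::new() };
    let (t0, t1) = (n.ticket(), n.ticket());
    n.side_sequence(t1, ([1; 32], [1; 32]));
    assert!(n.drain_ready().is_empty()); // t0 not yet written
    n.side_sequence(t0, ([0; 32], [0; 32]));
    assert_eq!(n.drain_ready().len(), 2); // both released, in order
}
```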
@@ -367,7 +367,7 @@ async fn test_handle_move_order_with_batch() { println!("{:?}", y); assert!(matches!( y, - UpdateItem::Transaction((0, x)) if x == effects.transaction_digest + UpdateItem::Transaction((0, x)) if x.transaction == effects.transaction_digest )); assert!(matches!(rx.recv().await.unwrap(), UpdateItem::Batch(_))); @@ -399,7 +399,7 @@ async fn test_batch_store_retrieval() { .await }); // Send transactions out of order - let tx_zero = TransactionDigest::new([0; 32]); + let tx_zero = ExecutionDigests::random(); let inner_store = store.clone(); for _i in 0u64..105 { @@ -584,7 +584,7 @@ impl AuthorityAPI for TrustworthyAuthorityClient { while last_batch.next_sequence_number < request.length { let mut transactions = Vec::new(); for _i in 0..batch_size { - let rnd = TransactionDigest::random(); + let rnd = ExecutionDigests::random(); transactions.push((seq, rnd)); items.push(BatchInfoResponseItem(UpdateItem::Transaction((seq, rnd)))); seq += 1; @@ -711,7 +711,7 @@ impl AuthorityAPI for ByzantineAuthorityClient { while last_batch.next_sequence_number < request.length { let mut transactions = Vec::new(); for _i in 0..batch_size { - let rnd = TransactionDigest::random(); + let rnd = ExecutionDigests::random(); transactions.push((seq, rnd)); items.push(BatchInfoResponseItem(UpdateItem::Transaction((seq, rnd)))); seq += 1; @@ -721,7 +721,7 @@ impl AuthorityAPI for ByzantineAuthorityClient { // Pop last transaction let (seq, _) = transactions.pop().unwrap(); // Insert a different one - transactions.push((seq, TransactionDigest::random())); + transactions.push((seq, ExecutionDigests::random())); let new_batch = AuthorityBatch::make_next(&last_batch, &transactions).unwrap(); last_batch = new_batch; diff --git a/crates/sui-core/src/unit_tests/server_tests.rs b/crates/sui-core/src/unit_tests/server_tests.rs index 8db89f524c833..c62931deca45c 100644 --- a/crates/sui-core/src/unit_tests/server_tests.rs +++ b/crates/sui-core/src/unit_tests/server_tests.rs @@ -11,7 +11,7 @@ use crate::{ use futures::StreamExt; use std::sync::Arc; use sui_types::{ - base_types::{dbg_addr, dbg_object_id, TransactionDigest}, + base_types::{dbg_addr, dbg_object_id, ExecutionDigests}, batch::UpdateItem, object::ObjectFormatOptions, }; @@ -118,7 +118,7 @@ async fn test_subscription() { tokio::time::sleep(Duration::from_millis(10)).await; - let tx_zero = TransactionDigest::new([0; 32]); + let tx_zero = ExecutionDigests::random(); for _i in 0u64..105 { let ticket = state.batch_notifier.ticket().expect("all good"); db.executed_sequence @@ -313,7 +313,7 @@ async fn test_subscription_safe_client() { tokio::time::sleep(Duration::from_millis(10)).await; - let tx_zero = TransactionDigest::new([0; 32]); + let tx_zero = ExecutionDigests::random(); for _i in 0u64..105 { let ticket = server.state.batch_notifier.ticket().expect("all good"); db.executed_sequence diff --git a/crates/sui-core/tests/staged/sui.yaml b/crates/sui-core/tests/staged/sui.yaml index 94d7369fe34cf..f9f6386ab94b0 100644 --- a/crates/sui-core/tests/staged/sui.yaml +++ b/crates/sui-core/tests/staged/sui.yaml @@ -55,6 +55,12 @@ Data: Package: NEWTYPE: TYPENAME: MovePackage +ExecutionDigests: + STRUCT: + - transaction: + TYPENAME: TransactionDigest + - effects: + TYPENAME: TransactionEffectsDigest ExecutionStatus: ENUM: 0: @@ -648,6 +654,8 @@ SuiError: - error: STR TransactionDigest: NEWTYPESTRUCT: BYTES +TransactionEffectsDigest: + NEWTYPESTRUCT: BYTES TransactionKind: ENUM: 0: @@ -710,7 +718,7 @@ UpdateItem: NEWTYPE: TUPLE: - U64 - - TYPENAME: TransactionDigest 
+ - TYPENAME: ExecutionDigests 1: Batch: NEWTYPE: diff --git a/crates/sui-types/src/base_types.rs b/crates/sui-types/src/base_types.rs index 193c99874cd4d..57be80b843ab5 100644 --- a/crates/sui-types/src/base_types.rs +++ b/crates/sui-types/src/base_types.rs @@ -10,7 +10,9 @@ use std::str::FromStr; use anyhow::anyhow; use base64ct::Encoding; +use curve25519_dalek::ristretto::RistrettoPoint; use digest::Digest; +use ed25519_dalek::Sha512; use hex::FromHex; use move_core_types::account_address::AccountAddress; use move_core_types::ident_str; @@ -30,6 +32,7 @@ use crate::object::{Object, Owner}; use crate::sui_serde::Base64; use crate::sui_serde::Hex; use crate::sui_serde::Readable; +use crate::waypoint::IntoPoint; #[cfg(test)] #[path = "unit_tests/base_types_tests.rs"] @@ -225,6 +228,12 @@ pub struct TransactionDigest( [u8; TRANSACTION_DIGEST_LENGTH], ); +impl IntoPoint for TransactionDigest { + fn into_point(&self) -> RistrettoPoint { + RistrettoPoint::hash_from_bytes::(&self.0) + } +} + // Each object has a unique digest #[serde_as] #[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema)] @@ -235,13 +244,56 @@ pub struct ObjectDigest( ); // We use SHA3-256 hence 32 bytes here #[serde_as] -#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema)] +#[derive( + Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema, Debug, +)] pub struct TransactionEffectsDigest( #[schemars(with = "Base64")] #[serde_as(as = "Readable")] pub [u8; TRANSACTION_DIGEST_LENGTH], ); +impl TransactionEffectsDigest { + // for testing + pub fn random() -> Self { + let random_bytes = rand::thread_rng().gen::<[u8; 32]>(); + Self(random_bytes) + } +} + +#[derive( + Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema, Debug, +)] +pub struct ExecutionDigests { + pub transaction: TransactionDigest, + pub effects: TransactionEffectsDigest, +} + +impl ExecutionDigests { + pub fn new(transaction: TransactionDigest, effects: TransactionEffectsDigest) -> Self { + Self { + transaction, + effects, + } + } + + pub fn random() -> Self { + Self { + transaction: TransactionDigest::random(), + effects: TransactionEffectsDigest::random(), + } + } +} + +impl IntoPoint for ExecutionDigests { + fn into_point(&self) -> RistrettoPoint { + let mut data = [0; 64]; + data[0..32].clone_from_slice(&self.transaction.0); + data[32..64].clone_from_slice(&self.effects.0); + RistrettoPoint::from_uniform_bytes(&data) + } +} + pub const STD_OPTION_MODULE_NAME: &IdentStr = ident_str!("Option"); pub const STD_OPTION_STRUCT_NAME: &IdentStr = STD_OPTION_MODULE_NAME; diff --git a/crates/sui-types/src/batch.rs b/crates/sui-types/src/batch.rs index 95948a6431e44..26b0ba1c70dca 100644 --- a/crates/sui-types/src/batch.rs +++ b/crates/sui-types/src/batch.rs @@ -1,24 +1,24 @@ // Copyright (c) 2022, Mysten Labs, Inc. 
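The `IntoPoint` implementations above are what let digests feed the waypoint accumulator: each item maps to a Ristretto point, and set membership becomes point addition, which commutes. A self-contained sketch of the two constructions, assuming compatible versions of the `curve25519-dalek` and `sha2` crates (the diff itself pulls `Sha512` through `ed25519-dalek`):

```rust
use curve25519_dalek::ristretto::RistrettoPoint;
use sha2::Sha512;

fn tx_point(tx_digest: &[u8; 32]) -> RistrettoPoint {
    // TransactionDigest::into_point hashes the 32 digest bytes onto the curve.
    RistrettoPoint::hash_from_bytes::<Sha512>(tx_digest)
}

fn execution_point(tx_digest: &[u8; 32], fx_digest: &[u8; 32]) -> RistrettoPoint {
    // ExecutionDigests::into_point concatenates both digests and uses the
    // 64-byte uniform-bytes constructor instead of re-hashing.
    let mut data = [0u8; 64];
    data[0..32].copy_from_slice(tx_digest);
    data[32..64].copy_from_slice(fx_digest);
    RistrettoPoint::from_uniform_bytes(&data)
}

fn main() {
    let (a, b) = ([1u8; 32], [2u8; 32]);
    let _single = tx_point(&a);
    let p = execution_point(&a, &b) + execution_point(&b, &a);
    let q = execution_point(&b, &a) + execution_point(&a, &b);
    // Point addition commutes, which is what lets two authorities compare
    // accumulated waypoints over sets received in different orders.
    assert_eq!(p.compress(), q.compress());
}
```

Because the effects digest participates in the point, two authorities that executed the same transaction but computed different effects will not agree on the resulting waypoint.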
// SPDX-License-Identifier: Apache-2.0 -use crate::base_types::{AuthorityName, TransactionDigest}; +use crate::base_types::{AuthorityName, ExecutionDigests}; use crate::crypto::{sha3_hash, AuthoritySignature, BcsSignable}; use crate::error::SuiError; use serde::{Deserialize, Serialize}; pub type TxSequenceNumber = u64; -/// Either a freshly sequenced transaction hash or a batch +/// Either a freshly sequenced transaction/effects tuple of hashes or a batch #[derive(Eq, PartialEq, Clone, Debug, Serialize, Deserialize)] pub enum UpdateItem { - Transaction((TxSequenceNumber, TransactionDigest)), + Transaction((TxSequenceNumber, ExecutionDigests)), Batch(SignedBatch), } pub type BatchDigest = [u8; 32]; #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Hash, Default, Debug, Serialize, Deserialize)] -pub struct TransactionBatch(pub Vec<(TxSequenceNumber, TransactionDigest)>); +pub struct TransactionBatch(pub Vec<(TxSequenceNumber, ExecutionDigests)>); impl BcsSignable for TransactionBatch {} #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Default, Debug, Serialize, Deserialize)] @@ -62,11 +62,11 @@ impl AuthorityBatch { } } - /// Make a batch, containing some transactions, and following the previous + /// Make a batch, containing some transaction/effects, and following the previous /// batch. pub fn make_next( previous_batch: &AuthorityBatch, - transactions: &[(TxSequenceNumber, TransactionDigest)], + transactions: &[(TxSequenceNumber, ExecutionDigests)], ) -> Result { let transaction_vec = transactions.to_vec(); if transaction_vec.is_empty() { diff --git a/crates/sui-types/src/messages_checkpoint.rs b/crates/sui-types/src/messages_checkpoint.rs index 278065ee43a57..ee2c0c12e5693 100644 --- a/crates/sui-types/src/messages_checkpoint.rs +++ b/crates/sui-types/src/messages_checkpoint.rs @@ -3,11 +3,12 @@ use std::collections::{BTreeMap, BTreeSet, HashSet}; +use crate::base_types::ExecutionDigests; use crate::crypto::Signable; use crate::messages::CertifiedTransaction; use crate::waypoint::{Waypoint, WaypointDiff}; use crate::{ - base_types::{AuthorityName, TransactionDigest}, + base_types::AuthorityName, committee::Committee, crypto::{sha3_hash, AuthoritySignature, BcsSignable, VerificationObligation}, error::SuiError, @@ -390,7 +391,7 @@ impl CertifiedCheckpoint { #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CheckpointContents { - pub transactions: BTreeSet, + pub transactions: BTreeSet, } impl BcsSignable for CheckpointContents {} @@ -398,7 +399,7 @@ impl BcsSignable for CheckpointContents {} impl CheckpointContents { pub fn new(contents: T) -> CheckpointContents where - T: Iterator, + T: Iterator, { CheckpointContents { transactions: contents.collect(), @@ -415,8 +416,8 @@ impl CheckpointContents { pub struct CheckpointFragment { pub proposer: SignedCheckpointProposal, pub other: SignedCheckpointProposal, - pub diff: WaypointDiff, - pub certs: BTreeMap, + pub diff: WaypointDiff, + pub certs: BTreeMap, } impl CheckpointFragment { @@ -478,7 +479,7 @@ mod tests { let (authority_key, _committee) = make_committee_key(&mut rng); let name = authority_key[0].public_key_bytes(); - let set = [TransactionDigest::random()]; + let set = [ExecutionDigests::random()]; let set = CheckpointContents::new(set.iter().cloned()); let mut proposal = SignedCheckpoint::new(1, *name, &authority_key[0], &set); @@ -489,7 +490,7 @@ mod tests { // Error on different transactions let contents = CheckpointContents { - transactions: [TransactionDigest::random()].into_iter().collect(), + transactions: 
[ExecutionDigests::random()].into_iter().collect(), }; assert!(proposal.verify_with_transactions(&contents).is_err()); @@ -503,7 +504,7 @@ mod tests { let mut rng = StdRng::from_seed(RNG_SEED); let (keys, committee) = make_committee_key(&mut rng); - let set = [TransactionDigest::random()]; + let set = [ExecutionDigests::random()]; let set = CheckpointContents::new(set.iter().cloned()); let signed_checkpoints: Vec<_> = keys @@ -528,7 +529,7 @@ mod tests { .iter() .map(|k| { let name = k.public_key_bytes(); - let set: BTreeSet<_> = [TransactionDigest::random()].into_iter().collect(); + let set: BTreeSet<_> = [ExecutionDigests::random()].into_iter().collect(); let set = CheckpointContents::new(set.iter().cloned()); SignedCheckpoint::new(1, *name, k, &set) diff --git a/crates/sui-types/src/unit_tests/waypoint_tests.rs b/crates/sui-types/src/unit_tests/waypoint_tests.rs index 27c8485376d96..1c184f6431eae 100644 --- a/crates/sui-types/src/unit_tests/waypoint_tests.rs +++ b/crates/sui-types/src/unit_tests/waypoint_tests.rs @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use super::*; +use ed25519_dalek::Sha512; use rand::Rng; #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] @@ -19,6 +20,18 @@ fn make_item() -> Item { Item(item) } +impl From<&Item> for RistrettoPoint { + fn from(other: &Item) -> RistrettoPoint { + RistrettoPoint::hash_from_bytes::(&other.0) + } +} + +impl IntoPoint for Item { + fn into_point(&self) -> RistrettoPoint { + RistrettoPoint::hash_from_bytes::(&self.0) + } +} + #[test] fn test_diff() { let mut first = Waypoint::default(); diff --git a/crates/sui-types/src/waypoint.rs b/crates/sui-types/src/waypoint.rs index a3c90100d8cc9..4396c2776406d 100644 --- a/crates/sui-types/src/waypoint.rs +++ b/crates/sui-types/src/waypoint.rs @@ -9,7 +9,6 @@ use thiserror::Error; use std::collections::{BTreeMap, BTreeSet}; use curve25519_dalek::ristretto::RistrettoPoint; -use ed25519_dalek::Sha512; use crate::committee::StakeUnit; use crate::{base_types::AuthorityName, committee::Committee}; @@ -35,6 +34,20 @@ impl WaypointError { } } +#[allow(clippy::wrong_self_convention)] +pub trait IntoPoint { + fn into_point(&self) -> RistrettoPoint; +} + +impl IntoPoint for &T +where + T: IntoPoint, +{ + fn into_point(&self) -> RistrettoPoint { + (*self).into_point() + } +} + /* A MulHash accumulator: each element is mapped to a point on an elliptic curve on which the DL problem is @@ -53,20 +66,20 @@ impl Accumulator { /// Insert one item in the accumulator pub fn insert(&mut self, item: &I) where - I: AsRef<[u8]>, + I: IntoPoint, { - let point = RistrettoPoint::hash_from_bytes::(item.as_ref()); + let point: RistrettoPoint = item.into_point(); self.accumulator += point; } // Insert all items from an iterator into the accumulator - pub fn insert_all<'a, It>(&'a mut self, items: It) + pub fn insert_all<'a, I, It>(&'a mut self, items: It) where - It: IntoIterator, - It::Item: 'a + AsRef<[u8]>, + It: 'a + IntoIterator, + I: 'a + IntoPoint, { for i in items { - self.insert(&i); + self.insert(i); } } } @@ -106,7 +119,8 @@ where impl WaypointWithItems where - I: AsRef<[u8]> + Ord, + K: 'static, + I: 'static + Ord, { pub fn new(key: K) -> WaypointWithItems { WaypointWithItems { @@ -115,7 +129,12 @@ where items: BTreeSet::new(), } } +} +impl WaypointWithItems +where + I: IntoPoint + Ord, +{ /// Insert an element in the accumulator and list of items pub fn insert_full(&mut self, item: I) { self.waypoint.insert(&item); @@ -130,7 +149,7 @@ where #[derive(Clone, Debug, Serialize, Deserialize)] pub struct 
WaypointDiff where - I: AsRef<[u8]> + Ord, + I: Ord, { pub first: WaypointWithItems, pub second: WaypointWithItems, @@ -138,7 +157,8 @@ where impl WaypointDiff where - I: AsRef<[u8]> + Ord, + K: 'static, + I: 'static + Ord, { pub fn new( first_key: K, @@ -176,7 +196,13 @@ where second: self.first, } } +} +impl<'a, K, I> WaypointDiff +where + I: 'static + Ord + IntoPoint, + K: 'static, +{ /// Check the internal invariants: ie that adding to both /// waypoints the missing elements makes them point to the /// accumulated same set. @@ -207,8 +233,7 @@ where impl Default for GlobalCheckpoint where - K: Eq + Ord + Clone, - I: AsRef<[u8]> + Ord + Clone, + I: Ord, { fn default() -> Self { Self::new() @@ -217,8 +242,7 @@ where impl GlobalCheckpoint where - K: Eq + Ord + Clone, - I: AsRef<[u8]> + Ord + Clone, + I: Ord, { /// Initializes an empty global checkpoint at a specific /// sequence number. @@ -228,6 +252,121 @@ where authority_waypoints: BTreeMap::new(), } } +} + +impl GlobalCheckpoint +where + I: AsRef<[u8]> + Ord, +{ + /// In case keys are authority names we can check if the set of + /// authorities represented in this checkpoint represent a quorum + pub fn has_quorum(&self, committee: &Committee) -> bool { + let authority_weights: StakeUnit = self + .authority_waypoints + .keys() + .map(|name| committee.weight(name)) + .sum(); + authority_weights >= committee.quorum_threshold() + } +} + +impl GlobalCheckpoint +where + K: 'static + Eq + Ord + Clone, + I: 'static + Ord + Clone + IntoPoint, +{ + /// Checks the internal invariants of the checkpoint, namely that + /// all the contained waypoints + the associated items lead to the + /// reference waypoint. + pub fn check(&self) -> bool { + let root = self.reference_waypoint.clone(); + for v in self.authority_waypoints.values() { + let mut inner_root = v.waypoint.clone(); + inner_root.insert_all(v.items.iter()); + + if inner_root != root { + return false; + } + } + true + } + + /// Given our proposal, a waypoint us-other, and a global checkpoint + /// that either contains us, or the other, what is the actual set of + /// items in the checkpoint? + pub fn checkpoint_items( + &self, + diff: &WaypointDiff, + mut own_proposal: BTreeSet, + ) -> Result, WaypointError> { + // Case 1 -- we are in the checkpoint (easy) + + // If the authority is one of the participants in the checkpoint + // just add our proposal to the diff with the global waypoint, and + // this is the checkpoint. + if self.authority_waypoints.contains_key(&diff.first.key) { + let mut all_elements = self.authority_waypoints[&diff.first.key].items.clone(); + all_elements.extend(own_proposal); + return Ok(all_elements); + } + + // Case 2 -- the other side of our diff is in the checkpoint (harder) + + // If not then we need to compute the difference. + if !self.authority_waypoints.contains_key(&diff.second.key) { + return Err(WaypointError::generic( + "Need the second key at least to link into the checkpoint.".to_string(), + )); + } + + // Union of items, to catch up with second + own_proposal.extend(diff.first.items.clone()); + // Remove items not in second + let mut second_items: BTreeSet = own_proposal + .difference(&diff.second.items) + .cloned() + .collect(); + // Add items from second to global checkpoint + second_items.extend(self.authority_waypoints[&diff.second.key].items.clone()); + + Ok(second_items) + } + + /// Provides the set of element that need to be added to the first party + /// to catch up with the checkpoint. 
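The trait-bound reshuffling above all serves the MulHash accumulator: `insert` maps an item onto the curve through `IntoPoint` and adds the point, so two authorities that processed the same set in different orders arrive at the same waypoint. A compact sketch, with a byte-array `IntoPoint` impl standing in for the digest types:

```rust
use curve25519_dalek::ristretto::RistrettoPoint;
use curve25519_dalek::traits::Identity;
use sha2::Sha512;

// The trait the diff introduces so non-byte types (like ExecutionDigests)
// can choose their own mapping onto the curve.
trait IntoPoint {
    fn into_point(&self) -> RistrettoPoint;
}

impl IntoPoint for [u8; 32] {
    fn into_point(&self) -> RistrettoPoint {
        RistrettoPoint::hash_from_bytes::<Sha512>(self)
    }
}

#[derive(Clone, PartialEq)]
struct Accumulator {
    accumulator: RistrettoPoint,
}

impl Accumulator {
    fn new() -> Self {
        Self { accumulator: RistrettoPoint::identity() }
    }
    // Mirrors Accumulator::insert above: map the item to a point and add it.
    fn insert<I: IntoPoint>(&mut self, item: &I) {
        self.accumulator += item.into_point();
    }
}

fn main() {
    let (a, b, c) = ([1u8; 32], [2u8; 32], [3u8; 32]);
    let mut w1 = Accumulator::new();
    let mut w2 = Accumulator::new();
    for x in [&a, &b, &c] { w1.insert(x); }
    for x in [&c, &a, &b] { w2.insert(x); }
    // Same set, different insertion order: the waypoints agree.
    assert!(w1 == w2);
}
```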
+ pub fn catch_up_items(&self, diff: WaypointDiff) -> Result, WaypointError> { + // If the authority is one of the participants in the checkpoint + // just read the different. + if self.authority_waypoints.contains_key(&diff.first.key) { + return Ok(self.authority_waypoints[&diff.first.key].items.clone()); + } + + // If not then we need to compute the difference. + if !self.authority_waypoints.contains_key(&diff.second.key) { + return Err(WaypointError::generic( + "Need the second key at least to link into the checkpoint.".to_string(), + )); + } + let item_sum: BTreeSet<_> = diff + .first + .items + .union(&self.authority_waypoints[&diff.second.key].items) + .cloned() + .collect(); + let item_sum: BTreeSet<_> = item_sum.difference(&diff.second.items).cloned().collect(); + + // The root after we add the extra items should be the same as if we constructed + // a checkpoint including the first waypoint. + debug_assert!({ + let mut first_root = diff.first.waypoint.clone(); + first_root.insert_all(item_sum.iter()); + + let mut ck2 = self.clone(); + ck2.insert(diff.swap()).is_ok() && first_root == ck2.reference_waypoint + }); + + Ok(item_sum) + } /// Inserts a waypoint diff into the checkpoint. If the checkpoint /// is empty both ends of the diff are inserted, and the reference @@ -306,113 +445,4 @@ where Ok(()) } - - /// Checks the internal invariants of the checkpoint, namely that - /// all the contained waypoints + the associated items lead to the - /// reference waypoint. - pub fn check(&self) -> bool { - let root = self.reference_waypoint.clone(); - for v in self.authority_waypoints.values() { - let mut inner_root = v.waypoint.clone(); - inner_root.insert_all(v.items.iter()); - - if inner_root != root { - return false; - } - } - true - } - - /// Provides the set of element that need to be added to the first party - /// to catch up with the checkpoint. - pub fn catch_up_items(&self, diff: WaypointDiff) -> Result, WaypointError> { - // If the authority is one of the participants in the checkpoint - // just read the different. - if self.authority_waypoints.contains_key(&diff.first.key) { - return Ok(self.authority_waypoints[&diff.first.key].items.clone()); - } - - // If not then we need to compute the difference. - if !self.authority_waypoints.contains_key(&diff.second.key) { - return Err(WaypointError::generic( - "Need the second key at least to link into the checkpoint.".to_string(), - )); - } - let item_sum: BTreeSet<_> = diff - .first - .items - .union(&self.authority_waypoints[&diff.second.key].items) - .cloned() - .collect(); - let item_sum: BTreeSet<_> = item_sum.difference(&diff.second.items).cloned().collect(); - - // The root after we add the extra items should be the same as if we constructed - // a checkpoint including the first waypoint. - debug_assert!({ - let mut first_root = diff.first.waypoint.clone(); - first_root.insert_all(item_sum.iter()); - - let mut ck2 = self.clone(); - ck2.insert(diff.swap()).is_ok() && first_root == ck2.reference_waypoint - }); - - Ok(item_sum) - } - - /// Given our proposal, a waypoint us-other, and a global checkpoint - /// that either contains us, or the other, what is the actual set of - /// items in the checkpoint? - pub fn checkpoint_items( - &self, - diff: &WaypointDiff, - mut own_proposal: BTreeSet, - ) -> Result, WaypointError> { - // Case 1 -- we are in the checkpoint (easy) - - // If the authority is one of the participants in the checkpoint - // just add our proposal to the diff with the global waypoint, and - // this is the checkpoint. 
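`checkpoint_items` is pure set algebra over the per-authority item lists. Restated with plain `BTreeSet`s, here is the second case of the method above, where only the other end of our diff is already recorded in the checkpoint:

```rust
use std::collections::BTreeSet;

type Item = ([u8; 32], [u8; 32]); // simplified ExecutionDigests

// Given our proposal, the two sides of the diff with `other`, and the items the
// checkpoint already lists for `other`, reconstruct the checkpoint contents.
fn checkpoint_items_via_other(
    mut own_proposal: BTreeSet<Item>,
    missing_from_us: &BTreeSet<Item>,    // diff.first.items
    missing_from_other: &BTreeSet<Item>, // diff.second.items
    checkpoint_items_for_other: &BTreeSet<Item>,
) -> BTreeSet<Item> {
    // Catch up with `other`: add what we were missing...
    own_proposal.extend(missing_from_us.iter().cloned());
    // ...drop what only we have...
    let mut items: BTreeSet<Item> = own_proposal
        .difference(missing_from_other)
        .cloned()
        .collect();
    // ...then add what the global checkpoint records as still missing for `other`.
    items.extend(checkpoint_items_for_other.iter().cloned());
    items
}

fn main() {
    let ours: BTreeSet<Item> = [([1; 32], [1; 32]), ([2; 32], [2; 32])].into_iter().collect();
    let we_lack: BTreeSet<Item> = [([3; 32], [3; 32])].into_iter().collect();
    let they_lack: BTreeSet<Item> = [([2; 32], [2; 32])].into_iter().collect();
    let ckpt_for_other: BTreeSet<Item> = [([4; 32], [4; 32])].into_iter().collect();
    let items = checkpoint_items_via_other(ours, &we_lack, &they_lack, &ckpt_for_other);
    assert_eq!(items.len(), 3); // our exclusive item 2 is not in this checkpoint
}
```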
- if self.authority_waypoints.contains_key(&diff.first.key) { - let mut all_elements = self.authority_waypoints[&diff.first.key].items.clone(); - all_elements.extend(own_proposal); - return Ok(all_elements); - } - - // Case 2 -- the other side of our diff is in the checkpoint (harder) - - // If not then we need to compute the difference. - if !self.authority_waypoints.contains_key(&diff.second.key) { - return Err(WaypointError::generic( - "Need the second key at least to link into the checkpoint.".to_string(), - )); - } - - // Union of items, to catch up with second - own_proposal.extend(diff.first.items.clone()); - // Remove items not in second - let mut second_items: BTreeSet = own_proposal - .difference(&diff.second.items) - .cloned() - .collect(); - // Add items from second to global checkpoint - second_items.extend(self.authority_waypoints[&diff.second.key].items.clone()); - - Ok(second_items) - } -} - -impl GlobalCheckpoint -where - I: AsRef<[u8]> + Ord, -{ - /// In case keys are authority names we can check if the set of - /// authorities represented in this checkpoint represent a quorum - pub fn has_quorum(&self, committee: &Committee) -> bool { - let authority_weights: StakeUnit = self - .authority_waypoints - .keys() - .map(|name| committee.weight(name)) - .sum(); - authority_weights >= committee.quorum_threshold() - } } diff --git a/crates/sui/tests/full_node_tests.rs b/crates/sui/tests/full_node_tests.rs index 47db7f05fd7c5..40b48430929dd 100644 --- a/crates/sui/tests/full_node_tests.rs +++ b/crates/sui/tests/full_node_tests.rs @@ -78,7 +78,7 @@ async fn wait_for_tx(wait_digest: TransactionDigest, state: Arc) // Upon receiving a transaction digest we store it, if it is not processed already. Some(Ok(BatchInfoResponseItem(UpdateItem::Transaction((_seq, digest))))) => { info!(?digest, "Received Transaction"); - if wait_digest == digest { + if wait_digest == digest.transaction { info!(?digest, "Digest found"); break; }
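Consumers of the batch stream, like `wait_for_tx` above, now match on the pair and compare only the transaction half. A schematic of that check, with an enum standing in for `UpdateItem` and simplified digest types:

```rust
type TransactionDigest = [u8; 32];

#[derive(Clone, Copy)]
struct ExecutionDigests {
    transaction: TransactionDigest,
    effects: [u8; 32],
}

// Stand-in for the batch stream's UpdateItem.
enum UpdateItem {
    Transaction((u64, ExecutionDigests)),
    Batch(u64), // simplified: just the batch's next sequence number
}

// Returns true once the awaited transaction shows up in the stream, mirroring
// `wait_digest == digest.transaction` in wait_for_tx.
fn saw_transaction(items: &[UpdateItem], wait_digest: TransactionDigest) -> bool {
    items.iter().any(|item| match item {
        UpdateItem::Transaction((_seq, digests)) => digests.transaction == wait_digest,
        UpdateItem::Batch(_) => false,
    })
}

fn main() {
    let items = [
        UpdateItem::Transaction((0, ExecutionDigests { transaction: [1; 32], effects: [7; 32] })),
        UpdateItem::Batch(1),
    ];
    assert!(saw_transaction(&items, [1; 32]));
    assert!(!saw_transaction(&items, [2; 32]));
}
```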