declone and close the door (paritytech#12035)
* declone and close the door

* cargo fmt

* remove brackets
gilescope authored Aug 15, 2022
1 parent 90c8ac3 commit 20b5aac
Showing 72 changed files with 344 additions and 512 deletions.
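Most of the diff below removes `.clone()` calls on values that are `Copy` — the pattern `clippy::clone_on_copy` flags. A minimal sketch of the before/after, using an illustrative `Hash` stand-in rather than the actual Substrate types:

```rust
// Illustrative only: `Hash` stands in for the Copy types touched across the diff.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Hash([u8; 4]);

fn main() {
    let block1 = (vec![1u8, 2, 3], Hash(*b"abcd"));

    // Before: a redundant clone of a Copy field (what clippy::clone_on_copy flags).
    let parent_a = block1.1.clone();

    // After: Copy values are duplicated implicitly; no clone call needed.
    let parent_b = block1.1;

    assert_eq!(parent_a, parent_b);
}
```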
2 changes: 0 additions & 2 deletions .cargo/config.toml
@@ -12,8 +12,6 @@ rustflags = [
"-Aclippy::if-same-then-else",
"-Aclippy::clone-double-ref",
"-Dclippy::complexity",
"-Aclippy::clone_on_copy", # Too common
"-Aclippy::needless_lifetimes", # Backward compat?
"-Aclippy::zero-prefixed-literal", # 00_1000_000
"-Aclippy::type_complexity", # raison d'etre
"-Aclippy::nonminimal-bool", # maybe
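Dropping these two `-A` flags is the "close the door" part of the commit: with the allows gone, the lints are enforced again (presumably via the `-Dclippy::complexity` deny already in the list, assuming both lints sit in that group). A hedged sketch of what is now rejected versus accepted:

```rust
// Hedged illustration: with the two allow flags gone, these lints are no longer
// silenced project-wide. The crate-level deny below emulates that effect for
// this snippet when it is run through `cargo clippy`.
#![deny(clippy::clone_on_copy, clippy::needless_lifetimes)]

#[derive(Clone, Copy)]
struct Weight(u64);

// Passes both lints: no explicit lifetime, no clone of a Copy value.
fn double(w: &Weight) -> Weight {
    Weight(w.0 * 2)
}

// Re-introducing either pattern would now fail clippy, e.g.:
//   fn double<'a>(w: &'a Weight) -> Weight { ... }   // needless_lifetimes
//   let w2 = w.clone();  // clone_on_copy, since `*w` is Copy

fn main() {
    assert_eq!(double(&Weight(21)).0, 42);
}
```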
2 changes: 1 addition & 1 deletion bin/node/executor/tests/basic.rs
@@ -127,7 +127,7 @@ fn blocks() -> ((Vec<u8>, Hash), (Vec<u8>, Hash)) {
let block2 = construct_block(
&mut t,
2,
-block1.1.clone(),
+block1.1,
vec![
CheckedExtrinsic {
signed: None,
2 changes: 1 addition & 1 deletion bin/node/executor/tests/fees.rs
@@ -73,7 +73,7 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() {
let block2 = construct_block(
&mut tt,
2,
-block1.1.clone(),
+block1.1,
vec![
CheckedExtrinsic {
signed: None,
12 changes: 6 additions & 6 deletions bin/node/runtime/src/impls.rs
@@ -197,12 +197,12 @@ mod multiplier_tests {
fn truth_value_update_poc_works() {
let fm = Multiplier::saturating_from_rational(1, 2);
let test_set = vec![
-(0, fm.clone()),
-(100, fm.clone()),
-(1000, fm.clone()),
-(target(), fm.clone()),
-(max_normal() / 2, fm.clone()),
-(max_normal(), fm.clone()),
+(0, fm),
+(100, fm),
+(1000, fm),
+(target(), fm),
+(max_normal() / 2, fm),
+(max_normal(), fm),
];
test_set.into_iter().for_each(|(w, fm)| {
run_with_system_weight(w, || {
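The repeated `fm` needs no clones only because the multiplier is a `Copy` fixed-point value; each tuple gets its own copy rather than moving it. A small sketch with a stand-in type:

```rust
// Sketch of why repeating `fm` needs no clones: a Copy value is duplicated at
// each use instead of being moved. `Multiplier` here is an illustrative
// stand-in for the runtime's Copy fixed-point type.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Multiplier(u64);

fn main() {
    let fm = Multiplier(5);
    // Every tuple receives its own copy of `fm`, and `fm` stays usable after.
    let test_set = vec![(0u64, fm), (100, fm), (1000, fm)];
    assert!(test_set.iter().all(|&(_, m)| m == fm));
}
```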
2 changes: 1 addition & 1 deletion client/api/src/in_mem.rs
@@ -173,7 +173,7 @@ impl<Block: BlockT> Blockchain<Block> {

{
let mut storage = self.storage.write();
-storage.leaves.import(hash, number, header.parent_hash().clone());
+storage.leaves.import(hash, number, *header.parent_hash());
storage.blocks.insert(hash, StoredBlock::new(header, body, justifications));

if let NewBlockState::Final = new_state {
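`parent_hash()` returns a reference to a `Copy` hash, so `*header.parent_hash()` copies the value out directly instead of cloning through the reference. A sketch with illustrative types:

```rust
// Illustrative stand-ins for the header/hash types; the point is only that the
// getter returns a reference to a Copy value.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Hash(u64);

struct Header {
    parent_hash: Hash,
}

impl Header {
    fn parent_hash(&self) -> &Hash {
        &self.parent_hash
    }
}

fn main() {
    let header = Header { parent_hash: Hash(7) };

    // Before: header.parent_hash().clone()
    // After: dereference the returned &Hash and let Copy do the rest.
    let parent = *header.parent_hash();

    assert_eq!(parent, Hash(7));
}
```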
19 changes: 9 additions & 10 deletions client/authority-discovery/src/worker/tests.rs
@@ -48,7 +48,7 @@ pub(crate) struct TestApi {
impl ProvideRuntimeApi<Block> for TestApi {
type Api = RuntimeApi;

-fn runtime_api<'a>(&'a self) -> ApiRef<'a, Self::Api> {
+fn runtime_api(&self) -> ApiRef<'_, Self::Api> {
RuntimeApi { authorities: self.authorities.clone() }.into()
}
}
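This is one of the `needless_lifetimes` fixes that the config change above now enforces; a sketch with simplified types:

```rust
// Sketch of the `needless_lifetimes` fix with simplified stand-in types: the
// explicit `'a` adds nothing that lifetime elision does not already express,
// and `'_` keeps the borrow visible in the return type without naming it.
struct ApiRef<'a, T>(&'a T);

struct TestApi {
    value: u32,
}

impl TestApi {
    // Before: fn runtime_api<'a>(&'a self) -> ApiRef<'a, u32> {
    fn runtime_api(&self) -> ApiRef<'_, u32> {
        ApiRef(&self.value)
    }
}

fn main() {
    let api = TestApi { value: 7 };
    assert_eq!(*api.runtime_api().0, 7);
}
```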
@@ -530,7 +530,7 @@ impl DhtValueFoundTester {
) -> Option<&HashSet<Multiaddr>> {
let (_dht_event_tx, dht_event_rx) = channel(1);
let local_test_api =
-Arc::new(TestApi { authorities: vec![self.remote_authority_public.clone().into()] });
+Arc::new(TestApi { authorities: vec![self.remote_authority_public.into()] });
let local_network: Arc<TestNetwork> = Arc::new(Default::default());
let local_key_store = KeyStore::new();

@@ -555,8 +555,7 @@ impl DhtValueFoundTester {
self.local_worker
.as_ref()
.map(|w| {
-w.addr_cache
-.get_addresses_by_authority_id(&self.remote_authority_public.clone().into())
+w.addr_cache.get_addresses_by_authority_id(&self.remote_authority_public.into())
})
.unwrap()
}
@@ -569,7 +568,7 @@ fn limit_number_of_addresses_added_to_cache_per_authority() {
let addresses = (1..100).map(|i| tester.multiaddr_with_peer_id(i)).collect();
let kv_pairs = block_on(build_dht_event::<TestNetwork>(
addresses,
-tester.remote_authority_public.clone().into(),
+tester.remote_authority_public.into(),
&tester.remote_key_store,
None,
));
@@ -584,7 +583,7 @@ fn strict_accept_address_with_peer_signature() {
let addr = tester.multiaddr_with_peer_id(1);
let kv_pairs = block_on(build_dht_event(
vec![addr.clone()],
-tester.remote_authority_public.clone().into(),
+tester.remote_authority_public.into(),
&tester.remote_key_store,
Some(&TestSigner { keypair: &tester.remote_node_key }),
));
@@ -604,7 +603,7 @@ fn reject_address_with_rogue_peer_signature() {
let rogue_remote_node_key = Keypair::generate_ed25519();
let kv_pairs = block_on(build_dht_event(
vec![tester.multiaddr_with_peer_id(1)],
-tester.remote_authority_public.clone().into(),
+tester.remote_authority_public.into(),
&tester.remote_key_store,
Some(&TestSigner { keypair: &rogue_remote_node_key }),
));
@@ -622,7 +621,7 @@ fn reject_address_with_invalid_peer_signature() {
let mut tester = DhtValueFoundTester::new();
let mut kv_pairs = block_on(build_dht_event(
vec![tester.multiaddr_with_peer_id(1)],
-tester.remote_authority_public.clone().into(),
+tester.remote_authority_public.into(),
&tester.remote_key_store,
Some(&TestSigner { keypair: &tester.remote_node_key }),
));
@@ -644,7 +643,7 @@ fn reject_address_without_peer_signature() {
let mut tester = DhtValueFoundTester::new();
let kv_pairs = block_on(build_dht_event::<TestNetwork>(
vec![tester.multiaddr_with_peer_id(1)],
-tester.remote_authority_public.clone().into(),
+tester.remote_authority_public.into(),
&tester.remote_key_store,
None,
));
@@ -662,7 +661,7 @@ fn do_not_cache_addresses_without_peer_id() {
"/ip6/2001:db8:0:0:0:0:0:2/tcp/30333".parse().unwrap();
let kv_pairs = block_on(build_dht_event::<TestNetwork>(
vec![multiaddr_with_peer_id.clone(), multiaddr_without_peer_id],
-tester.remote_authority_public.clone().into(),
+tester.remote_authority_public.into(),
&tester.remote_key_store,
None,
));
2 changes: 1 addition & 1 deletion client/beefy/src/round.rs
@@ -109,7 +109,7 @@ where
}

pub(crate) fn should_self_vote(&self, round: &(P, NumberFor<B>)) -> bool {
-Some(round.1.clone()) > self.best_done &&
+Some(round.1) > self.best_done &&
self.rounds.get(round).map(|tracker| !tracker.has_self_vote()).unwrap_or(true)
}

2 changes: 1 addition & 1 deletion client/beefy/src/tests.rs
@@ -326,7 +326,7 @@ fn add_auth_change_digest(header: &mut Header, new_auth_set: BeefyValidatorSet)
}

pub(crate) fn make_beefy_ids(keys: &[BeefyKeyring]) -> Vec<AuthorityId> {
-keys.iter().map(|key| key.clone().public().into()).collect()
+keys.iter().map(|&key| key.public().into()).collect()
}

pub(crate) fn create_beefy_keystore(authority: BeefyKeyring) -> SyncCryptoStorePtr {
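In `make_beefy_ids`, the iterator yields references to `Copy` keys, so the closure can bind by value with `|&key|` instead of cloning; a sketch with an illustrative keyring enum:

```rust
// Sketch of the closure change in `make_beefy_ids`: when the iterator yields
// `&K` for a Copy key type, binding the parameter as `|&key|` copies the key
// out of the reference, so no `.clone()` is needed. `Keyring` is illustrative.
#[derive(Clone, Copy)]
enum Keyring {
    Alice,
    Bob,
}

impl Keyring {
    fn public(self) -> u8 {
        match self {
            Keyring::Alice => 1,
            Keyring::Bob => 2,
        }
    }
}

fn make_ids(keys: &[Keyring]) -> Vec<u8> {
    // was: keys.iter().map(|key| key.clone().public()).collect()
    keys.iter().map(|&key| key.public()).collect()
}

fn main() {
    assert_eq!(make_ids(&[Keyring::Alice, Keyring::Bob]), vec![1, 2]);
}
```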
2 changes: 1 addition & 1 deletion client/beefy/src/worker.rs
@@ -1134,7 +1134,7 @@ pub(crate) mod tests {
let mmr_root_hash = H256::random();
header.digest_mut().push(DigestItem::Consensus(
BEEFY_ENGINE_ID,
-ConsensusLog::<AuthorityId>::MmrRoot(mmr_root_hash.clone()).encode(),
+ConsensusLog::<AuthorityId>::MmrRoot(mmr_root_hash).encode(),
));

// verify validator set is correctly extracted from digest
2 changes: 1 addition & 1 deletion client/cli/src/commands/chain_info_cmd.rs
@@ -77,7 +77,7 @@ impl ChainInfoCmd {
state_cache_child_ratio: config.state_cache_child_ratio.map(|v| (v, 100)),
state_pruning: config.state_pruning.clone(),
source: config.database.clone(),
-blocks_pruning: config.blocks_pruning.clone(),
+blocks_pruning: config.blocks_pruning,
};
let backend = sc_service::new_db_backend::<B>(db_config)?;
let info: ChainInfo<B> = backend.blockchain().info().into();
4 changes: 2 additions & 2 deletions client/consensus/aura/src/lib.rs
@@ -821,7 +821,7 @@ mod tests {
block_import: client,
env: environ,
keystore: keystore.into(),
-sync_oracle: DummyOracle.clone(),
+sync_oracle: DummyOracle,
justification_sync_link: (),
force_authoring: false,
backoff_authoring_blocks: Some(BackoffAuthoringOnFinalizedHeadLagging::default()),
@@ -873,7 +873,7 @@ mod tests {
block_import: client.clone(),
env: environ,
keystore: keystore.into(),
-sync_oracle: DummyOracle.clone(),
+sync_oracle: DummyOracle,
justification_sync_link: (),
force_authoring: false,
backoff_authoring_blocks: Option::<()>::None,
2 changes: 1 addition & 1 deletion client/consensus/babe/src/authorship.rs
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ mod tests {

assert!(claim_slot(10.into(), &epoch, &keystore).is_none());

-epoch.authorities.push((valid_public_key.clone().into(), 10));
+epoch.authorities.push((valid_public_key.into(), 10));
assert_eq!(claim_slot(10.into(), &epoch, &keystore).unwrap().1, valid_public_key.into());
}
}
2 changes: 1 addition & 1 deletion client/consensus/babe/src/tests.rs
@@ -152,7 +152,7 @@ impl DummyProposer {
// that will re-check the randomness logic off-chain.
let digest_data = ConsensusLog::NextEpochData(NextEpochDescriptor {
authorities: epoch.authorities.clone(),
-randomness: epoch.randomness.clone(),
+randomness: epoch.randomness,
})
.encode();
let digest = DigestItem::Consensus(BABE_ENGINE_ID, digest_data);
2 changes: 1 addition & 1 deletion client/consensus/common/src/import_queue/basic_queue.rs
@@ -535,7 +535,7 @@ mod tests {
_number: BlockNumber,
_success: bool,
) {
-self.events.push(Event::JustificationImported(hash.clone()))
+self.events.push(Event::JustificationImported(*hash))
}
}

16 changes: 8 additions & 8 deletions client/consensus/epochs/src/lib.rs
@@ -1063,7 +1063,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_a_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"A", 1, *b"0", incremented_epoch)
@@ -1080,7 +1080,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_x_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"X", 1, *b"0", incremented_epoch)
@@ -1145,7 +1145,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_a_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"A", 1, *b"0", incremented_epoch)
@@ -1162,7 +1162,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_x_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"X", 1, *b"0", incremented_epoch)
@@ -1220,7 +1220,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_a_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"1", 1, *b"0", incremented_epoch)
@@ -1330,7 +1330,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_a_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"A", 1, *b"0", incremented_epoch)
@@ -1347,7 +1347,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&epoch_b_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"B", 201, *b"A", incremented_epoch)
@@ -1364,7 +1364,7 @@ mod tests {
let incremented_epoch = epoch_changes
.viable_epoch(&genesis_epoch_x_descriptor, &make_genesis)
.unwrap()
-.increment(next_descriptor.clone());
+.increment(next_descriptor);

epoch_changes
.import(&is_descendent_of, *b"C", 1, *b"0", incremented_epoch)
6 changes: 3 additions & 3 deletions client/consensus/manual-seal/src/lib.rs
@@ -355,7 +355,7 @@ mod tests {
assert_eq!(
created_block,
CreatedBlock {
-hash: created_block.hash.clone(),
+hash: created_block.hash,
aux: ImportedAux {
header_only: false,
clear_justification_requests: false,
@@ -422,7 +422,7 @@ mod tests {
assert_eq!(
created_block,
CreatedBlock {
-hash: created_block.hash.clone(),
+hash: created_block.hash,
aux: ImportedAux {
header_only: false,
clear_justification_requests: false,
@@ -502,7 +502,7 @@ mod tests {
assert_eq!(
created_block,
CreatedBlock {
-hash: created_block.hash.clone(),
+hash: created_block.hash,
aux: ImportedAux {
header_only: false,
clear_justification_requests: false,
2 changes: 1 addition & 1 deletion client/db/src/lib.rs
@@ -3390,7 +3390,7 @@ pub(crate) mod tests {
assert!(backend.remove_leaf_block(&best_hash).is_err());
assert!(backend.have_state_at(&prev_hash, 1));
backend.remove_leaf_block(&prev_hash).unwrap();
-assert_eq!(None, backend.blockchain().header(BlockId::hash(prev_hash.clone())).unwrap());
+assert_eq!(None, backend.blockchain().header(BlockId::hash(prev_hash)).unwrap());
assert!(!backend.have_state_at(&prev_hash, 1));
}

6 changes: 3 additions & 3 deletions client/db/src/storage_cache.rs
@@ -1223,7 +1223,7 @@ mod tests {
let mut s = CachingState::new(
InMemoryBackend::<BlakeTwo256>::default(),
shared.clone(),
-Some(root_parent.clone()),
+Some(root_parent),
);

let key = H256::random()[..].to_vec();
@@ -1307,14 +1307,14 @@ mod tests {
let mut s = CachingState::new(
InMemoryBackend::<BlakeTwo256>::default(),
shared.clone(),
-Some(root_parent.clone()),
+Some(root_parent),
);
s.cache.sync_cache(
&[],
&[],
vec![(key.clone(), Some(vec![2]))],
vec![],
-Some(h0.clone()),
+Some(h0),
Some(0),
true,
);
3 changes: 1 addition & 2 deletions client/executor/wasmtime/src/host.rs
@@ -276,12 +276,11 @@ impl<'a> Sandbox for HostContext<'a> {
.ok_or("Runtime doesn't have a table; sandbox is unavailable")?;
let table_item = table.get(&mut self.caller, dispatch_thunk_id);

-table_item
+*table_item
.ok_or("dispatch_thunk_id is out of bounds")?
.funcref()
.ok_or("dispatch_thunk_idx should be a funcref")?
.ok_or("dispatch_thunk_idx should point to actual func")?
-.clone()
};

let guest_env = match sandbox::GuestEnvironment::decode(self.sandbox_store(), raw_env_def) {
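Rather than ending the fallible lookup chain with `.clone()`, the whole expression is now dereferenced up front, copying the final value out of the reference it resolves to. A simplified sketch of the same shape (stand-in types, not the wasmtime API):

```rust
// Simplified sketch of the same shape as the host.rs change: prefix `*` on the
// chained expression replaces a trailing `.clone()` on the Copy result.
fn lookup(table: &[Option<u32>], idx: usize) -> Result<u32, &'static str> {
    let value = *table
        .get(idx)
        .ok_or("index is out of bounds")?
        .as_ref()
        .ok_or("slot should hold a value")?;
    Ok(value)
}

fn main() {
    let table = vec![Some(7), None];
    assert_eq!(lookup(&table, 0), Ok(7));
    assert_eq!(lookup(&table, 1), Err("slot should hold a value"));
    assert_eq!(lookup(&table, 2), Err("index is out of bounds"));
}
```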