Fix clippy.

Peilun Li 2024-06-20 17:33:32 +08:00
parent c67ae6f835
commit 289e63c36b
3 changed files with 10 additions and 6 deletions

@@ -997,7 +997,11 @@ mod tests {
         assert_eq!(peer_id, *ctx.network_globals.peer_id.read());
         assert_eq!(
             addr,
-            *ctx.network_globals.listen_multiaddrs.read().get(0).unwrap()
+            *ctx.network_globals
+                .listen_multiaddrs
+                .read()
+                .first()
+                .unwrap()
         );
     }
     Ok(_) => panic!("Unexpected sync message type received"),
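
The change above swaps .get(0).unwrap() for .first().unwrap(), which is the fix suggested by clippy's get_first lint for slices and Vecs. A minimal standalone sketch of the pattern (the addresses below are illustrative, not the project's data):

    fn main() {
        let multiaddrs = vec!["/ip4/127.0.0.1/tcp/1234", "/ip4/127.0.0.1/tcp/5678"];

        // clippy::get_first flags fetching the front element with get(0)...
        let via_get = multiaddrs.get(0).unwrap();

        // ...and suggests first(), which states the intent directly and
        // produces the same Option<&T>.
        let via_first = multiaddrs.first().unwrap();

        assert_eq!(via_get, via_first);
    }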

@@ -208,7 +208,7 @@ impl FlowRead for FlowStore {
     }

     fn get_shard_config(&self) -> ShardConfig {
-        self.config.shard_config.read().clone()
+        *self.config.shard_config.read()
     }
 }
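
ShardConfig is evidently a Copy type kept behind a lock, so read().clone() trips clippy's clone_on_copy lint; dereferencing the read guard copies the value out instead. A minimal sketch of the same shape, assuming a std::sync::RwLock and stand-in fields rather than the project's actual lock and config definition:

    use std::sync::RwLock;

    // Stand-in for a small Copy configuration value held behind a lock.
    #[derive(Clone, Copy)]
    struct ShardConfig {
        shard_id: usize,
        num_shard: usize,
    }

    struct FlowStore {
        shard_config: RwLock<ShardConfig>,
    }

    impl FlowStore {
        fn get_shard_config(&self) -> ShardConfig {
            // read().unwrap().clone() would be flagged by clippy::clone_on_copy;
            // dereferencing the guard copies the Copy value out directly.
            *self.shard_config.read().unwrap()
        }
    }

    fn main() {
        let store = FlowStore {
            shard_config: RwLock::new(ShardConfig { shard_id: 0, num_shard: 4 }),
        };
        let cfg = store.get_shard_config();
        assert_eq!((cfg.shard_id, cfg.num_shard), (0, 4));
    }

Because the value is Copy, the guard is released as soon as the copy is made, so the caller never holds the lock.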

@@ -170,7 +170,7 @@ impl LogStoreChunkWrite for LogManager {
         // TODO: Use another struct to avoid confusion.
         let mut flow_entry_array = chunks;
         flow_entry_array.start_index += tx.start_entry_index;
-        self.append_entries(flow_entry_array, &mut *merkle)?;
+        self.append_entries(flow_entry_array, &mut merkle)?;
         Ok(())
     }

@@ -203,7 +203,7 @@ impl LogStoreChunkWrite for LogManager {
         // TODO: Use another struct to avoid confusion.
         let mut flow_entry_array = chunks;
         flow_entry_array.start_index += tx.start_entry_index;
-        self.append_entries(flow_entry_array, &mut *merkle)?;
+        self.append_entries(flow_entry_array, &mut merkle)?;

         if let Some(file_proof) = maybe_file_proof {
             let updated_node_list = merkle.pora_chunks_merkle.fill_with_file_proof(

@@ -258,7 +258,7 @@ impl LogStoreWrite for LogManager {
         }
         let maybe_same_data_tx_seq = self.tx_store.put_tx(tx.clone())?.first().cloned();
         // TODO(zz): Should we validate received tx?
-        self.append_subtree_list(tx.merkle_nodes.clone(), &mut *merkle)?;
+        self.append_subtree_list(tx.merkle_nodes.clone(), &mut merkle)?;
         merkle.commit_merkle(tx.seq)?;
         debug!(
             "commit flow root: root={:?}",

@@ -1041,7 +1041,7 @@ impl LogManager {
             for (_, offset) in &to_tx_offset_list {
                 let mut data = batch_data.clone();
                 data.start_index += offset;
-                self.append_entries(data, &mut *merkle)?;
+                self.append_entries(data, &mut merkle)?;
             }
         }
         // num_entries() includes the rear padding data, so no need for more padding.
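
All four hunks in this file drop the same manual re-borrow: &mut *merkle becomes &mut merkle, which reads like clippy's explicit_auto_deref lint. Assuming merkle is a lock guard (or anything else implementing DerefMut), deref coercion already turns &mut merkle into a mutable reference to the inner value at the call site, so the explicit deref adds nothing. A minimal sketch under that assumption; the MerkleState type and append_entries helper below are stand-ins, not LogManager's actual items:

    use std::sync::Mutex;

    // Stand-in for the in-memory merkle state guarded by a lock.
    #[derive(Default)]
    struct MerkleState {
        leaves: Vec<u64>,
    }

    fn append_entries(entries: &[u64], merkle: &mut MerkleState) {
        merkle.leaves.extend_from_slice(entries);
    }

    fn main() {
        let merkle_lock = Mutex::new(MerkleState::default());
        let mut merkle = merkle_lock.lock().unwrap();

        // &mut *merkle also compiles, but clippy::explicit_auto_deref flags the
        // explicit deref: coercion already converts &mut MutexGuard<MerkleState>
        // into &mut MerkleState for this call.
        append_entries(&[1, 2, 3], &mut merkle);

        assert_eq!(merkle.leaves, vec![1, 2, 3]);
    }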