Check the local flow root against the contract state. (#229)

* Check the local flow root against the contract state.

* Check zero contract root.

* Fix wrong root before the first segment.

* Update contracts.

* Fix proof insertion.
peilun-conflux 2024-10-12 16:50:31 +08:00 committed by GitHub
parent 48868b60db
commit 45fa344564
14 changed files with 112 additions and 35 deletions
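
The core of this change is the new check in LogSyncManager (the fourth file diff below): after a log-entry transaction is processed, the locally computed flow root is compared with the root the Flow contract recorded for the same tx sequence. As a condensed, hedged sketch of that decision logic only — the name roots_consistent, its Result<H256, String> parameters, and the use of eprintln! in place of the tracing macros are stand-ins; only H256 (assumed here to come from the ethereum-types crate) appears in the diff:

use ethereum_types::H256;

/// Returns false only when both roots are available, the on-chain root is
/// non-zero, and the two roots disagree. RPC or store errors are logged and
/// treated as "cannot check", mirroring the `true` fallthrough in the diff.
fn roots_consistent(on_chain: Result<H256, String>, local: Result<H256, String>) -> bool {
    match on_chain {
        // A zero root means the tx was submitted before the contract upgrade
        // that started recording flow roots, so there is nothing to compare.
        Ok(contract_root) if contract_root.is_zero() => true,
        Ok(contract_root) => match local {
            Ok(local_root) => {
                if contract_root != local_root {
                    eprintln!("local flow root and on-chain flow root mismatch");
                    false
                } else {
                    true
                }
            }
            Err(e) => {
                eprintln!("fail to read the local flow root: {e}");
                true
            }
        },
        Err(e) => {
            eprintln!("fail to read the on-chain flow root: {e}");
            true
        }
    }
}

fn main() {
    let root = H256::repeat_byte(1);
    assert!(roots_consistent(Ok(H256::zero()), Ok(root))); // pre-upgrade tx: skipped
    assert!(!roots_consistent(Ok(root), Ok(H256::repeat_byte(2)))); // mismatch: reject
}

The real code obtains the on-chain root via flow_contract().get_flow_root_by_tx_seq(...) and the local root via store.get_context(), as shown in the hunk below.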

View File

@@ -226,18 +226,18 @@ impl<E: HashElement, A: Algorithm<E>> AppendMerkleTree<E, A> {
         &mut self,
         proof: RangeProof<E>,
     ) -> Result<Vec<(usize, usize, E)>> {
-        self.fill_with_proof(
-            proof
-                .left_proof
-                .proof_nodes_in_tree()
-                .split_off(self.leaf_height),
-        )?;
-        self.fill_with_proof(
-            proof
-                .right_proof
-                .proof_nodes_in_tree()
-                .split_off(self.leaf_height),
-        )
+        let mut updated_nodes = Vec::new();
+        let mut left_nodes = proof.left_proof.proof_nodes_in_tree();
+        if left_nodes.len() >= self.leaf_height {
+            updated_nodes
+                .append(&mut self.fill_with_proof(left_nodes.split_off(self.leaf_height))?);
+        }
+        let mut right_nodes = proof.right_proof.proof_nodes_in_tree();
+        if right_nodes.len() >= self.leaf_height {
+            updated_nodes
+                .append(&mut self.fill_with_proof(right_nodes.split_off(self.leaf_height))?);
+        }
+        Ok(updated_nodes)
     }

     pub fn fill_with_file_proof(
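
The rewritten body above is what the "Fix proof insertion" bullet refers to: Vec::split_off(at) panics when at exceeds the vector's length, so a proof whose in-tree node list is shorter than leaf_height now skips the fill instead of splitting. A std-only illustration of that edge case (the helper name and the u64 node type are invented for the example; the real code works on Merkle tree nodes):

fn nodes_above_leaf_height(mut proof_nodes: Vec<u64>, leaf_height: usize) -> Vec<u64> {
    if proof_nodes.len() >= leaf_height {
        // Keep only the nodes above the leaf level, as fill_with_proof expects.
        proof_nodes.split_off(leaf_height)
    } else {
        // The old code called split_off unconditionally and would panic here;
        // the new code simply skips the fill for such a proof.
        Vec::new()
    }
}

fn main() {
    assert_eq!(nodes_above_leaf_height(vec![1, 2, 3, 4], 2), vec![3, 4]);
    assert!(nodes_above_leaf_height(vec![1], 2).is_empty());
}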

View File

@@ -9,5 +9,5 @@ exit-future = "0.2.0"
 futures = "0.3.21"
 lazy_static = "1.4.0"
 lighthouse_metrics = { path = "../lighthouse_metrics" }
-tokio = { version = "1.19.2", features = ["rt"] }
+tokio = { version = "1.38.0", features = ["full"] }
 tracing = "0.1.35"

View File

@@ -222,7 +222,7 @@ impl LogEntryFetcher {
     ) -> UnboundedReceiver<LogFetchProgress> {
         let provider = self.provider.clone();
         let (recover_tx, recover_rx) = tokio::sync::mpsc::unbounded_channel();
-        let contract = ZgsFlow::new(self.contract_address, provider.clone());
+        let contract = self.flow_contract();
         let log_page_size = self.log_page_size;

         executor.spawn(
@@ -305,7 +305,7 @@
         mut watch_progress_rx: UnboundedReceiver<u64>,
     ) -> UnboundedReceiver<LogFetchProgress> {
         let (watch_tx, watch_rx) = tokio::sync::mpsc::unbounded_channel();
-        let contract = ZgsFlow::new(self.contract_address, self.provider.clone());
+        let contract = self.flow_contract();
         let provider = self.provider.clone();
         let confirmation_delay = self.confirmation_delay;
         let log_page_size = self.log_page_size;
@@ -583,6 +583,10 @@
     pub fn provider(&self) -> &Provider<RetryClient<Http>> {
         self.provider.as_ref()
     }
+
+    pub fn flow_contract(&self) -> ZgsFlow<Provider<RetryClient<Http>>> {
+        ZgsFlow::new(self.contract_address, self.provider.clone())
+    }
 }

 async fn check_watch_process(

View File

@@ -510,6 +510,41 @@ impl LogSyncManager {
             }
             self.data_cache.garbage_collect(self.next_tx_seq);
             self.next_tx_seq += 1;
+
+            // Check if the computed data root matches on-chain state.
+            // If the call fails, we won't check the root here and return `true` directly.
+            let flow_contract = self.log_fetcher.flow_contract();
+            match flow_contract
+                .get_flow_root_by_tx_seq(tx.seq.into())
+                .call()
+                .await
+            {
+                Ok(contract_root_bytes) => {
+                    let contract_root = H256::from_slice(&contract_root_bytes);
+                    // contract_root is zero for tx submitted before upgrading.
+                    if !contract_root.is_zero() {
+                        match self.store.get_context() {
+                            Ok((local_root, _)) => {
+                                if contract_root != local_root {
+                                    error!(
+                                        ?contract_root,
+                                        ?local_root,
+                                        "local flow root and on-chain flow root mismatch"
+                                    );
+                                    return false;
+                                }
+                            }
+                            Err(e) => {
+                                warn!(?e, "fail to read the local flow root");
+                            }
+                        }
+                    }
+                }
+                Err(e) => {
+                    warn!(?e, "fail to read the on-chain flow root");
+                }
+            }
+
             true
         }
     }
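
Two H256 helpers carry the conversions in the hunk above. A small grounded reminder of their behavior, assuming the ethereum-types crate that provides the H256 used here:

use ethereum_types::H256;

fn main() {
    // from_slice panics unless the slice is exactly 32 bytes long, so the raw
    // bytes returned for the flow root must form a full 32-byte hash.
    let contract_root = H256::from_slice(&[0u8; 32]);

    // A zero root is how a tx submitted before the contract upgrade shows up;
    // the check above skips the comparison in that case ("Check zero contract root").
    assert!(contract_root.is_zero());
}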

View File

@@ -29,7 +29,7 @@ itertools = "0.13.0"
 serde = { version = "1.0.197", features = ["derive"] }
 parking_lot = "0.12.3"
 serde_json = "1.0.127"
-tokio = { version = "1.10.0", features = ["sync"] }
+tokio = { version = "1.38.0", features = ["full"] }
 task_executor = { path = "../../common/task_executor" }

 [dev-dependencies]

View File

@@ -712,7 +712,7 @@ impl LogManager {
                 tx_store.rebuild_last_chunk_merkle(pora_chunks_merkle.leaves(), tx_seq)?
             }
             // Initialize
-            None => Merkle::new_with_depth(vec![], log2_pow2(PORA_CHUNK_SIZE) + 1, None),
+            None => Merkle::new_with_depth(vec![], 1, None),
         };

         debug!(
@@ -761,6 +761,10 @@
             .merkle
             .write()
             .try_initialize(&log_manager.flow_store)?;
+        info!(
+            "Log manager initialized, state={:?}",
+            log_manager.get_context()?
+        );

         Ok(log_manager)
     }

View File

@@ -335,11 +335,7 @@ impl TransactionStore {
         }
         let mut merkle = if last_chunk_start_index == 0 {
             // The first entry hash is initialized as zero.
-            AppendMerkleTree::<H256, Sha3Algorithm>::new_with_depth(
-                vec![H256::zero()],
-                log2_pow2(PORA_CHUNK_SIZE) + 1,
-                None,
-            )
+            AppendMerkleTree::<H256, Sha3Algorithm>::new_with_depth(vec![H256::zero()], 1, None)
         } else {
             AppendMerkleTree::<H256, Sha3Algorithm>::new_with_depth(
                 vec![],
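
This hunk and the LogManager hunk above implement the "Fix wrong root before the first segment" bullet: the last-chunk Merkle tree is now seeded with depth 1 instead of log2_pow2(PORA_CHUNK_SIZE) + 1, which, per the commit message, fixes the wrong root reported before the first segment is appended. The sketch below is only a generic illustration of why depth matters, not the AppendMerkleTree padding scheme; it assumes the sha3 crate and uses invented helper names:

use sha3::{Digest, Keccak256};

// Hash a pair of nodes into their parent, as a generic binary Merkle tree does.
fn parent(left: &[u8], right: &[u8]) -> Vec<u8> {
    let mut hasher = Keccak256::new();
    hasher.update(left);
    hasher.update(right);
    hasher.finalize().to_vec()
}

// Pad a single leaf with zero siblings up through `depth` levels and return the root.
fn root_with_depth(leaf: Vec<u8>, depth: usize) -> Vec<u8> {
    let zero = vec![0u8; 32];
    (0..depth).fold(leaf, |node, _| parent(&node, &zero))
}

fn main() {
    let leaf = vec![0u8; 32]; // the zero first-entry hash from the hunk above
    // The same content rooted at different depths yields different roots, so a
    // tree initialized with the wrong depth cannot match the expected root.
    assert_ne!(root_with_depth(leaf.clone(), 1), root_with_depth(leaf, 11));
}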

View File

@ -1 +1 @@
75c251804a29ab22adced50d92478cf0baf834bc
66ff70bc88547c7467efd35ba500ae5f25cf8960

View File

@@ -40,8 +40,8 @@
"type": "function"
}
],
"bytecode": "0x608060405234801561001057600080fd5b5060be8061001f6000396000f3fe6080604052348015600f57600080fd5b506004361060325760003560e01c806361ec5082146037578063da6eb36a14604b575b600080fd5b600060405190815260200160405180910390f35b605b6056366004605d565b505050565b005b600080600060608486031215607157600080fd5b50508135936020830135935060409092013591905056fea264697066735822122044ebf96fcad90f0bbc521513843d64fbc182c5c913a8210a4d638393793be63064736f6c63430008100033",
"deployedBytecode": "0x6080604052348015600f57600080fd5b506004361060325760003560e01c806361ec5082146037578063da6eb36a14604b575b600080fd5b600060405190815260200160405180910390f35b605b6056366004605d565b505050565b005b600080600060608486031215607157600080fd5b50508135936020830135935060409092013591905056fea264697066735822122044ebf96fcad90f0bbc521513843d64fbc182c5c913a8210a4d638393793be63064736f6c63430008100033",
"bytecode": "0x608060405234801561001057600080fd5b5060be8061001f6000396000f3fe6080604052348015600f57600080fd5b506004361060325760003560e01c806361ec5082146037578063da6eb36a14604b575b600080fd5b600060405190815260200160405180910390f35b605b6056366004605d565b505050565b005b600080600060608486031215607157600080fd5b50508135936020830135935060409092013591905056fea264697066735822122080db0b00f4b93cc320a2df449a74e503451a2675da518eff0fc5b7cf0ae8c90c64736f6c63430008100033",
"deployedBytecode": "0x6080604052348015600f57600080fd5b506004361060325760003560e01c806361ec5082146037578063da6eb36a14604b575b600080fd5b600060405190815260200160405180910390f35b605b6056366004605d565b505050565b005b600080600060608486031215607157600080fd5b50508135936020830135935060409092013591905056fea264697066735822122080db0b00f4b93cc320a2df449a74e503451a2675da518eff0fc5b7cf0ae8c90c64736f6c63430008100033",
"linkReferences": {},
"deployedLinkReferences": {}
}

View File

@@ -70,8 +70,8 @@
"type": "function"
}
],
"bytecode": "0x608060405234801561001057600080fd5b5060f18061001f6000396000f3fe60806040526004361060265760003560e01c806359e9670014602b578063b7a3c04c14603c575b600080fd5b603a60363660046058565b5050565b005b348015604757600080fd5b50603a60533660046079565b505050565b60008060408385031215606a57600080fd5b50508035926020909101359150565b600080600060608486031215608d57600080fd5b8335925060208401356001600160a01b038116811460aa57600080fd5b92959294505050604091909101359056fea2646970667358221220ce57385afc7714a4000e530d1e1154d214fc1c0e2392abde201018635be1a2ab64736f6c63430008100033",
"deployedBytecode": "0x60806040526004361060265760003560e01c806359e9670014602b578063b7a3c04c14603c575b600080fd5b603a60363660046058565b5050565b005b348015604757600080fd5b50603a60533660046079565b505050565b60008060408385031215606a57600080fd5b50508035926020909101359150565b600080600060608486031215608d57600080fd5b8335925060208401356001600160a01b038116811460aa57600080fd5b92959294505050604091909101359056fea2646970667358221220ce57385afc7714a4000e530d1e1154d214fc1c0e2392abde201018635be1a2ab64736f6c63430008100033",
"bytecode": "0x608060405234801561001057600080fd5b5060f18061001f6000396000f3fe60806040526004361060265760003560e01c806359e9670014602b578063b7a3c04c14603c575b600080fd5b603a60363660046058565b5050565b005b348015604757600080fd5b50603a60533660046079565b505050565b60008060408385031215606a57600080fd5b50508035926020909101359150565b600080600060608486031215608d57600080fd5b8335925060208401356001600160a01b038116811460aa57600080fd5b92959294505050604091909101359056fea2646970667358221220d2f22ec6a41724281bad8a768c241562927a5fcc8ba600f3b3784f584a68c65864736f6c63430008100033",
"deployedBytecode": "0x60806040526004361060265760003560e01c806359e9670014602b578063b7a3c04c14603c575b600080fd5b603a60363660046058565b5050565b005b348015604757600080fd5b50603a60533660046079565b505050565b60008060408385031215606a57600080fd5b50508035926020909101359150565b600080600060608486031215608d57600080fd5b8335925060208401356001600160a01b038116811460aa57600080fd5b92959294505050604091909101359056fea2646970667358221220d2f22ec6a41724281bad8a768c241562927a5fcc8ba600f3b3784f584a68c65864736f6c63430008100033",
"linkReferences": {},
"deployedLinkReferences": {}
}

4 file diffs suppressed because one or more lines are too long