Compare commits

..

3 Commits

Author SHA1 Message Date
Joel Liu
e20be63026
Query logs via LogQuery in watch loop (#177)
* query logs via LogQuery in watch loop

* fix lint errors
2024-09-02 18:49:18 +08:00
boqiu
e7a562fa61 hotfix for json serde camel case 2024-09-02 16:12:01 +08:00
Bo QIU
4edd61b9d2
return network identity for status rpc (#178) 2024-09-02 16:05:44 +08:00
5 changed files with 21 additions and 11 deletions

View File

@@ -290,6 +290,7 @@ impl LogEntryFetcher {
         let contract = ZgsFlow::new(self.contract_address, self.provider.clone());
         let provider = self.provider.clone();
         let confirmation_delay = self.confirmation_delay;
+        let log_page_size = self.log_page_size;
         executor.spawn(
             async move {
                 debug!("start_watch starts, start={}", start_block_number);
@@ -305,6 +306,7 @@ impl LogEntryFetcher {
                         confirmation_delay,
                         &contract,
                         &block_hash_cache,
+                        log_page_size,
                     )
                     .await
                     {
@@ -340,6 +342,7 @@ impl LogEntryFetcher {
         confirmation_delay: u64,
         contract: &ZgsFlow<Provider<RetryClient<Http>>>,
         block_hash_cache: &Arc<RwLock<BTreeMap<u64, Option<BlockHashAndSubmissionIndex>>>>,
+        log_page_size: u64,
     ) -> Result<Option<(u64, H256, Option<Option<u64>>)>> {
         let latest_block_number = provider.get_block_number().await?.as_u64();
         debug!(
@@ -408,8 +411,11 @@ impl LogEntryFetcher {
                 .to_block(to_block_number)
                 .address(contract.address().into())
                 .filter;
+            let mut stream = LogQuery::new(provider, &filter, Duration::from_millis(10))
+                .with_page_size(log_page_size);
             let mut block_logs: BTreeMap<u64, Vec<Log>> = BTreeMap::new();
-            for log in provider.get_logs(&filter).await? {
+            while let Some(maybe_log) = stream.next().await {
+                let log = maybe_log?;
                 let block_number = log
                     .block_number
                     .ok_or_else(|| anyhow!("block number missing"))?
@@ -496,20 +502,20 @@ impl LogEntryFetcher {
                 } else {
                     None
                 };
+                for log in log_events.into_iter() {
+                    if let Err(e) = watch_tx.send(log) {
+                        warn!("send LogFetchProgress::Transaction failed: {:?}", e);
+                        return Ok(progress);
+                    }
+                }
                 if let Some(p) = &new_progress {
                     if let Err(e) = watch_tx.send(LogFetchProgress::SyncedBlock(*p)) {
-                        warn!("send LogFetchProgress failed: {:?}", e);
+                        warn!("send LogFetchProgress::SyncedBlock failed: {:?}", e);
                         return Ok(progress);
                     } else {
                         block_hash_cache.write().await.insert(p.0, None);
                     }
                 }
-                for log in log_events.into_iter() {
-                    if let Err(e) = watch_tx.send(log) {
-                        warn!("send log failed: {:?}", e);
-                        return Ok(progress);
-                    }
-                }
                 progress = new_progress;
             }
         }
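
The main change in this file replaces a single provider.get_logs(&filter) call with a paginated LogQuery stream, so one watch iteration over a wide block range no longer becomes a single oversized eth_getLogs request. The sketch below only illustrates the pagination idea behind with_page_size; fetch_logs and fetch_paginated are hypothetical stand-ins, and the real LogQuery additionally throttles requests (the 10 ms delay above) and yields logs as an async stream instead of collecting them.

    use std::ops::RangeInclusive;

    // Hypothetical stand-in for an eth_getLogs call restricted to `range`;
    // the real code builds an ethers Filter and queries the provider.
    fn fetch_logs(range: RangeInclusive<u64>) -> Vec<String> {
        let _ = range;
        Vec::new()
    }

    // Split [from, to] into windows of at most `page_size` blocks so no single
    // request has to cover the whole range.
    fn fetch_paginated(from: u64, to: u64, page_size: u64) -> Vec<String> {
        let page_size = page_size.max(1); // guard against a zero page size
        let mut logs = Vec::new();
        let mut start = from;
        while start <= to {
            let end = to.min(start.saturating_add(page_size - 1));
            logs.extend(fetch_logs(start..=end));
            start = end + 1;
        }
        logs
    }

    fn main() {
        // Blocks 100..=350 with a page size of 100 -> three requests:
        // 100..=199, 200..=299, 300..=350.
        let logs = fetch_paginated(100, 350, 100);
        assert!(logs.is_empty()); // the stub fetcher returns no logs
    }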

View File

@@ -339,8 +339,7 @@ impl LogSyncManager {
                         LogFetchProgress::Transaction(tx) => {
                             if !self.put_tx(tx.clone()).await {
                                 // Unexpected error.
-                                error!("log sync write error");
-                                break;
+                                bail!("log sync write error");
                             }
                             if let Err(e) = self.event_send.send(LogSyncEvent::TxSynced { tx }) {
                                 // TODO: Do we need to wait until all receivers are initialized?
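
Replacing error! plus break with bail! turns a failed put_tx from a silently terminated sync loop into an error that propagates to the caller of the log sync task. A minimal illustration of that difference, assuming anyhow provides the bail! macro used here (the function and values are made up for the example):

    use anyhow::{bail, Result};

    // With `break`, the loop would simply stop and the caller would still see Ok;
    // with `bail!`, the caller receives an Err it can log, retry, or treat as fatal.
    fn process(items: &[i32]) -> Result<u32> {
        let mut processed = 0;
        for &item in items {
            if item < 0 {
                // bail!(...) expands to: return Err(anyhow::anyhow!(...));
                bail!("unexpected negative item: {}", item);
            }
            processed += 1;
        }
        Ok(processed)
    }

    fn main() {
        assert_eq!(process(&[1, 2, 3]).unwrap(), 3);
        assert!(process(&[1, -2, 3]).is_err());
    }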

View File

@@ -7,7 +7,8 @@ use merkle_tree::RawLeafSha3Algorithm;
 use network::Multiaddr;
 use serde::{Deserialize, Serialize};
 use shared_types::{
-    compute_padded_chunk_size, compute_segment_size, DataRoot, FileProof, Transaction, CHUNK_SIZE,
+    compute_padded_chunk_size, compute_segment_size, DataRoot, FileProof, NetworkIdentity,
+    Transaction, CHUNK_SIZE,
 };
 use std::collections::HashSet;
 use std::hash::Hasher;
@@ -28,6 +29,7 @@ pub struct Status {
     pub connected_peers: usize,
     pub log_sync_height: u64,
     pub log_sync_block: H256,
+    pub network_identity: NetworkIdentity,
 }

 #[derive(Serialize, Deserialize)]

View File

@@ -30,6 +30,7 @@ impl RpcServer for RpcServerImpl {
             connected_peers: self.ctx.network_globals.connected_peers(),
             log_sync_height: sync_progress.0,
             log_sync_block: sync_progress.1,
+            network_identity: self.ctx.network_globals.network_id(),
         })
     }

View File

@@ -370,6 +370,7 @@ impl TryFrom<FileProof> for FlowProof {
 #[derive(
     DeriveEncode, DeriveDecode, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize,
 )]
+#[serde(rename_all = "camelCase")]
 pub struct NetworkIdentity {
     /// The chain id of the blockchain network.
     pub chain_id: u64,
@@ -384,6 +385,7 @@ pub struct NetworkIdentity {
 #[derive(
     DeriveEncode, DeriveDecode, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize,
 )]
+#[serde(rename_all = "camelCase")]
 pub struct ProtocolVersion {
     pub major: u8,
     pub minor: u8,
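
These rename_all attributes are what the "hotfix for json serde camel case" commit adds: NetworkIdentity is now embedded in the JSON-RPC status response (the network_identity field above), and without the attribute serde would emit snake_case keys. A small sketch of the effect on trimmed-down copies of the structs; the SSZ DeriveEncode/DeriveDecode derives and the structs' remaining fields are omitted, and the values are made up:

    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize, Debug, PartialEq)]
    #[serde(rename_all = "camelCase")]
    struct NetworkIdentity {
        // The real struct has further fields; only the one shown in the diff is kept.
        chain_id: u64,
    }

    #[derive(Serialize, Deserialize, Debug, PartialEq)]
    #[serde(rename_all = "camelCase")]
    struct ProtocolVersion {
        major: u8,
        minor: u8,
    }

    fn main() -> serde_json::Result<()> {
        // With rename_all = "camelCase", the JSON key is `chainId` rather than `chain_id`.
        let id = NetworkIdentity { chain_id: 1234 };
        assert_eq!(serde_json::to_string(&id)?, r#"{"chainId":1234}"#);

        // Single-word fields are unaffected, so existing consumers of those keys keep working.
        let v = ProtocolVersion { major: 0, minor: 1 };
        assert_eq!(serde_json::to_string(&v)?, r#"{"major":0,"minor":1}"#);
        Ok(())
    }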