mirror of https://github.com/0glabs/0g-storage-node.git, synced 2024-11-15 04:25:19 +00:00

Update merkle tree trait.

This commit is contained in:
parent 5100c22933
commit 7589bdf4bb
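In summary, the read-side trait is changed so that `node()` and `root()` return owned node values instead of references, which lets `AppendMerkleTree` and `HistoryTree` serve nodes from a `NodeManager` (an in-memory cache backed by a `NodeDatabase`) rather than only from the in-memory `layers`. A minimal sketch of the signature change, using a simplified bound in place of the crate's `HashElement` trait (not the crate's exact definitions):

```rust
// Minimal sketch of the trait change; `E` stands in for the crate's
// `HashElement` bound, and error handling is omitted.

// Before: accessors borrow nodes out of the in-memory `layers`.
trait MerkleTreeReadBefore {
    type E: Clone + PartialEq;
    fn node(&self, layer: usize, index: usize) -> &Self::E;
    fn height(&self) -> usize;
    fn root(&self) -> &Self::E {
        self.node(self.height() - 1, 0)
    }
}

// After: accessors return owned nodes, so an implementation is free to
// fetch them from a cache or a backing database instead of a Vec.
trait MerkleTreeReadAfter {
    type E: Clone + PartialEq;
    fn node(&self, layer: usize, index: usize) -> Self::E;
    fn height(&self) -> usize;
    fn root(&self) -> Self::E {
        self.node(self.height() - 1, 0)
    }
}
```

With owned return values, the call sites in the hunks below simply drop `*` dereferences and `.clone()` calls.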
@@ -104,7 +104,7 @@ impl<E: HashElement, A: Algorithm<E>> AppendMerkleTree<E, A> {
         }
         for (layer_index, position, h) in initial_data.extra_mpt_nodes {
             // TODO: Delete duplicate nodes from DB.
-            merkle.layers[layer_index][position] = h;
+            merkle.node_manager.add_node(layer_index, position, h);
         }
         Ok(merkle)
     }
@@ -385,7 +385,7 @@ impl<E: HashElement, A: Algorithm<E>> AppendMerkleTree<E, A> {
         for layer in &self.layers {
             right_most_nodes.push((layer.len() - 1, layer.last().unwrap().clone()));
         }
-        let root = self.root().clone();
+        let root = self.root();
         self.delta_nodes_map
             .insert(tx_seq, DeltaNodes::new(right_most_nodes));
         self.root_to_tx_seq_map.insert(root, tx_seq);
@@ -566,7 +566,7 @@ impl<E: HashElement, A: Algorithm<E>> AppendMerkleTree<E, A> {
             bail!("empty tree");
         }
         Ok(HistoryTree {
-            layers: &self.layers,
+            node_manager: &self.node_manager,
            delta_nodes,
            leaf_height: self.leaf_height,
        })
@@ -596,10 +596,10 @@ impl<E: HashElement, A: Algorithm<E>> AppendMerkleTree<E, A> {
     fn first_known_root_at(&self, index: usize) -> (usize, E) {
         let mut height = 0;
         let mut index_in_layer = index;
-        while height < self.layers.len() {
+        while height < self.node_manager.num_layers() {
             let node = self.node(height, index_in_layer);
             if !node.is_null() {
-                return (height + 1, node.clone());
+                return (height + 1, node);
             }
             height += 1;
             index_in_layer /= 2;
@@ -644,7 +644,7 @@ impl<E: HashElement> DeltaNodes<E> {

 pub struct HistoryTree<'m, E: HashElement> {
     /// A reference to the global tree nodes.
-    layers: &'m Vec<Vec<E>>,
+    node_manager: &'m NodeManager<E>,
     /// The delta nodes that are difference from `layers`.
     /// This could be a reference, we just take ownership for convenience.
     delta_nodes: &'m DeltaNodes<E>,
@@ -655,16 +655,18 @@ pub struct HistoryTree<'m, E: HashElement> {

 impl<E: HashElement, A: Algorithm<E>> MerkleTreeRead for AppendMerkleTree<E, A> {
     type E = E;

-    fn node(&self, layer: usize, index: usize) -> &Self::E {
-        &self.layers[layer][index]
+    fn node(&self, layer: usize, index: usize) -> Self::E {
+        self.node_manager
+            .get_node(layer, index)
+            .expect("index checked")
     }

     fn height(&self) -> usize {
-        self.layers.len()
+        self.node_manager.num_layers()
     }

     fn layer_len(&self, layer_height: usize) -> usize {
-        self.layers[layer_height].len()
+        self.node_manager.layer_size(layer_height)
     }

     fn padding_node(&self, height: usize) -> Self::E {
@@ -674,10 +676,13 @@ impl<E: HashElement, A: Algorithm<E>> MerkleTreeRead for AppendMerkleTree<E, A>

 impl<'a, E: HashElement> MerkleTreeRead for HistoryTree<'a, E> {
     type E = E;
-    fn node(&self, layer: usize, index: usize) -> &Self::E {
+    fn node(&self, layer: usize, index: usize) -> Self::E {
         match self.delta_nodes.get(layer, index).expect("range checked") {
-            Some(node) if *node != E::null() => node,
-            _ => &self.layers[layer][index],
+            Some(node) if *node != E::null() => node.clone(),
+            _ => self
+                .node_manager
+                .get_node(layer, index)
+                .expect("index checked"),
         }
     }
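The two `node()` implementations above now return owned values: the `AppendMerkleTree` impl reads through its `NodeManager`, while `HistoryTree` checks its per-version delta nodes first and only then falls back to the shared node store. A self-contained sketch of that lookup order, with simplified stand-in types rather than the crate's `NodeManager`/`DeltaNodes` APIs:

```rust
use std::collections::HashMap;

// Simplified stand-in for the shared node store.
struct NodeStore {
    nodes: HashMap<(usize, usize), [u8; 32]>, // (layer, index) -> node hash
}

impl NodeStore {
    fn get_node(&self, layer: usize, index: usize) -> Option<[u8; 32]> {
        self.nodes.get(&(layer, index)).copied()
    }
}

// Simplified stand-in for a historical view of the tree.
struct HistoryView<'a> {
    overrides: HashMap<(usize, usize), [u8; 32]>, // per-version delta nodes
    store: &'a NodeStore,                         // shared, up-to-date nodes
}

impl<'a> HistoryView<'a> {
    // Owned return value: an override wins, otherwise fall back to the store.
    // This mirrors the shape of `HistoryTree::node` in the hunk above.
    fn node(&self, layer: usize, index: usize) -> [u8; 32] {
        match self.overrides.get(&(layer, index)) {
            Some(node) => *node,
            None => self.store.get_node(layer, index).expect("index checked"),
        }
    }
}
```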
@@ -49,7 +49,7 @@ pub trait Algorithm<E: HashElement> {

 pub trait MerkleTreeRead {
     type E: HashElement;
-    fn node(&self, layer: usize, index: usize) -> &Self::E;
+    fn node(&self, layer: usize, index: usize) -> Self::E;
     fn height(&self) -> usize;
     fn layer_len(&self, layer_height: usize) -> usize;
     fn padding_node(&self, height: usize) -> Self::E;
@@ -58,7 +58,7 @@ pub trait MerkleTreeRead {
         self.layer_len(0)
     }

-    fn root(&self) -> &Self::E {
+    fn root(&self) -> Self::E {
         self.node(self.height() - 1, 0)
     }

@@ -70,16 +70,16 @@ pub trait MerkleTreeRead {
                 self.leaves()
             );
         }
-        if self.node(0, leaf_index) == &Self::E::null() {
+        if self.node(0, leaf_index) == Self::E::null() {
             bail!("Not ready to generate proof for leaf_index={}", leaf_index);
         }
         if self.height() == 1 {
-            return Proof::new(vec![self.root().clone(), self.root().clone()], vec![]);
+            return Proof::new(vec![self.root(), self.root().clone()], vec![]);
         }
         let mut lemma: Vec<Self::E> = Vec::with_capacity(self.height()); // path + root
         let mut path: Vec<bool> = Vec::with_capacity(self.height() - 2); // path - 1
         let mut index_in_layer = leaf_index;
-        lemma.push(self.node(0, leaf_index).clone());
+        lemma.push(self.node(0, leaf_index));
         for height in 0..(self.height() - 1) {
             trace!(
                 "gen_proof: height={} index={} hash={:?}",
@@ -93,15 +93,15 @@ pub trait MerkleTreeRead {
                     // TODO: This can be skipped if the tree size is available in validation.
                     lemma.push(self.padding_node(height));
                 } else {
-                    lemma.push(self.node(height, index_in_layer + 1).clone());
+                    lemma.push(self.node(height, index_in_layer + 1));
                 }
             } else {
                 path.push(false);
-                lemma.push(self.node(height, index_in_layer - 1).clone());
+                lemma.push(self.node(height, index_in_layer - 1));
             }
             index_in_layer >>= 1;
         }
-        lemma.push(self.root().clone());
+        lemma.push(self.root());
         if lemma.contains(&Self::E::null()) {
             bail!(
                 "Not enough data to generate proof, lemma={:?} path={:?}",
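Since `node()` and `root()` now hand back owned values, the proof construction above pushes siblings straight into `lemma` without `.clone()`. Below is a self-contained sketch of the same sibling walk over a plain vector-backed tree; the crate's `Proof` type, padding-node handling, and error reporting are simplified or omitted, and the `path.push(true)` in the left-child branch is an assumption based on the `path.push(false)` shown for the right-child branch:

```rust
// Hedged sketch of the sibling walk in `gen_proof`, over a plain
// vector-backed tree. Returns the lemma (leaf, siblings, root) and the
// left/right path bits.
fn gen_proof_sketch(layers: &[Vec<[u8; 32]>], leaf_index: usize) -> (Vec<[u8; 32]>, Vec<bool>) {
    let height = layers.len();
    assert!(height > 0, "empty tree");

    let mut lemma = Vec::with_capacity(height); // leaf + siblings + root
    let mut path = Vec::with_capacity(height.saturating_sub(2));
    let mut index_in_layer = leaf_index;

    lemma.push(layers[0][leaf_index]); // owned copy; no `.clone()` needed
    for h in 0..height.saturating_sub(1) {
        if index_in_layer % 2 == 0 {
            // Left child: the right sibling may not exist yet, so pad with zeros.
            path.push(true);
            match layers[h].get(index_in_layer + 1) {
                Some(sibling) => lemma.push(*sibling),
                None => lemma.push([0u8; 32]), // stand-in for `padding_node`
            }
        } else {
            // Right child: push the left sibling.
            path.push(false);
            lemma.push(layers[h][index_in_layer - 1]);
        }
        index_in_layer >>= 1;
    }
    lemma.push(layers[height - 1][0]); // the root closes the lemma
    (lemma, path)
}
```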
@@ -6,6 +6,7 @@ use tracing::error;

 pub struct NodeManager<E: HashElement> {
     cache: HashMap<(usize, usize), E>,
+    layer_size: Vec<usize>,
     db: Arc<dyn NodeDatabase<E>>,
 }

@@ -13,6 +14,7 @@ impl<E: HashElement> NodeManager<E> {
     pub fn new(db: Arc<dyn NodeDatabase<E>>) -> Self {
         Self {
             cache: HashMap::new(),
+            layer_size: vec![],
             db,
         }
     }
@@ -32,6 +34,21 @@ impl<E: HashElement> NodeManager<E> {
             error!("Failed to save node: {}", e);
         }
         self.cache.insert((layer, pos), node);
+        if pos + 1 > self.layer_size[layer] {
+            self.layer_size[layer] = pos + 1;
+        }
     }
+
+    pub fn add_layer(&mut self) {
+        self.layer_size.push(0);
+    }
+
+    pub fn layer_size(&self, layer: usize) -> usize {
+        self.layer_size[layer]
+    }
+
+    pub fn num_layers(&self) -> usize {
+        self.layer_size.len()
+    }
 }

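The added `layer_size` vector lets `NodeManager` answer `layer_size()` and `num_layers()` without keeping whole layers in memory. A simplified, self-contained sketch of just this bookkeeping (the real type also writes nodes through to its `NodeDatabase` and logs failures):

```rust
use std::collections::HashMap;

// Simplified sketch of the size bookkeeping added above; the database
// write-through and error logging of the real `NodeManager` are omitted.
struct NodeManagerSketch {
    cache: HashMap<(usize, usize), [u8; 32]>,
    layer_size: Vec<usize>,
}

impl NodeManagerSketch {
    fn new() -> Self {
        Self { cache: HashMap::new(), layer_size: vec![] }
    }

    fn add_layer(&mut self) {
        self.layer_size.push(0);
    }

    fn add_node(&mut self, layer: usize, pos: usize, node: [u8; 32]) {
        self.cache.insert((layer, pos), node);
        // Track the highest occupied position + 1 as the layer's size.
        if pos + 1 > self.layer_size[layer] {
            self.layer_size[layer] = pos + 1;
        }
    }

    fn layer_size(&self, layer: usize) -> usize {
        self.layer_size[layer]
    }

    fn num_layers(&self) -> usize {
        self.layer_size.len()
    }
}

fn demo() {
    let mut mgr = NodeManagerSketch::new();
    mgr.add_layer();               // leaf layer
    mgr.add_node(0, 0, [1u8; 32]);
    mgr.add_node(0, 3, [2u8; 32]); // a sparse insert still bumps the size
    assert_eq!(mgr.layer_size(0), 4);
    assert_eq!(mgr.num_layers(), 1);
}
```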
@@ -436,7 +436,7 @@ impl FlowDBStore {
         let mut expected_index = 0;

         let empty_data = vec![0; PORA_CHUNK_SIZE * ENTRY_SIZE];
-        let empty_root = *Merkle::new(
+        let empty_root = Merkle::new(
             Arc::new(EmptyNodeDatabase {}),
             data_to_merkle_leaves(&empty_data)?,
             0,
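`Merkle::new` now takes a node-database handle, and call sites that only need a throwaway tree pass `Arc::new(EmptyNodeDatabase {})`. The `NodeDatabase` trait itself is not shown in this diff, so the following is only a hypothetical illustration of the no-op-store pattern, with made-up trait and method names:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the crate's `NodeDatabase` trait; the real
// trait's method names and signatures may differ.
trait KeyValueNodeStore: Send + Sync {
    fn get(&self, layer: usize, pos: usize) -> Option<[u8; 32]>;
    fn put(&self, layer: usize, pos: usize, node: [u8; 32]);
}

// A store that persists nothing: reads always miss, writes are dropped.
struct EmptyStore;

impl KeyValueNodeStore for EmptyStore {
    fn get(&self, _layer: usize, _pos: usize) -> Option<[u8; 32]> {
        None
    }
    fn put(&self, _layer: usize, _pos: usize, _node: [u8; 32]) {}
}

fn transient_store() -> Arc<dyn KeyValueNodeStore> {
    // Mirrors the shape of the `Arc::new(EmptyNodeDatabase {})` call sites above.
    Arc::new(EmptyStore)
}
```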
@@ -206,7 +206,7 @@ impl EntryBatch {
             }
         }
         Ok(Some(
-            *try_option!(self.to_merkle_tree(is_first_chunk)?).root(),
+            try_option!(self.to_merkle_tree(is_first_chunk)?).root(),
         ))
     }

@@ -116,7 +116,7 @@ impl MerkleManager {
         if self.pora_chunks_merkle.leaves() == 0 && self.last_chunk_merkle.leaves() == 0 {
             self.last_chunk_merkle.append(H256::zero());
             self.pora_chunks_merkle
-                .update_last(*self.last_chunk_merkle.root());
+                .update_last(self.last_chunk_merkle.root());
         } else if self.last_chunk_merkle.leaves() != 0 {
             let last_chunk_start_index = self.last_chunk_start_index();
             let last_chunk_data = flow_store.get_available_entries(
@@ -355,7 +355,7 @@ impl LogStoreWrite for LogManager {
         merkle.revert_merkle_tree(tx_seq, &self.tx_store)?;
         merkle.try_initialize(&self.flow_store)?;
         assert_eq!(
-            Some(*merkle.last_chunk_merkle.root()),
+            Some(merkle.last_chunk_merkle.root()),
             merkle
                 .pora_chunks_merkle
                 .leaf_at(merkle.pora_chunks_merkle.leaves() - 1)?
@@ -577,7 +577,7 @@ impl LogStoreRead for LogManager {
     fn get_context(&self) -> crate::error::Result<(DataRoot, u64)> {
         let merkle = self.merkle.read_recursive();
         Ok((
-            *merkle.pora_chunks_merkle.root(),
+            merkle.pora_chunks_merkle.root(),
             merkle.last_chunk_start_index() + merkle.last_chunk_merkle.leaves() as u64,
         ))
     }
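The remaining log-manager hunks are all the same mechanical change: since the tree's `root()` now returns an owned hash rather than a reference, call sites drop the leading `*`. A minimal before/after sketch with a hypothetical `Hash256` type (not the crate's `H256` or `AppendMerkleTree`):

```rust
// Hypothetical minimal types, used only to show the call-site change.
#[derive(Clone, Copy, PartialEq, Debug, Default)]
struct Hash256([u8; 32]);

struct TreeBefore {
    root: Hash256,
}

impl TreeBefore {
    fn root(&self) -> &Hash256 {
        &self.root
    }
}

struct TreeAfter {
    root: Hash256,
}

impl TreeAfter {
    fn root(&self) -> Hash256 {
        self.root
    }
}

fn call_sites(before: &TreeBefore, after: &TreeAfter) -> (Hash256, Hash256) {
    let old_style = *before.root(); // a returned reference had to be dereferenced
    let new_style = after.root();   // an owned value: the `*` simply disappears
    (old_style, new_style)
}
```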
@@ -727,7 +727,7 @@ impl LogManager {
             last_chunk_merkle.leaves(),
         );
         if last_chunk_merkle.leaves() != 0 {
-            pora_chunks_merkle.append(*last_chunk_merkle.root());
+            pora_chunks_merkle.append(last_chunk_merkle.root());
             // update the merkle root
             pora_chunks_merkle.commit(start_tx_seq);
         }
@@ -893,16 +893,16 @@ impl LogManager {
                 // `last_chunk_merkle` was empty, so this is a new leaf in the top_tree.
                 merkle
                     .pora_chunks_merkle
-                    .append_subtree(1, *merkle.last_chunk_merkle.root())?;
+                    .append_subtree(1, merkle.last_chunk_merkle.root())?;
             } else {
                 merkle
                     .pora_chunks_merkle
-                    .update_last(*merkle.last_chunk_merkle.root());
+                    .update_last(merkle.last_chunk_merkle.root());
             }
             if merkle.last_chunk_merkle.leaves() == PORA_CHUNK_SIZE {
                 batch_root_map.insert(
                     merkle.pora_chunks_merkle.leaves() - 1,
-                    (*merkle.last_chunk_merkle.root(), 1),
+                    (merkle.last_chunk_merkle.root(), 1),
                 );
                 self.complete_last_chunk_merkle(
                     merkle.pora_chunks_merkle.leaves() - 1,
@@ -958,7 +958,7 @@ impl LogManager {
                 .append_list(data_to_merkle_leaves(&pad_data)?);
             merkle
                 .pora_chunks_merkle
-                .update_last(*merkle.last_chunk_merkle.root());
+                .update_last(merkle.last_chunk_merkle.root());
         } else {
             if last_chunk_pad != 0 {
                 is_full_empty = false;
@@ -968,10 +968,10 @@ impl LogManager {
                     .append_list(data_to_merkle_leaves(&pad_data[..last_chunk_pad])?);
                 merkle
                     .pora_chunks_merkle
-                    .update_last(*merkle.last_chunk_merkle.root());
+                    .update_last(merkle.last_chunk_merkle.root());
                 root_map.insert(
                     merkle.pora_chunks_merkle.leaves() - 1,
-                    (*merkle.last_chunk_merkle.root(), 1),
+                    (merkle.last_chunk_merkle.root(), 1),
                 );
                 completed_chunk_index = Some(merkle.pora_chunks_merkle.leaves() - 1);
             }
@@ -982,7 +982,7 @@ impl LogManager {
                 let data = pad_data[start_index * ENTRY_SIZE
                     ..(start_index + PORA_CHUNK_SIZE) * ENTRY_SIZE]
                     .to_vec();
-                let root = *Merkle::new(
+                let root = Merkle::new(
                     Arc::new(EmptyNodeDatabase {}),
                     data_to_merkle_leaves(&data)?,
                     0,
@@ -1068,7 +1068,7 @@ impl LogManager {
             }
             merkle
                 .pora_chunks_merkle
-                .update_last(*merkle.last_chunk_merkle.root());
+                .update_last(merkle.last_chunk_merkle.root());
         }
         let chunk_roots = self.flow_store.append_entries(flow_entry_array)?;
         for (chunk_index, chunk_root) in chunk_roots {