Mirror of https://github.com/0glabs/0g-storage-node.git

commit c75fbe9f69 (parent 5ce22ab020)

    Add py tests for auto sync
@@ -283,8 +283,13 @@ impl SerialBatcher {
     /// Schedule file sync in sequence.
     async fn schedule_next(&mut self) -> Result<bool> {
+        let max_tx_seq = self.max_tx_seq.load(Ordering::Relaxed);
+        if max_tx_seq == u64::MAX {
+            return Ok(false);
+        }
+
         let mut next_tx_seq = self.next_tx_seq.load(Ordering::Relaxed);
-        if next_tx_seq > self.max_tx_seq.load(Ordering::Relaxed) {
+        if next_tx_seq > max_tx_seq {
             return Ok(false);
         }
@@ -63,3 +63,13 @@ TX_PARAMS1 = {
 NO_SEAL_FLAG = 0x1
 NO_MERKLE_PROOF_FLAG = 0x2
 
+
+def update_config(default: dict, custom: dict):
+    """
+    Supports to update configurations with dict value.
+    """
+    for (key, value) in custom.items():
+        if default.get(key) is None or type(value) != dict:
+            default[key] = value
+        else:
+            update_config(default[key], value)
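The update_config helper added above merges nested dict values recursively, where a plain dict.update would replace an entire sub-dict such as "sync" wholesale. A minimal sketch of the intended behavior, with made-up config values for illustration:

    # Illustration only: hypothetical default and override values.
    default = {
        "confirmation_block_count": 6,
        "sync": {"auto_sync_enabled": False, "max_random_workers": 2},
    }
    custom = {"sync": {"auto_sync_enabled": True}}

    update_config(default, custom)

    # dict.update(custom) would have dropped "max_random_workers";
    # update_config keeps the keys the override does not mention.
    assert default["sync"] == {"auto_sync_enabled": True, "max_random_workers": 2}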
@@ -5,18 +5,14 @@ import random
 from test_framework.test_framework import TestFramework
 from utility.submission import create_submission
 from utility.submission import submit_data
-from utility.utils import (
-    assert_equal,
-    wait_until,
-)
+from utility.utils import wait_until
 
 
 class RandomTest(TestFramework):
     def setup_params(self):
         self.num_blockchain_nodes = 1
         self.num_nodes = 4
         for i in range(self.num_nodes):
-            self.zgs_node_configs[i] = {"find_peer_timeout_secs": 1, "confirmation_block_count": 1, "sync": {"auto_sync_enabled": True}}
+            self.zgs_node_configs[i] = {"confirmation_block_count": 1, "sync": {"auto_sync_enabled": True}}
 
     def run_test(self):
         max_size = 256 * 1024 * 64
tests/sync_auto_random.py (new file)
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+
+from test_framework.test_framework import TestFramework
+from utility.utils import wait_until
+
+class AutoRandomSyncTest(TestFramework):
+    def setup_params(self):
+        self.num_nodes = 2
+
+        # Enable random auto sync only
+        for i in range(self.num_nodes):
+            self.zgs_node_configs[i] = {
+                "sync": {
+                    "auto_sync_enabled": True,
+                    "max_sequential_workers": 0,
+                    "max_random_workers": 3,
+                }
+            }
+
+    def run_test(self):
+        # Submit and upload files on node 0
+        data_root_1 = self.__upload_file__(0, 256 * 1024)
+        data_root_2 = self.__upload_file__(0, 256 * 1024)
+
+        # Files should be available on node 1 via auto sync
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_1) is not None)
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_1)["finalized"])
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_2) is not None)
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_2)["finalized"])
+
+        assert 1 > 2
+
+if __name__ == "__main__":
+    AutoRandomSyncTest().main()
tests/sync_auto_sequential.py (new file)
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+
+from test_framework.test_framework import TestFramework
+from utility.utils import wait_until
+
+class AutoSequentialSyncTest(TestFramework):
+    def setup_params(self):
+        self.num_nodes = 2
+
+        # Enable sequential auto sync only
+        for i in range(self.num_nodes):
+            self.zgs_node_configs[i] = {
+                "sync": {
+                    "auto_sync_enabled": True,
+                    "max_sequential_workers": 3,
+                    "max_random_workers": 0,
+                }
+            }
+
+    def run_test(self):
+        # Submit and upload files on node 0
+        data_root_1 = self.__upload_file__(0, 256 * 1024)
+        data_root_2 = self.__upload_file__(0, 256 * 1024)
+
+        # Files should be available on node 1 via auto sync
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_1) is not None)
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_1)["finalized"])
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_2) is not None)
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_2)["finalized"])
+
+if __name__ == "__main__":
+    AutoSequentialSyncTest().main()
tests/sync_auto_test.py (new file)
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+
+from test_framework.test_framework import TestFramework
+from utility.utils import wait_until
+
+class AutoSyncTest(TestFramework):
+    def setup_params(self):
+        self.num_nodes = 2
+
+        # Enable auto sync
+        for i in range(self.num_nodes):
+            self.zgs_node_configs[i] = {
+                "sync": {
+                    "auto_sync_enabled": True,
+                    "max_sequential_workers": 3,
+                    "max_random_workers": 3,
+                }
+            }
+
+    def run_test(self):
+        # Submit and upload files on node 0
+        data_root_1 = self.__upload_file__(0, 256 * 1024)
+        data_root_2 = self.__upload_file__(0, 256 * 1024)
+
+        # Files should be available on node 1 via auto sync
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_1) is not None)
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_1)["finalized"])
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_2) is not None)
+        wait_until(lambda: self.nodes[1].zgs_get_file_info(data_root_2)["finalized"])
+
+if __name__ == "__main__":
+    AutoSyncTest().main()
@@ -4,8 +4,7 @@ import random
 import time
 
 from test_framework.test_framework import TestFramework
-from utility.submission import create_submission
-from utility.submission import submit_data, data_to_segments
+from utility.submission import data_to_segments
 from utility.utils import (
     assert_equal,
     wait_until,
@@ -13,9 +12,7 @@ from utility.utils import (
 
 class SyncTest(TestFramework):
     def setup_params(self):
-        self.num_blockchain_nodes = 2
         self.num_nodes = 2
-        self.__deployed_contracts = 0
 
     def run_test(self):
         # By default, auto_sync_enabled and sync_file_on_announcement_enabled are both false,
@@ -32,18 +29,7 @@ class SyncTest(TestFramework):
         # stop client2, preventing it from receiving AnnounceFile
         client2.shutdown()
 
-        # Create submission
-        chunk_data = random.randbytes(256 * 1024)
-        data_root = self.__create_submission(chunk_data)
-
-        # Ensure log entry sync from blockchain node
-        wait_until(lambda: client1.zgs_get_file_info(data_root) is not None)
-        assert_equal(client1.zgs_get_file_info(data_root)["finalized"], False)
-
-        # Upload file to storage node
-        segments = submit_data(client1, chunk_data)
-        self.log.info("segments: %s", [(s["root"], s["index"], s["proof"]) for s in segments])
-        wait_until(lambda: client1.zgs_get_file_info(data_root)["finalized"])
+        data_root = self.__upload_file__(0, 256 * 1024)
 
         # restart client2
         client2.start()
@@ -75,7 +61,7 @@ class SyncTest(TestFramework):
 
         # Prepare 3 segments to upload
         chunk_data = random.randbytes(256 * 1024 * 3)
-        data_root = self.__create_submission(chunk_data)
+        data_root = self.__submit_file__(chunk_data)
 
         # Ensure log entry sync from blockchain node
         wait_until(lambda: client1.zgs_get_file_info(data_root) is not None)
@@ -111,13 +97,5 @@ class SyncTest(TestFramework):
         # Validate data
         assert_equal(client2.zgs_download_segment_decoded(data_root, 1024, 2048), chunk_data[1024*256:2048*256])
 
-    def __create_submission(self, chunk_data: bytes) -> str:
-        submissions, data_root = create_submission(chunk_data)
-        self.contract.submit(submissions)
-        self.__deployed_contracts += 1
-        wait_until(lambda: self.contract.num_submissions() == self.__deployed_contracts)
-        self.log.info("Submission created, data root: %s, submissions(%s) = %s", data_root, len(submissions), submissions)
-        return data_root
-
 if __name__ == "__main__":
     SyncTest().main()
@@ -2,9 +2,7 @@ import os
 import subprocess
 import tempfile
 import time
-import rlp
 
-from eth_utils import decode_hex, keccak
 from web3 import Web3, HTTPProvider
 from web3.middleware import construct_sign_and_send_raw_middleware
 from enum import Enum, unique
@@ -12,7 +10,6 @@ from config.node_config import (
     GENESIS_PRIV_KEY,
     GENESIS_PRIV_KEY1,
     TX_PARAMS,
-    MINER_ID,
 )
 from utility.simple_rpc_proxy import SimpleRpcProxy
 from utility.utils import (
@@ -1,7 +1,5 @@
 from pathlib import Path
 import json
-from web3 import Web3
-
 
 def load_contract_metadata(path: str, name: str):
     path = Path(path)
@@ -20,8 +20,9 @@ from test_framework.zgs_node import ZgsNode
 from test_framework.blockchain_node import BlockChainNodeType
 from test_framework.conflux_node import ConfluxNode, connect_sample_nodes
 from test_framework.zg_node import ZGNode, zg_node_init_genesis
-from utility.utils import PortMin, is_windows_platform, wait_until
+from utility.utils import PortMin, is_windows_platform, wait_until, assert_equal
 from utility.build_binary import build_cli
+from utility.submission import create_submission, submit_data
 
 __file_path__ = os.path.dirname(os.path.realpath(__file__))
 
@@ -40,8 +41,8 @@ class TestFramework:
         if "http_proxy" in os.environ:
             del os.environ["http_proxy"]
 
-        self.num_blockchain_nodes = None
-        self.num_nodes = None
+        self.num_blockchain_nodes = 1
+        self.num_nodes = 1
         self.blockchain_nodes = []
         self.nodes = []
         self.contract = None
@@ -53,6 +54,7 @@ class TestFramework:
         self.mine_period = 100
         self.lifetime_seconds = 3600
         self.launch_wait_seconds = 1
+        self.num_deployed_contracts = 0
 
         # Set default binary path
         binary_ext = ".exe" if is_windows_platform() else ""
@@ -398,6 +400,31 @@ class TestFramework:
 
         return root
 
+    def __submit_file__(self, chunk_data: bytes) -> str:
+        submissions, data_root = create_submission(chunk_data)
+        self.contract.submit(submissions)
+        self.num_deployed_contracts += 1
+        wait_until(lambda: self.contract.num_submissions() == self.num_deployed_contracts)
+        self.log.info("Submission completed, data root: %s, submissions(%s) = %s", data_root, len(submissions), submissions)
+        return data_root
+
+    def __upload_file__(self, node_index: int, random_data_size: int) -> str:
+        # Create submission
+        chunk_data = random.randbytes(random_data_size)
+        data_root = self.__submit_file__(chunk_data)
+
+        # Ensure log entry sync from blockchain node
+        client = self.nodes[node_index]
+        wait_until(lambda: client.zgs_get_file_info(data_root) is not None)
+        assert_equal(client.zgs_get_file_info(data_root)["finalized"], False)
+
+        # Upload file to storage node
+        segments = submit_data(client, chunk_data)
+        self.log.info("segments: %s", [(s["root"], s["index"], s["proof"]) for s in segments])
+        wait_until(lambda: client.zgs_get_file_info(data_root)["finalized"])
+
+        return data_root
+
     def setup_params(self):
         self.num_blockchain_nodes = 1
         self.num_nodes = 1
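The __submit_file__ and __upload_file__ helpers added above move the submit-then-upload flow out of sync_test.py into the shared framework, which is what the new auto sync tests rely on. A minimal sketch of how a test would call the helper, assuming the defaults set in this diff; the class name and data size are illustrative, not part of the commit:

    #!/usr/bin/env python3
    # Illustration only: a hypothetical test exercising __upload_file__.
    from test_framework.test_framework import TestFramework


    class UploadHelperExampleTest(TestFramework):
        def setup_params(self):
            self.num_nodes = 2

        def run_test(self):
            # Submits the transaction on chain, uploads 256 KB of random data
            # to node 0, and waits until node 0 reports the file as finalized.
            data_root = self.__upload_file__(0, 256 * 1024)
            assert self.nodes[0].zgs_get_file_info(data_root)["finalized"]


    if __name__ == "__main__":
        UploadHelperExampleTest().main()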
@@ -2,9 +2,8 @@ import os
 import shutil
 import base64
 
-from config.node_config import ZGS_CONFIG
+from config.node_config import ZGS_CONFIG, update_config
 from test_framework.blockchain_node import NodeType, TestNode
-from config.node_config import MINER_ID
 from utility.utils import (
     initialize_toml_config,
     p2p_port,
@@ -12,7 +11,6 @@ from utility.utils import (
     blockchain_rpc_port,
 )
 
-
 class ZgsNode(TestNode):
     def __init__(
         self,
@@ -48,9 +46,9 @@ class ZgsNode(TestNode):
             "blockchain_rpc_endpoint": f"http://127.0.0.1:{blockchain_rpc_port(0)}",
         }
         # Set configs for this specific node.
-        local_conf.update(indexed_config)
+        update_config(local_conf, indexed_config)
         # Overwrite with personalized configs.
-        local_conf.update(updated_config)
+        update_config(local_conf, updated_config)
         data_dir = os.path.join(root_dir, "zgs_node" + str(index))
         rpc_url = "http://" + local_conf["rpc_listen_address"]
         super().__init__(
@@ -1,6 +1,6 @@
 import base64
 
-from eth_utils import encode_hex, decode_hex
+from eth_utils import decode_hex
 from math import log2
 from utility.merkle_tree import add_0x_prefix, Leaf, MerkleTree
 from utility.spec import ENTRY_SIZE, PORA_CHUNK_SIZE
@@ -1,15 +1,10 @@
 import base64
 import inspect
-import os
 import platform
 import rtoml
 import time
 import sha3
 
-from config.node_config import ZGS_CONFIG
-from eth_utils import encode_hex
-
-
 class PortMin:
     # Must be initialized with a unique integer for each process
     n = 11000