Skip to content

Commit

Permalink
feat(core): add tests for overlay client/server with filled storage
Browse files Browse the repository at this point in the history
  • Loading branch information
pashinov committed Apr 24, 2024
1 parent 49e435f commit b6b27cf
Show file tree
Hide file tree
Showing 8 changed files with 358 additions and 37 deletions.
7 changes: 3 additions & 4 deletions core/src/block_strider/provider.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,8 @@ use std::sync::Arc;

use everscale_types::models::BlockId;
use futures_util::future::BoxFuture;
use tycho_block_util::archive::WithArchiveData;
use tycho_block_util::block::{BlockStuff, BlockStuffAug};
use tycho_storage::{BlockConnection, Storage};
use tycho_storage::Storage;

use crate::blockchain_client::BlockchainClient;
use crate::proto::overlay::BlockFull;
Expand Down Expand Up @@ -103,7 +102,7 @@ impl BlockProvider for BlockchainClient {
BlockFull::Empty => unreachable!(),
};

match BlockStuff::deserialize_checked(block_id, data) {
match BlockStuff::deserialize(block_id, data) {
Ok(block) => {
res.mark_response(true);
Some(Ok(BlockStuffAug::new(block, data.clone())))
Expand Down Expand Up @@ -150,7 +149,7 @@ impl BlockProvider for BlockchainClient {
block_id,
block: data,
..
} => match BlockStuff::deserialize_checked(*block_id, data) {
} => match BlockStuff::deserialize(*block_id, data) {
Ok(block) => Some(Ok(BlockStuffAug::new(block, data.clone()))),
Err(e) => {
res.mark_response(false);
Expand Down
2 changes: 1 addition & 1 deletion core/src/overlay_client/public_overlay_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ impl PublicOverlayClient {
}

pub async fn entries_removed(&self) {
self.0.overlay.entries_removed().notified().await
self.0.overlay.entries_removed().notified().await;
}
pub fn neighbour_update_interval_ms(&self) -> u64 {
self.0.settings.neighbours_update_interval
Expand Down
29 changes: 19 additions & 10 deletions core/tests/block_strider.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
use std::collections::BTreeMap;
use std::time::Duration;

use everscale_types::models::BlockId;
use futures_util::stream::FuturesUnordered;
use futures_util::StreamExt;
use tycho_core::block_strider::provider::BlockProvider;
Expand All @@ -18,11 +17,15 @@ async fn storage_block_strider() -> anyhow::Result<()> {

let (storage, tmp_dir) = common::storage::init_storage().await?;

let block = storage.get_block(&BlockId::default()).await;
assert!(block.is_none());
let block_ids = common::storage::get_block_ids()?;
for block_id in block_ids {
if block_id.shard.is_masterchain() {
let block = storage.get_block(&block_id).await;

let next_block = storage.get_next_block(&BlockId::default()).await;
assert!(next_block.is_none());
assert!(block.is_some());
assert_eq!(&block_id, block.unwrap()?.id());
}
}

tmp_dir.close()?;

Expand All @@ -42,7 +45,7 @@ async fn overlay_block_strider() -> anyhow::Result<()> {

let (storage, tmp_dir) = common::storage::init_storage().await?;

const NODE_COUNT: usize = 5;
const NODE_COUNT: usize = 10;
let nodes = common::node::make_network(storage, NODE_COUNT);

tracing::info!("discovering nodes");
Expand Down Expand Up @@ -113,11 +116,17 @@ async fn overlay_block_strider() -> anyhow::Result<()> {
Default::default(),
);

let block = client.get_block(&BlockId::default()).await;
assert!(block.is_none());
let block_ids = common::storage::get_block_ids()?;
for block_id in block_ids {
if block_id.shard.is_masterchain() {
let block = client.get_block(&block_id).await;

assert!(block.is_some());
assert_eq!(&block_id, block.unwrap()?.id());

let block = client.get_next_block(&BlockId::default()).await;
assert!(block.is_none());
break;
}
}

tmp_dir.close()?;

Expand Down
104 changes: 104 additions & 0 deletions core/tests/common/archive.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
#![allow(clippy::map_err_ignore)]

use std::collections::BTreeMap;

use anyhow::Result;
use everscale_types::cell::Load;
use everscale_types::models::{Block, BlockId, BlockIdShort, BlockProof};
use sha2::Digest;

use tycho_block_util::archive::{ArchiveEntryId, ArchiveReader};

/// In-memory view of a parsed block archive used by the test helpers.
pub struct Archive {
    // Blocks and proofs extracted from the archive, keyed (and therefore
    // iterated) in `BlockId` order thanks to `BTreeMap`.
    pub blocks: BTreeMap<BlockId, ArchiveDataEntry>,
}

impl Archive {
pub fn new(data: &[u8]) -> Result<Self> {
let reader = ArchiveReader::new(data)?;

let mut res = Archive {
blocks: Default::default(),
};

for data in reader {
let entry = data?;
match ArchiveEntryId::from_filename(entry.name)? {
ArchiveEntryId::Block(id) => {
let block = deserialize_block(&id, entry.data)?;
res.blocks.entry(id).or_default().block = Some(block);
}
ArchiveEntryId::Proof(id) if id.shard.workchain() == -1 => {
let proof = deserialize_block_proof(&id, entry.data, false)?;
res.blocks.entry(id).or_default().proof = Some(proof);
}
ArchiveEntryId::ProofLink(id) if id.shard.workchain() != -1 => {
let proof = deserialize_block_proof(&id, entry.data, true)?;
res.blocks.entry(id).or_default().proof = Some(proof);
}
_ => continue,
}
}
Ok(res)
}
}

/// Per-block payload collected from an archive: the block body and its proof.
/// Either field may be `None` if the archive did not contain that entry.
#[derive(Default)]
pub struct ArchiveDataEntry {
    pub block: Option<Block>,
    pub proof: Option<BlockProof>,
}

/// Decodes a block from raw BOC bytes, verifying both hashes recorded in `id`.
///
/// Checks the SHA-256 file hash of the raw bytes first, then the root cell's
/// representation hash, before attempting to load the `Block` structure.
pub(crate) fn deserialize_block(id: &BlockId, data: &[u8]) -> Result<Block, ArchiveDataError> {
    // Cheapest check first: hash of the raw bytes.
    let file_hash = sha2::Sha256::digest(data);
    if id.file_hash.as_slice() != file_hash.as_slice() {
        return Err(ArchiveDataError::InvalidFileHash(id.as_short_id()));
    }

    let root = everscale_types::boc::Boc::decode(data)
        .map_err(|_| ArchiveDataError::InvalidBlockData)?;
    if &id.root_hash != root.repr_hash() {
        return Err(ArchiveDataError::InvalidRootHash);
    }

    Block::load_from(&mut root.as_slice()?).map_err(|_| ArchiveDataError::InvalidBlockData)
}

/// Decodes a block proof from raw BOC bytes and validates it against `block_id`.
///
/// A *full* proof (`is_link == false`) is only valid for masterchain blocks;
/// other workchains must use proof links.
///
/// # Errors
/// - `InvalidBlockProof` if the BOC or proof structure cannot be decoded.
/// - `ProofForAnotherBlock` if the proof references a different block.
/// - `ProofForNonMasterchainBlock` if a full proof is supplied for a
///   non-masterchain block.
pub(crate) fn deserialize_block_proof(
    block_id: &BlockId,
    data: &[u8],
    is_link: bool,
) -> Result<BlockProof, ArchiveDataError> {
    let root =
        everscale_types::boc::Boc::decode(data).map_err(|_| ArchiveDataError::InvalidBlockProof)?;
    let proof = BlockProof::load_from(&mut root.as_slice()?)
        .map_err(|_| ArchiveDataError::InvalidBlockProof)?;

    if &proof.proof_for != block_id {
        return Err(ArchiveDataError::ProofForAnotherBlock);
    }

    // Bug fix: the original condition was `!block_id.shard.workchain() == -1`,
    // which parses as `(!workchain) == -1` (unary bitwise NOT binds tighter
    // than `==`) and is therefore true only for workchain 0. That silently
    // accepted full proofs for any other non-masterchain workchain.
    if block_id.shard.workchain() != -1 && !is_link {
        Err(ArchiveDataError::ProofForNonMasterchainBlock)
    } else {
        Ok(proof)
    }
}

/// Errors produced while decoding blocks and proofs from an archive.
#[derive(thiserror::Error, Debug)]
pub(crate) enum ArchiveDataError {
    /// SHA-256 of the raw entry bytes did not match `BlockId::file_hash`.
    #[error("Invalid file hash {0}")]
    InvalidFileHash(BlockIdShort),
    /// Root cell hash did not match `BlockId::root_hash`.
    #[error("Invalid root hash")]
    InvalidRootHash,
    /// BOC decoding or `Block` loading failed.
    #[error("Invalid block data")]
    InvalidBlockData,
    /// BOC decoding or `BlockProof` loading failed.
    #[error("Invalid block proof")]
    InvalidBlockProof,
    /// The proof's `proof_for` field references a different block id.
    #[error("Proof for another block")]
    ProofForAnotherBlock,
    /// A full (non-link) proof was supplied for a non-masterchain block.
    #[error("Proof for non-masterchain block")]
    ProofForNonMasterchainBlock,
    /// Propagated cell/slice error from `everscale_types`.
    #[error(transparent)]
    TypeError(#[from] everscale_types::error::Error),
}
1 change: 1 addition & 0 deletions core/tests/common/mod.rs
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
pub mod archive;
pub mod node;
pub mod storage;
114 changes: 111 additions & 3 deletions core/tests/common/storage.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,15 @@
use std::sync::Arc;

use anyhow::Result;
use anyhow::{Context, Result};
use bytesize::ByteSize;
use everscale_types::models::BlockId;
use tempfile::TempDir;
use tycho_storage::{Db, DbOptions, Storage};
use tycho_block_util::block::{BlockProofStuff, BlockProofStuffAug, BlockStuff, BlockStuffAug};
use tycho_storage::{BlockMetaData, Db, DbOptions, Storage};

pub(crate) async fn init_storage() -> Result<(Arc<Storage>, TempDir)> {
use crate::common::*;

pub(crate) async fn init_empty_storage() -> Result<(Arc<Storage>, TempDir)> {
let tmp_dir = tempfile::tempdir()?;
let root_path = tmp_dir.path();

Expand All @@ -26,3 +30,107 @@ pub(crate) async fn init_storage() -> Result<(Arc<Storage>, TempDir)> {

Ok((storage, tmp_dir))
}

pub(crate) fn get_block_ids() -> Result<Vec<BlockId>> {
let data = include_bytes!("../../tests/data/00001");
let archive = archive::Archive::new(data)?;

let block_ids = archive
.blocks
.into_iter()
.map(|(block_id, _)| block_id)
.collect();

Ok(block_ids)
}

/// Parses and returns the test archive bundled at `tests/data/00001`.
///
/// # Errors
/// Fails if the embedded archive bytes cannot be parsed.
pub(crate) fn get_archive() -> Result<archive::Archive> {
    archive::Archive::new(include_bytes!("../../tests/data/00001"))
}

/// Creates a temporary storage instance pre-filled with every masterchain
/// block (and its proof) from the bundled test archive.
///
/// Returns the populated storage together with the `TempDir` that owns its
/// on-disk files; dropping/closing the dir destroys the storage.
///
/// # Errors
/// Fails if the archive cannot be parsed, block info cannot be loaded, or any
/// storage operation fails. Also panics (via `assert!`/`unwrap`) on broken
/// invariants — acceptable here since this is test-support code.
pub(crate) async fn init_storage() -> Result<(Arc<Storage>, TempDir)> {
    let (storage, tmp_dir) = init_empty_storage().await?;

    // Bundled test archive with blocks and proofs.
    let data = include_bytes!("../../tests/data/00001");
    let provider = archive::Archive::new(data)?;

    for (block_id, archive) in provider.blocks {
        // Only masterchain blocks are stored; shard blocks are skipped.
        if block_id.shard.is_masterchain() {
            // Archive entries for masterchain blocks are expected to be
            // complete — missing data is a broken fixture, so unwrap.
            let block = archive.block.unwrap();
            let proof = archive.proof.unwrap();

            let info = block.info.load().context("Failed to load block info")?;

            // Build the metadata the storage layer needs alongside raw data.
            let meta = BlockMetaData {
                is_key_block: info.key_block,
                gen_utime: info.gen_utime,
                // `master_ref` is absent for masterchain blocks themselves;
                // `transpose` turns Option<Result<_>> into Result<Option<_>>.
                mc_ref_seqno: info
                    .master_ref
                    .map(|r| {
                        r.load()
                            .context("Failed to load master ref")
                            .map(|mr| mr.seqno)
                    })
                    .transpose()
                    .context("Failed to process master ref")?,
            };

            // Re-encode the block to BOC so both parsed and raw forms are kept.
            let block_data = everscale_types::boc::BocRepr::encode(&block)?;
            let block_stuff =
                BlockStuffAug::new(BlockStuff::with_block(block_id, block.clone()), block_data);

            let block_result = storage
                .block_storage()
                .store_block_data(&block_stuff, meta)
                .await?;

            // Storage starts empty, so every insert must be new.
            assert!(block_result.new);

            let handle = storage
                .block_handle_storage()
                .load_handle(&block_id)?
                .unwrap();

            assert_eq!(handle.id(), block_stuff.data.id());

            // Round-trip check: stored data must read back identically.
            let bs = storage
                .block_storage()
                .load_block_data(&block_result.handle)
                .await?;

            assert_eq!(bs.id(), &block_id);
            assert_eq!(bs.block(), &block);

            // `false` => full proof, not a link (these are masterchain blocks).
            let block_proof = BlockProofStuff::deserialize(
                block_id,
                everscale_types::boc::BocRepr::encode(&proof)?.as_slice(),
                false,
            )?;

            let block_proof_with_data = BlockProofStuffAug::new(
                block_proof.clone(),
                everscale_types::boc::BocRepr::encode(&proof)?,
            );

            // NOTE(review): `handle.into()` presumably converts the block
            // handle into the form `store_block_proof` expects — confirm
            // against tycho_storage's API.
            let handle = storage
                .block_storage()
                .store_block_proof(&block_proof_with_data, handle.into())
                .await?
                .handle;

            // Round-trip check for the proof as well.
            let bp = storage
                .block_storage()
                .load_block_proof(&handle, false)
                .await?;

            assert_eq!(bp.is_link(), block_proof.is_link());
            assert_eq!(bp.proof().root, block_proof.proof().root);
            assert_eq!(bp.proof().proof_for, block_proof.proof().proof_for);
        }
    }

    Ok((storage, tmp_dir))
}
Loading

0 comments on commit b6b27cf

Please sign in to comment.