Merge pull request #57 from broxus/0xdeafbeef/push-qtkkqozzuyms
refactor: remove unnecessary test data
Rexagon authored May 8, 2024
2 parents 51cb78a + 8742baa commit da067e3
Showing 8 changed files with 38 additions and 145 deletions.
4 changes: 2 additions & 2 deletions collator/src/test_utils.rs
@@ -69,7 +69,7 @@ pub async fn prepare_test_storage() -> anyhow::Result<Storage> {
let tracker = MinRefMcStateTracker::default();

// master state
let master_bytes = include_bytes!("../src/state_node/tests/data/test_state_2_master.boc");
let master_bytes = include_bytes!("../../test/test_state_2_master.boc");
let master_file_hash: HashBytes = sha2::Sha256::digest(master_bytes).into();
let master_root = Boc::decode(master_bytes)?;
let master_root_hash = *master_root.repr_hash();
@@ -108,7 +108,7 @@ pub async fn prepare_test_storage() -> anyhow::Result<Storage> {
.await?;

// shard state
let shard_bytes = include_bytes!("../src/state_node/tests/data/test_state_2_0:80.boc");
let shard_bytes = include_bytes!("../../test/test_state_2_0:80.boc");
let shard_root = Boc::decode(shard_bytes)?;
let shard_state = shard_root.parse::<Box<ShardStateUnsplit>>()?;
let shard_id = BlockId {
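The path change above works because `include_bytes!` resolves its argument relative to the source file that invokes it, so moving the shared BOC fixtures into what appears to be a repository-level `test/` directory only requires updating the relative path; the bytes are still embedded at compile time. A minimal sketch of the pattern, assuming the `everscale_types` and `sha2` APIs used elsewhere in this diff (`load_master_fixture` is a hypothetical helper, not the repository's code):

```rust
// Minimal sketch, not the repository's exact code.
use everscale_types::boc::Boc;
use everscale_types::cell::HashBytes;
use sha2::Digest;

fn load_master_fixture() -> anyhow::Result<()> {
    // Resolved relative to this source file at compile time.
    let master_bytes = include_bytes!("../../test/test_state_2_master.boc");

    // File hash is the SHA-256 of the raw BOC bytes, as in the diff above.
    let master_file_hash: HashBytes = sha2::Sha256::digest(master_bytes).into();

    // Root hash comes from the decoded cell tree.
    let master_root = Boc::decode(master_bytes)?;
    let master_root_hash = *master_root.repr_hash();

    println!("file hash: {master_file_hash}, root hash: {master_root_hash}");
    Ok(())
}
```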
118 changes: 0 additions & 118 deletions core/src/block_strider/state_applier.rs
@@ -210,121 +210,3 @@ struct Inner<S> {
storage: Storage,
state_subscriber: S,
}

#[cfg(test)]
pub mod test {
use std::str::FromStr;

use everscale_types::cell::HashBytes;
use everscale_types::models::*;
use tracing_test::traced_test;
use tycho_storage::{BlockMetaData, Storage};

use super::*;
use crate::block_strider::subscriber::test::PrintSubscriber;
use crate::block_strider::{ArchiveBlockProvider, BlockStrider, PersistentBlockStriderState};

#[traced_test]
#[tokio::test]
async fn test_state_apply() -> anyhow::Result<()> {
let (provider, storage) = prepare_state_apply().await?;

let last_mc = *provider.mc_block_ids.last_key_value().unwrap().1;
let blocks = provider.blocks.keys().copied().collect::<Vec<_>>();

let block_strider = BlockStrider::builder()
.with_provider(provider)
.with_state(PersistentBlockStriderState::new(last_mc, storage.clone()))
.with_state_subscriber(Default::default(), storage.clone(), PrintSubscriber)
.build();

block_strider.run().await?;

assert_eq!(
storage.node_state().load_last_mc_block_id().unwrap(),
last_mc
);
storage
.shard_state_storage()
.load_state(&last_mc)
.await
.unwrap();

for block in &blocks {
let handle = storage.block_handle_storage().load_handle(block).unwrap();
assert!(handle.meta().is_applied());
storage
.shard_state_storage()
.load_state(block)
.await
.unwrap();
}

Ok(())
}

pub async fn prepare_state_apply() -> Result<(ArchiveBlockProvider, Storage)> {
let data = include_bytes!("../../tests/data/00001");
let provider = ArchiveBlockProvider::new(data).unwrap();

let (storage, _tmp_dir) = Storage::new_temp()?;

let master = include_bytes!("../../tests/data/everscale_zerostate.boc");
let shard = include_bytes!("../../tests/data/everscale_shard_zerostate.boc");

let master_id = BlockId {
root_hash: HashBytes::from_str(
"58ffca1a178daff705de54216e5433c9bd2e7d850070d334d38997847ab9e845",
)
.unwrap(),
file_hash: HashBytes::from_str(
"d270b87b2952b5ba7daa70aaf0a8c361befcf4d8d2db92f9640d5443070838e4",
)
.unwrap(),
shard: ShardIdent::MASTERCHAIN,
seqno: 0,
};
let master = ShardStateStuff::deserialize_zerostate(&master_id, master).unwrap();

// Parse block id
let block_id = BlockId::from_str("-1:8000000000000000:0:58ffca1a178daff705de54216e5433c9bd2e7d850070d334d38997847ab9e845:d270b87b2952b5ba7daa70aaf0a8c361befcf4d8d2db92f9640d5443070838e4")?;

// Write zerostate to db
let (handle, _) = storage.block_handle_storage().create_or_load_handle(
&block_id,
BlockMetaData::zero_state(master.state().gen_utime),
);

storage
.shard_state_storage()
.store_state(&handle, &master)
.await?;

let shard_id = BlockId {
root_hash: HashBytes::from_str(
"95f042d1bf5b99840cad3aaa698f5d7be13d9819364faf9dd43df5b5d3c2950e",
)
.unwrap(),
file_hash: HashBytes::from_str(
"97af4602a57fc884f68bb4659bab8875dc1f5e45a9fd4fbafd0c9bc10aa5067c",
)
.unwrap(),
shard: ShardIdent::BASECHAIN,
seqno: 0,
};

// store workchain zerostate
let shard = ShardStateStuff::deserialize_zerostate(&shard_id, shard).unwrap();
let (handle, _) = storage.block_handle_storage().create_or_load_handle(
&shard_id,
BlockMetaData::zero_state(shard.state().gen_utime),
);
storage
.shard_state_storage()
.store_state(&handle, &shard)
.await?;

storage.node_state().store_last_mc_block_id(&master_id);
Ok((provider, storage))
}
}
4 changes: 1 addition & 3 deletions justfile
@@ -62,11 +62,9 @@ test:

# runs all tests including ignored. Will take a lot of time to run
integration_test: prepare_integration_tests
export RUST_BACKTRACE=1
export RUST_LIB_BACKTRACE=1
#cargo test -r --all-targets --all-features --workspace -- --ignored #uncomment this when all crates will compile ˙◠˙
# for now add tests one by one
RUST_LIB_BACKTRACE=1 RUST_BACKTRACE=1 cargo test -r --package tycho-storage --lib store::shard_state::replace_transaction::test::insert_and_delete_of_several_shards -- --ignored --exact --nocapture
RUST_LIB_BACKTRACE=1 RUST_BACKTRACE=1 cargo test -r --package tycho-storage --lib store::shard_state::store_state_raw::test::insert_and_delete_of_several_shards -- --ignored --exact --nocapture

gen_network n: build_debug
#!/usr/bin/env bash
11 changes: 9 additions & 2 deletions storage/src/db/kv_db/mod.rs
@@ -154,15 +154,22 @@ impl Db {
(self.archives.cf(), "archives"),
(self.shard_states.cf(), "shard states"),
(self.cells.cf(), "cells"),
(self.temp_cells.cf(), "temp cells"),
];

let mut compaction_options = rocksdb::CompactOptions::default();
compaction_options.set_exclusive_manual_compaction(true);
compaction_options
.set_bottommost_level_compaction(rocksdb::BottommostLevelCompaction::ForceOptimized);

for (cf, title) in tables {
tracing::info!("{title} compaction started");

let instant = Instant::now();

let bound = Option::<[u8; 0]>::None;
self.raw().compact_range_cf(&cf, bound, bound);

self.raw()
.compact_range_cf_opt(&cf, bound, bound, &compaction_options);

tracing::info!(
elapsed = %humantime::format_duration(instant.elapsed()),
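For context, the hunk above switches from plain `compact_range_cf` to `compact_range_cf_opt` so the manual compaction can run exclusively and can force-rewrite the bottommost level, and it adds the `temp_cells` column family to the list. Below is a hedged sketch of the same pattern against the `rocksdb` crate; the helper name and surrounding structure are assumptions, not the actual `Db::trigger_compaction` implementation:

```rust
// Sketch of the manual-compaction pattern shown in the diff; `compact_all` is
// a hypothetical free function, not the repository's real method.
use std::time::Instant;

fn compact_all(db: &rocksdb::DB, cf_names: &[&str]) {
    let mut opts = rocksdb::CompactOptions::default();
    // Run this manual compaction exclusively, i.e. not interleaved with
    // other compactions.
    opts.set_exclusive_manual_compaction(true);
    // Force-rewrite the bottommost level so merge results are applied and
    // tombstones are dropped instead of being carried forward.
    opts.set_bottommost_level_compaction(rocksdb::BottommostLevelCompaction::ForceOptimized);

    for name in cf_names {
        let Some(cf) = db.cf_handle(name) else { continue };

        let started = Instant::now();
        // `None` bounds mean "compact the entire key range" of this column family.
        let bound = Option::<[u8; 0]>::None;
        db.compact_range_cf_opt(cf, bound, bound, &opts);

        println!("{name}: compaction finished in {:?}", started.elapsed());
    }
}
```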
23 changes: 17 additions & 6 deletions storage/src/store/shard_state/store_state_raw.rs
@@ -568,9 +568,9 @@ mod test {

use super::*;

#[test]
#[tokio::test]
#[ignore]
fn insert_and_delete_of_several_shards() -> anyhow::Result<()> {
async fn insert_and_delete_of_several_shards() -> Result<()> {
tycho_util::test::init_logger("insert_and_delete_of_several_shards");
let project_root = project_root()?.join(".scratch");
let integration_test_path = project_root.join("integration_tests");
@@ -612,7 +612,7 @@ mod test {
StoreStateRaw::new(&block_id, &db, &download_dir, &cells_storage, &tracker)
.context("Failed to create ShardStateReplaceTransaction")?;

let file = std::fs::File::open(file.path())?;
let file = File::open(file.path())?;
let mut file = BufReader::new(file);
let chunk_size = 10_000_000; // size of each chunk in bytes
let mut buffer = vec![0u8; chunk_size];
@@ -637,12 +637,19 @@
}
tracing::info!("Finished processing all states");
tracing::info!("Starting gc");
states_gc(&cells_storage, &db)?;
states_gc(&cells_storage, &db).await?;

drop(db);
drop(cells_storage);
rocksdb::DB::destroy(
&rocksdb::Options::default(),
current_test_path.join("rocksdb"),
)?;

Ok(())
}

fn states_gc(cell_storage: &Arc<CellStorage>, db: &Db) -> anyhow::Result<()> {
async fn states_gc(cell_storage: &Arc<CellStorage>, db: &Db) -> Result<()> {
let states_iterator = db.shard_states.iterator(IteratorMode::Start);
let bump = bumpalo::Bump::new();

@@ -659,9 +666,13 @@

// execute batch
db.raw().write_opt(batch, db.cells.write_config())?;
tracing::info!("State deleted. Progress: {deleted}/{total_states}",);
tracing::info!("State deleted. Progress: {}/{total_states}", deleted + 1);
}

// Two compactions in a row: the first runs merge operators, the second removes all tombstones
db.trigger_compaction().await;
db.trigger_compaction().await;

let cells_left = db.cells.iterator(IteratorMode::Start).count();
tracing::info!("States GC finished. Cells left: {cells_left}");
assert_eq!(cells_left, 0, "Gc is broken. Press F to pay respect");
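The test above streams the downloaded state file into the store in fixed-size chunks rather than loading it whole. A minimal sketch of that chunked-read loop, assuming `anyhow` for errors; `process_chunk` is a hypothetical stand-in for the real per-chunk processing call:

```rust
// Sketch only: mirrors the BufReader/chunk pattern from the test above.
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;

fn feed_in_chunks(path: &Path) -> anyhow::Result<()> {
    let file = File::open(path)?;
    let mut file = BufReader::new(file);

    let chunk_size = 10_000_000; // size of each chunk in bytes
    let mut buffer = vec![0u8; chunk_size];

    loop {
        // `read` may return fewer bytes than the buffer size; 0 means EOF.
        let bytes_read = file.read(&mut buffer)?;
        if bytes_read == 0 {
            break;
        }
        process_chunk(&buffer[..bytes_read])?;
    }
    Ok(())
}

// Hypothetical placeholder for whatever consumes each chunk in the real test.
fn process_chunk(_chunk: &[u8]) -> anyhow::Result<()> {
    Ok(())
}
```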
23 changes: 9 additions & 14 deletions storage/tests/mod.rs
@@ -14,8 +14,7 @@ struct ShardStateCombined {
}

impl ShardStateCombined {
fn from_file(path: impl AsRef<str>) -> Result<Self> {
let bytes = std::fs::read(path.as_ref())?;
fn from_bytes(bytes: &[u8]) -> Result<Self> {
let cell = Boc::decode(&bytes)?;
let state = cell.parse()?;
Ok(Self { cell, state })
@@ -60,10 +59,11 @@ async fn persistent_storage_everscale() -> Result<()> {
assert!(storage.node_state().load_init_mc_block_id().is_none());

// Read zerostate
let zero_state_raw = ShardStateCombined::from_file("tests/everscale_zerostate.boc")?;
let zero_state_raw =
ShardStateCombined::from_bytes(include_bytes!("../../test/test_state_2_master.boc"))?;

// Parse block id
let block_id = BlockId::from_str("-1:8000000000000000:0:58ffca1a178daff705de54216e5433c9bd2e7d850070d334d38997847ab9e845:d270b87b2952b5ba7daa70aaf0a8c361befcf4d8d2db92f9640d5443070838e4")?;
let block_id = BlockId::from_str("-1:8000000000000000:2:4557702252a8fcec88387ab78407e5116e83222b213653911f86e6504cb7aa78:e2bc83d6be6975b9c68f56c5f6d4997d2a33226bfac6a431b47874e3ba18db75")?;

// Write zerostate to db
let (handle, _) = storage.block_handle_storage().create_or_load_handle(
@@ -107,21 +107,21 @@

storage
.persistent_state_storage()
.prepare_persistent_states_dir(&zerostate.block_id())?;
.prepare_persistent_states_dir(zerostate.block_id())?;

storage
.persistent_state_storage()
.save_state(
&zerostate.block_id(),
&zerostate.block_id(),
zerostate.block_id(),
zerostate.block_id(),
zero_state_raw.cell.repr_hash(),
)
.await?;

// Check if state exists
let exist = storage
.persistent_state_storage()
.state_exists(&zerostate.block_id(), &zerostate.block_id());
.state_exists(zerostate.block_id(), zerostate.block_id());
assert_eq!(exist, true);

// Read persistent state
@@ -130,12 +130,7 @@

let persistent_state_storage = storage.persistent_state_storage();
let persistent_state_data = persistent_state_storage
.read_state_part(
&zerostate.block_id(),
&zerostate.block_id(),
offset,
max_size,
)
.read_state_part(zerostate.block_id(), zerostate.block_id(), offset, max_size)
.await
.unwrap();

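The `from_file` → `from_bytes` change above lets the test embed its fixture with `include_bytes!` instead of reading a path at runtime. A hedged reconstruction of the constructor as it appears in the diff, with the struct's field types assumed from how they are used rather than confirmed:

```rust
// Sketch of the bytes-based constructor; field types are assumptions.
use anyhow::Result;
use everscale_types::boc::Boc;
use everscale_types::cell::Cell;
use everscale_types::models::ShardStateUnsplit;

struct ShardStateCombined {
    cell: Cell,
    state: ShardStateUnsplit,
}

impl ShardStateCombined {
    fn from_bytes(bytes: &[u8]) -> Result<Self> {
        // Decode the BOC into a cell tree, then parse the root as a shard state.
        let cell = Boc::decode(bytes)?;
        let state = cell.parse()?;
        Ok(Self { cell, state })
    }
}

// Usage: embed the fixture at compile time, as the test above now does.
fn example() -> Result<ShardStateCombined> {
    ShardStateCombined::from_bytes(include_bytes!("../../test/test_state_2_master.boc"))
}
```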
File renamed without changes.
File renamed without changes.
