diff --git a/pumpkin-world/src/chunk/db/informative_table.rs b/pumpkin-world/src/chunk/db/informative_table.rs
index 5a1968f89..6efa8b58e 100644
--- a/pumpkin-world/src/chunk/db/informative_table.rs
+++ b/pumpkin-world/src/chunk/db/informative_table.rs
@@ -6,18 +6,18 @@ use std::{
 
 use bytes::{Buf, BufMut, Bytes, BytesMut};
 
-use crate::chunk::{compression::Compression, db::RawChunkWritingError};
+use crate::chunk::{compression::Compression, db::ChunkStorageWritingError};
 
-use super::{CompressionError, RawChunkReader, RawChunkReadingError, RawChunkWriter};
+use super::{ChunkStorage, ChunkStorageReadingError, CompressionError};
 
-pub struct InformativeTableDB;
+pub struct InformativeTable;
 
-impl RawChunkReader for InformativeTableDB {
+impl ChunkStorage for InformativeTable {
     fn read_raw_chunk(
         &self,
         save_file: &crate::level::LevelFolder,
         at: &pumpkin_util::math::vector2::Vector2<i32>,
-    ) -> Result<Vec<u8>, RawChunkReadingError> {
+    ) -> Result<Vec<u8>, ChunkStorageReadingError> {
         let region = (at.x >> 5, at.z >> 5);
 
         let mut region_file = OpenOptions::new()
@@ -28,8 +28,8 @@ impl RawChunkReader for InformativeTableDB {
                     .join(format!("r.{}.{}.mca", region.0, region.1)),
             )
             .map_err(|err| match err.kind() {
-                std::io::ErrorKind::NotFound => RawChunkReadingError::ChunkNotExist,
-                kind => RawChunkReadingError::IoError(kind),
+                std::io::ErrorKind::NotFound => ChunkStorageReadingError::ChunkNotExist,
+                kind => ChunkStorageReadingError::IoError(kind),
             })?;
 
         let mut location_table: [u8; 4096] = [0; 4096];
@@ -38,10 +38,10 @@ impl RawChunkReader for InformativeTableDB {
         // fill the location and timestamp tables
         region_file
             .read_exact(&mut location_table)
-            .map_err(|err| RawChunkReadingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageReadingError::IoError(err.kind()))?;
         region_file
             .read_exact(&mut timestamp_table)
-            .map_err(|err| RawChunkReadingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageReadingError::IoError(err.kind()))?;
 
         let chunk_x = at.x & 0x1F;
         let chunk_z = at.z & 0x1F;
@@ -54,31 +54,32 @@ impl RawChunkReader for InformativeTableDB {
         let size_at = location_table[table_entry as usize + 3] as usize * 4096;
 
         if offset_at == 0 && size_at == 0 {
-            return Err(RawChunkReadingError::ChunkNotExist);
+            return Err(ChunkStorageReadingError::ChunkNotExist);
         }
 
         // Read the file using the offset and size
         let mut file_buf = {
             region_file
                 .seek(std::io::SeekFrom::Start(offset_at))
-                .map_err(|_| RawChunkReadingError::RegionIsInvalid)?;
+                .map_err(|_| ChunkStorageReadingError::RegionIsInvalid)?;
             let mut out = vec![0; size_at];
             region_file
                 .read_exact(&mut out)
-                .map_err(|_| RawChunkReadingError::RegionIsInvalid)?;
+                .map_err(|_| ChunkStorageReadingError::RegionIsInvalid)?;
             out
         };
 
         let mut header: Bytes = file_buf.drain(0..5).collect();
         if header.remaining() != 5 {
-            return Err(RawChunkReadingError::InvalidHeader);
+            return Err(ChunkStorageReadingError::InvalidHeader);
         }
 
         let size = header.get_u32();
         let compression = header.get_u8();
 
-        let compression = Compression::from_byte(compression)
-            .map_err(|_| RawChunkReadingError::Compression(CompressionError::UnknownCompression))?;
+        let compression = Compression::from_byte(compression).map_err(|_| {
+            ChunkStorageReadingError::Compression(CompressionError::UnknownCompression)
+        })?;
 
         // size includes the compression scheme byte, so we need to subtract 1
         let chunk_data: Vec<u8> = file_buf.drain(0..size as usize - 1).collect();
@@ -86,22 +87,20 @@ impl RawChunkReader for InformativeTableDB {
         let decompressed_chunk = if let Some(compression) = compression {
             compression
                 .decompress_data(&chunk_data)
-                .map_err(RawChunkReadingError::Compression)?
+                .map_err(ChunkStorageReadingError::Compression)?
         } else {
             chunk_data
         };
 
         Ok(decompressed_chunk)
     }
-}
 
-impl RawChunkWriter for InformativeTableDB {
     fn write_raw_chunk(
         &self,
         chunk: Vec<u8>,
         level_folder: &crate::level::LevelFolder,
         at: &pumpkin_util::math::vector2::Vector2<i32>,
-    ) -> Result<(), super::RawChunkWritingError> {
+    ) -> Result<(), super::ChunkStorageWritingError> {
         let region = (at.x >> 5, at.z >> 5);
 
         let mut region_file = OpenOptions::new()
@@ -114,13 +113,13 @@ impl RawChunkWriter for InformativeTableDB {
                     .region_folder
                     .join(format!("./r.{}.{}.mca", region.0, region.1)),
             )
-            .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
 
         // Compress chunk data
         let compression = Compression::ZLib;
         let compressed_data = compression
             .compress_data(&chunk, 6)
-            .map_err(RawChunkWritingError::Compression)?;
+            .map_err(ChunkStorageWritingError::Compression)?;
 
         // Length of compressed data + compression type
         let length = compressed_data.len() as u32 + 1;
@@ -142,17 +141,17 @@ impl RawChunkWriter for InformativeTableDB {
 
         let file_meta = region_file
             .metadata()
-            .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
 
         // The header consists of 8 KiB of data
         // Try to fill the location and timestamp tables if they already exist
         if file_meta.len() >= 8192 {
             region_file
                 .read_exact(&mut location_table)
-                .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+                .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
             region_file
                 .read_exact(&mut timestamp_table)
-                .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+                .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
         }
 
         // Get location table index
@@ -200,17 +199,17 @@ impl RawChunkWriter for InformativeTableDB {
         region_file.seek(SeekFrom::Start(0)).unwrap();
         region_file
             .write_all(&[location_table, timestamp_table].concat())
-            .map_err(|e| RawChunkWritingError::IoError(e.kind()))?;
+            .map_err(|e| ChunkStorageWritingError::IoError(e.kind()))?;
 
         // Seek to where the chunk is located
         region_file
             .seek(SeekFrom::Start(chunk_data_location * 4096))
-            .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
 
         // Write header and payload
         region_file
             .write_all(&chunk_payload)
-            .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
 
         // Calculate padding to fill the sectors
         // (length + 4) 3 bits for length and 1 for compression type + payload length
@@ -219,17 +218,17 @@ impl RawChunkWriter for InformativeTableDB {
         // Write padding
         region_file
             .write_all(&vec![0u8; padding as usize])
-            .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
 
         region_file
             .flush()
-            .map_err(|err| RawChunkWritingError::IoError(err.kind()))?;
+            .map_err(|err| ChunkStorageWritingError::IoError(err.kind()))?;
 
         Ok(())
     }
 }
 
-impl InformativeTableDB {
+impl InformativeTable {
     /// Returns the next free writable sector
     /// The sector is absolute which means it always has a spacing of 2 sectors
     fn find_free_sector(&self, location_table: &[u8; 4096], sector_size: usize) -> usize {
diff --git a/pumpkin-world/src/chunk/db/mod.rs b/pumpkin-world/src/chunk/db/mod.rs
index 762e4da43..e07292812 100644
--- a/pumpkin-world/src/chunk/db/mod.rs
+++ b/pumpkin-world/src/chunk/db/mod.rs
@@ -7,25 +7,23 @@ use crate::level::LevelFolder;
 
 use super::{compression::CompressionError, ChunkParsingError};
 
-pub trait RawChunkReader: Sync + Send {
+pub trait ChunkStorage: Sync + Send {
     fn read_raw_chunk(
         &self,
         save_file: &LevelFolder,
         at: &Vector2<i32>,
-    ) -> Result<Vec<u8>, RawChunkReadingError>;
-}
+    ) -> Result<Vec<u8>, ChunkStorageReadingError>;
 
-pub trait RawChunkWriter: Send + Sync {
     fn write_raw_chunk(
         &self,
         chunk: Vec<u8>,
         level_folder: &LevelFolder,
         at: &Vector2<i32>,
-    ) -> Result<(), RawChunkWritingError>;
+    ) -> Result<(), ChunkStorageWritingError>;
 }
 
 #[derive(Error, Debug)]
-pub enum RawChunkReadingError {
+pub enum ChunkStorageReadingError {
     #[error("Io error: {0}")]
     IoError(std::io::ErrorKind),
     #[error("Invalid header")]
@@ -41,7 +39,7 @@ pub enum RawChunkReadingError {
 }
 
 #[derive(Error, Debug)]
-pub enum RawChunkWritingError {
+pub enum ChunkStorageWritingError {
     #[error("Io error: {0}")]
     IoError(std::io::ErrorKind),
     #[error("Compression error {0}")]
diff --git a/pumpkin-world/src/chunk/format/anvil.rs b/pumpkin-world/src/chunk/format/anvil.rs
index aba96c6a3..f4a17e7d6 100644
--- a/pumpkin-world/src/chunk/format/anvil.rs
+++ b/pumpkin-world/src/chunk/format/anvil.rs
@@ -14,7 +14,7 @@ use crate::chunk::{
 };
 use crate::coordinates::{ChunkRelativeBlockCoordinates, Height};
 
-use super::{ChunkReader, ChunkReadingError, ChunkWriter, ChunkWritingError};
+use super::{ChunkFormat, ChunkReadingError, ChunkWritingError};
 
 #[derive(Clone, Default)]
 pub struct AnvilChunkFormat;
@@ -98,7 +98,7 @@ pub enum ChunkSerializingError {
     ErrorSerializingChunk(fastnbt::error::Error),
 }
 
-impl ChunkReader for AnvilChunkFormat {
+impl ChunkFormat for AnvilChunkFormat {
     fn read_chunk(
         &self,
         chunk_bytes: Vec<u8>,
@@ -202,10 +202,8 @@ impl ChunkReader for AnvilChunkFormat {
             position: *at,
         })
     }
-}
 
-impl ChunkWriter for AnvilChunkFormat {
-    fn write_chunk(
+    fn save_chunk(
         &self,
         chunk_data: &ChunkData,
         _at: &Vector2<i32>,
diff --git a/pumpkin-world/src/chunk/format/mod.rs b/pumpkin-world/src/chunk/format/mod.rs
index 6a9e62643..ce6d99102 100644
--- a/pumpkin-world/src/chunk/format/mod.rs
+++ b/pumpkin-world/src/chunk/format/mod.rs
@@ -5,16 +5,14 @@ use thiserror::Error;
 
 use super::{compression::CompressionError, ChunkData, ChunkParsingError};
 
-pub trait ChunkReader: Sync + Send {
+pub trait ChunkFormat: Sync + Send {
     fn read_chunk(
         &self,
         chunk_bytes: Vec<u8>,
         at: &Vector2<i32>,
     ) -> Result<ChunkData, ChunkReadingError>;
-}
 
-pub trait ChunkWriter: Send + Sync {
-    fn write_chunk(
+    fn save_chunk(
         &self,
         chunk_data: &ChunkData,
         at: &Vector2<i32>,
diff --git a/pumpkin-world/src/level.rs b/pumpkin-world/src/level.rs
index 8556080d8..4f7da083e 100644
--- a/pumpkin-world/src/level.rs
+++ b/pumpkin-world/src/level.rs
@@ -11,8 +11,8 @@ use tokio::{
 
 use crate::{
     chunk::{
-        db::{informative_table::InformativeTableDB, RawChunkReader, RawChunkWriter},
-        format::{anvil::AnvilChunkFormat, ChunkReader, ChunkReadingError, ChunkWriter},
+        db::{informative_table::InformativeTable, ChunkStorage},
+        format::{anvil::AnvilChunkFormat, ChunkFormat, ChunkReadingError},
         ChunkData, ChunkParsingError,
     },
     generation::{get_world_gen, Seed, WorldGenerator},
@@ -36,10 +36,8 @@ pub struct Level {
     level_folder: LevelFolder,
     loaded_chunks: Arc<DashMap<Vector2<i32>, Arc<RwLock<ChunkData>>>>,
     chunk_watchers: Arc<DashMap<Vector2<i32>, usize>>,
-    chunk_reader: Arc<dyn ChunkReader>,
-    chunk_writer: Arc<dyn ChunkWriter>,
-    raw_chunk_reader: Arc<dyn RawChunkReader>,
-    raw_chunk_writer: Arc<dyn RawChunkWriter>,
+    chunk_format: Arc<dyn ChunkFormat>,
+    chunk_storage: Arc<dyn ChunkStorage>,
     world_gen: Arc<dyn WorldGenerator>,
     // Gets unlocked when dropped
     // TODO: Make this a trait
@@ -80,10 +78,8 @@ impl Level {
             world_gen,
             world_info_writer: Arc::new(AnvilLevelInfo),
             level_folder,
-            chunk_reader: Arc::new(AnvilChunkFormat),
-            chunk_writer: Arc::new(AnvilChunkFormat),
-            raw_chunk_reader: Arc::new(InformativeTableDB),
-            raw_chunk_writer: Arc::new(InformativeTableDB),
+            chunk_format: Arc::new(AnvilChunkFormat),
+            chunk_storage: Arc::new(InformativeTable),
             loaded_chunks: Arc::new(DashMap::new()),
             chunk_watchers: Arc::new(DashMap::new()),
             level_info,
@@ -210,9 +206,9 @@ impl Level {
     }
 
     pub async fn write_chunk(&self, chunk_to_write: (Vector2<i32>, Arc<RwLock<ChunkData>>)) {
-        if let Err(error) = self.raw_chunk_writer.write_raw_chunk(
-            self.chunk_writer
-                .write_chunk(&*chunk_to_write.1.read().await, &chunk_to_write.0)
+        if let Err(error) = self.chunk_storage.write_raw_chunk(
+            self.chunk_format
+                .save_chunk(&*chunk_to_write.1.read().await, &chunk_to_write.0)
                 .unwrap(),
             &self.level_folder,
             &chunk_to_write.0,
@@ -222,8 +218,8 @@ impl Level {
     }
 
     fn load_chunk_from_save(
-        raw_chunk_reader: Arc<dyn RawChunkReader>,
-        chunk_reader: Arc<dyn ChunkReader>,
+        raw_chunk_reader: Arc<dyn ChunkStorage>,
+        chunk_reader: Arc<dyn ChunkFormat>,
         save_file: &LevelFolder,
         chunk_pos: Vector2<i32>,
     ) -> Result<Option<Arc<RwLock<ChunkData>>>, ChunkReadingError> {
@@ -266,10 +262,8 @@ impl Level {
         chunks.par_iter().for_each(|at| {
             let channel = channel.clone();
             let loaded_chunks = self.loaded_chunks.clone();
-            let chunk_reader = self.chunk_reader.clone();
-            let chunk_writer = self.chunk_writer.clone();
-            let raw_chunk_reader = self.raw_chunk_reader.clone();
-            let raw_chunk_writer = self.raw_chunk_writer.clone();
+            let chunk_format = self.chunk_format.clone();
+            let chunk_storage = self.chunk_storage.clone();
             let level_folder = self.level_folder.clone();
             let world_gen = self.world_gen.clone();
             let chunk_pos = *at;
@@ -279,17 +273,17 @@ impl Level {
                 .map(|entry| entry.value().clone())
                 .unwrap_or_else(|| {
                     let loaded_chunk = match Self::load_chunk_from_save(
-                        raw_chunk_reader,
-                        chunk_reader,
+                        chunk_storage.clone(),
+                        chunk_format.clone(),
                         &level_folder,
                         chunk_pos,
                     ) {
                         Ok(chunk) => {
                             // Save new Chunk
                             if let Some(chunk) = &chunk {
-                                if let Err(error) = raw_chunk_writer.write_raw_chunk(
-                                    chunk_writer
-                                        .write_chunk(&chunk.blocking_read(), &chunk_pos)
+                                if let Err(error) = chunk_storage.write_raw_chunk(
+                                    chunk_format
+                                        .save_chunk(&chunk.blocking_read(), &chunk_pos)
                                         .unwrap(),
                                     &level_folder,
                                     &chunk_pos,
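
Illustrative sketch (not part of the patch): after this refactor the two merged traits split responsibilities between ChunkStorage (raw region-file bytes) and ChunkFormat (chunk serialization). The snippet below shows how a caller might compose them, mirroring the write path in Level::write_chunk above. The function name persist_chunk and the parameter names storage, format, and folder are hypothetical; only the trait methods and error types introduced in the diff are assumed, and save_chunk is assumed to return the serialized bytes as the call sites in the diff imply.

use std::sync::Arc;

use pumpkin_util::math::vector2::Vector2;

use crate::chunk::{
    db::{ChunkStorage, ChunkStorageWritingError},
    format::ChunkFormat,
    ChunkData,
};
use crate::level::LevelFolder;

// Sketch only: serialize a chunk with the format layer, then hand the raw
// bytes to the storage layer, which compresses and writes the region file.
fn persist_chunk(
    storage: &Arc<dyn ChunkStorage>,
    format: &Arc<dyn ChunkFormat>,
    folder: &LevelFolder,
    chunk: &ChunkData,
    pos: &Vector2<i32>,
) -> Result<(), ChunkStorageWritingError> {
    // The diff also unwraps the serialization result at its call sites.
    let bytes = format
        .save_chunk(chunk, pos)
        .expect("chunk serialization failed");
    storage.write_raw_chunk(bytes, folder, pos)
}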