From e823b3e8158577b494828026ab1ef251968752aa Mon Sep 17 00:00:00 2001 From: unknown Date: Sat, 23 Aug 2025 17:35:59 -0300 Subject: [PATCH 01/11] feat: player data storage initialized --- Cargo.toml | 5 +- src/bin/Cargo.toml | 1 + .../play_packets/player_loaded.rs | 72 +++++++++++++++---- src/bin/src/systems/new_connections.rs | 3 +- src/lib/core/Cargo.toml | 2 + src/lib/core/src/data/mod.rs | 1 + src/lib/core/src/data/player.rs | 29 ++++++++ src/lib/core/src/lib.rs | 1 + src/lib/core/src/transform/position.rs | 4 +- src/lib/player_state/Cargo.toml | 37 ++++++++++ src/lib/player_state/src/data.rs | 13 ++++ src/lib/player_state/src/errors.rs | 39 ++++++++++ src/lib/player_state/src/lib.rs | 2 + src/lib/player_state/src/storage.rs | 48 +++++++++++++ src/lib/world/Cargo.toml | 1 + src/lib/world/src/lib.rs | 11 ++- 16 files changed, 249 insertions(+), 20 deletions(-) create mode 100644 src/lib/core/src/data/mod.rs create mode 100644 src/lib/core/src/data/player.rs create mode 100644 src/lib/player_state/Cargo.toml create mode 100644 src/lib/player_state/src/data.rs create mode 100644 src/lib/player_state/src/errors.rs create mode 100644 src/lib/player_state/src/lib.rs create mode 100644 src/lib/player_state/src/storage.rs diff --git a/Cargo.toml b/Cargo.toml index 8b3b143f..c9721fcb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,15 +14,12 @@ resolver = "2" members = [ "src/bin", "src/lib/adapters/anvil", - "src/lib/adapters/anvil", - "src/lib/adapters/nbt", "src/lib/adapters/nbt", "src/lib/commands", "src/lib/default_commands", "src/lib/core", "src/lib/core/state", "src/lib/derive_macros", - "src/lib/derive_macros", "src/lib/net", "src/lib/net/crates/codec", "src/lib/net/crates/encryption", @@ -39,6 +36,7 @@ members = [ "src/lib/inventories", "src/lib/registry", "src/lib/scheduler", + "src/lib/player_state", ] #================== Lints ==================# @@ -108,6 +106,7 @@ ferrumc-scheduler = { path = "src/lib/scheduler" } ferrumc-state = { path = "src/lib/core/state" } ferrumc-storage = { path = "src/lib/storage" } ferrumc-text = { path = "src/lib/text" } +ferrumc-playerstate = { path = "src/lib/player_state" } ferrumc-threadpool = { path = "src/lib/utils/threadpool" } ferrumc-utils = { path = "src/lib/utils" } ferrumc-world = { path = "src/lib/world" } diff --git a/src/bin/Cargo.toml b/src/bin/Cargo.toml index c7c75772..b3048dc2 100644 --- a/src/bin/Cargo.toml +++ b/src/bin/Cargo.toml @@ -33,6 +33,7 @@ ferrumc-threadpool = { workspace = true } ferrumc-inventories = { workspace = true } once_cell = { workspace = true } serde_json = { workspace = true } +ferrumc-playerstate = { workspace = true } tracing = { workspace = true } clap = { workspace = true, features = ["derive", "env"] } diff --git a/src/bin/src/packet_handlers/play_packets/player_loaded.rs b/src/bin/src/packet_handlers/play_packets/player_loaded.rs index 48dce958..194f4823 100644 --- a/src/bin/src/packet_handlers/play_packets/player_loaded.rs +++ b/src/bin/src/packet_handlers/play_packets/player_loaded.rs @@ -1,4 +1,6 @@ use bevy_ecs::prelude::{Entity, Query, Res}; +use ferrumc_core::data::player::PlayerData; +use ferrumc_core::identity::player_identity::PlayerIdentity; use ferrumc_core::transform::position::Position; use ferrumc_net::connection::StreamWriter; use ferrumc_net::packets::outgoing::synchronize_player_position::SynchronizePlayerPositionPacket; @@ -10,10 +12,10 @@ use tracing::warn; pub fn handle( ev: Res, state: Res, - query: Query<(Entity, &Position, &StreamWriter)>, + mut query: Query<(Entity, &PlayerIdentity, 
&mut PlayerData, &StreamWriter)>, ) { for (_, player) in ev.0.try_iter() { - let Ok((entity, player_pos, conn)) = query.get(player) else { + let Ok((entity, player_identity, mut player_data, conn)) = query.get_mut(player) else { warn!("Player position not found in query."); continue; }; @@ -24,10 +26,56 @@ pub fn handle( ); continue; } + + // Default player data + *player_data = PlayerData::new( + player_identity.uuid.as_u128(), + Position::default(), + "overworld", + ); + + // Save the player's position in the world + if let Ok(loaded) = state + .0 + .world + .players_state + .load_player_state(player_identity.uuid.as_u128()) + { + match loaded { + Some(loaded_data) => { + *player_data = loaded_data; + tracing::info!( + "Loaded player state for {}: position=({}, {}, {}), dimension={}", + player_data.uuid, + player_data.pos.x, + player_data.pos.y, + player_data.pos.z, + player_data.dimension + ); + } + None => { + if let Err(e) = state.0.world.players_state.save_player_state(&player_data) { + tracing::error!( + "Failed to save player state for {}: {:?}", + player_identity.username, + e + ); + } + } + } + } else { + if let Err(e) = state.0.world.players_state.save_player_state(&player_data) { + tracing::error!( + "Failed to save player state for {}: {:?}", + player_identity.username, + e + ); + } + } let head_block = state.0.world.get_block_and_fetch( - player_pos.x as i32, - player_pos.y as i32, - player_pos.z as i32, + player_data.pos.x as i32, + player_data.pos.y as i32, + player_data.pos.z as i32, "overworld", ); if let Ok(head_block) = head_block { @@ -35,17 +83,17 @@ pub fn handle( tracing::info!( "Player {} loaded at position: ({}, {}, {})", player, - player_pos.x, - player_pos.y, - player_pos.z + player_data.pos.x, + player_data.pos.y, + player_data.pos.z ); } else { tracing::info!( "Player {} loaded at position: ({}, {}, {}) with head block: {:?}", player, - player_pos.x, - player_pos.y, - player_pos.z, + player_data.pos.x, + player_data.pos.y, + player_data.pos.z, head_block ); // Teleport the player to the world center if their head block is not air @@ -66,7 +114,7 @@ pub fn handle( } else { warn!( "Failed to fetch head block for player {} at position: ({}, {}, {})", - player, player_pos.x, player_pos.y, player_pos.z + player, player_data.pos.x, player_data.pos.y, player_data.pos.z ); } } diff --git a/src/bin/src/systems/new_connections.rs b/src/bin/src/systems/new_connections.rs index ec1cd3cd..c57f2c9b 100644 --- a/src/bin/src/systems/new_connections.rs +++ b/src/bin/src/systems/new_connections.rs @@ -1,6 +1,6 @@ use bevy_ecs::prelude::{Commands, Res, Resource}; use crossbeam_channel::Receiver; -use ferrumc_core::chunks::chunk_receiver::ChunkReceiver; +use ferrumc_core::{chunks::chunk_receiver::ChunkReceiver, data::player::PlayerData}; use ferrumc_core::conn::keepalive::KeepAliveTracker; use ferrumc_core::transform::grounded::OnGround; use ferrumc_core::transform::position::Position; @@ -28,6 +28,7 @@ pub fn accept_new_connections( let entity = cmd.spawn(( new_connection.stream, Position::default(), + PlayerData::default(), ChunkReceiver::default(), Rotation::default(), OnGround::default(), diff --git a/src/lib/core/Cargo.toml b/src/lib/core/Cargo.toml index 1a9e9855..8caadbdb 100644 --- a/src/lib/core/Cargo.toml +++ b/src/lib/core/Cargo.toml @@ -13,6 +13,8 @@ ferrumc-text = { workspace = true } ferrumc-net-codec = { workspace = true } uuid = { workspace = true } crossbeam-queue = { workspace = true } +serde = { workspace = true } +bitcode = { workspace = true } 
[dev-dependencies] criterion = { workspace = true } diff --git a/src/lib/core/src/data/mod.rs b/src/lib/core/src/data/mod.rs new file mode 100644 index 00000000..f28d7c20 --- /dev/null +++ b/src/lib/core/src/data/mod.rs @@ -0,0 +1 @@ +pub mod player; diff --git a/src/lib/core/src/data/player.rs b/src/lib/core/src/data/player.rs new file mode 100644 index 00000000..2915acd3 --- /dev/null +++ b/src/lib/core/src/data/player.rs @@ -0,0 +1,29 @@ +use bevy_ecs::component::Component; +use bitcode::{Decode, Encode}; +use serde::{Deserialize, Serialize}; + +use crate::transform::position::Position; + +// https://minecraft.fandom.com/wiki/Player.dat_format +#[derive(Serialize, Deserialize, Debug, Encode, Decode, Component, typename::TypeName)] +pub struct PlayerData { + pub uuid: u128, + pub pos: Position, + pub dimension: String, +} + +impl Default for PlayerData { + fn default() -> Self { + Self::new(0, Position::default(), "overworld") + } +} + +impl PlayerData { + pub fn new(uuid: u128, pos: Position, dimension: &str) -> Self { + Self { + uuid, + pos, + dimension: dimension.to_string(), + } + } +} diff --git a/src/lib/core/src/lib.rs b/src/lib/core/src/lib.rs index 35c28021..bc34bb1e 100644 --- a/src/lib/core/src/lib.rs +++ b/src/lib/core/src/lib.rs @@ -8,3 +8,4 @@ pub mod identity; pub mod mq; pub mod state; pub mod transform; +pub mod data; diff --git a/src/lib/core/src/transform/position.rs b/src/lib/core/src/transform/position.rs index 7605f66e..2ac8ed5b 100644 --- a/src/lib/core/src/transform/position.rs +++ b/src/lib/core/src/transform/position.rs @@ -1,9 +1,11 @@ use bevy_ecs::prelude::Component; +use bitcode::{Decode, Encode}; use ferrumc_net_codec::net_types::network_position::NetworkPosition; +use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Display, Formatter}; use typename::TypeName; -#[derive(TypeName, Component)] +#[derive(TypeName, Component, Serialize, Deserialize, Encode, Decode)] pub struct Position { pub x: f64, pub y: f64, diff --git a/src/lib/player_state/Cargo.toml b/src/lib/player_state/Cargo.toml new file mode 100644 index 00000000..44ce01c2 --- /dev/null +++ b/src/lib/player_state/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "ferrumc-playerstate" +version = "0.1.0" +edition = "2024" + +[dependencies] +thiserror = { workspace = true } +ferrumc-storage = { workspace = true } +ferrumc-config = { workspace = true } +ferrumc-core = { workspace = true } +tracing = { workspace = true } +ferrumc-net-codec = { workspace = true } +serde = { workspace = true } +serde_derive = { workspace = true } +macro_rules_attribute = { workspace = true } +bitcode_derive = { workspace = true } +bitcode = { workspace = true } +deepsize = { workspace = true } +ferrumc-nbt = { workspace = true } +ferrumc-macros = { workspace = true } +ferrumc-anvil = { workspace = true } +rayon = { workspace = true } +ferrumc-general-purpose = { workspace = true } +lazy_static = { workspace = true } +bzip2 = { workspace = true } +serde_json = { workspace = true } +indicatif = { workspace = true } +wyhash = { workspace = true } +moka = { workspace = true, features = ["sync"] } +ahash = { workspace = true } +rand = { workspace = true } +yazi = { workspace = true } +ferrumc-threadpool = { workspace = true } +lz4_flex = { workspace = true } + +[lints] +workspace = true diff --git a/src/lib/player_state/src/data.rs b/src/lib/player_state/src/data.rs new file mode 100644 index 00000000..df701ac7 --- /dev/null +++ b/src/lib/player_state/src/data.rs @@ -0,0 +1,13 @@ +// use bitcode::{Decode, Encode}; +// use 
serde::{Deserialize, Serialize}; + +// // https://minecraft.fandom.com/wiki/Player.dat_format +// #[derive(Serialize, Deserialize, Debug, Encode, Decode)] +// pub struct PlayerData { +// pub uuid: u128, +// pub username: String, +// pub x: f64, +// pub y: f64, +// pub z: f64, +// pub dimension: String, +// } diff --git a/src/lib/player_state/src/errors.rs b/src/lib/player_state/src/errors.rs new file mode 100644 index 00000000..4250a446 --- /dev/null +++ b/src/lib/player_state/src/errors.rs @@ -0,0 +1,39 @@ +use ferrumc_storage::errors::StorageError; +use thiserror::Error; +use yazi::Error; + +#[derive(Debug, Error)] +pub enum PlayerDataError { + #[error("Compression error: {0}")] + CompressionError(String), + #[error("A database error occurred from the playerstate crate: {0}")] + DatabaseError(StorageError), + #[error("Some kind of IO error occurred: {0}")] + GenericIOError(String), +} + +impl From for PlayerDataError { + fn from(err: StorageError) -> Self { + PlayerDataError::DatabaseError(err) + } +} + +impl From for PlayerDataError { + fn from(e: yazi::Error) -> Self { + match e { + Error::Underflow => { + PlayerDataError::CompressionError("Underflow error during compression".to_string()) + } + Error::InvalidBitstream => PlayerDataError::CompressionError( + "Invalid bitstream error during compression".to_string(), + ), + Error::Overflow => { + PlayerDataError::CompressionError("Overflow error during compression".to_string()) + } + Error::Finished => { + PlayerDataError::CompressionError("Finished error during compression".to_string()) + } + Error::Io(io_err) => PlayerDataError::GenericIOError(io_err.to_string()), + } + } +} diff --git a/src/lib/player_state/src/lib.rs b/src/lib/player_state/src/lib.rs new file mode 100644 index 00000000..dca9fe7f --- /dev/null +++ b/src/lib/player_state/src/lib.rs @@ -0,0 +1,2 @@ +pub mod errors; +pub mod storage; diff --git a/src/lib/player_state/src/storage.rs b/src/lib/player_state/src/storage.rs new file mode 100644 index 00000000..92ad38ce --- /dev/null +++ b/src/lib/player_state/src/storage.rs @@ -0,0 +1,48 @@ +use std::sync::Arc; + +use ferrumc_core::data::player::PlayerData; +use ferrumc_storage::{errors::StorageError, lmdb::LmdbBackend}; +use yazi::CompressionLevel; + +use crate::errors::PlayerDataError; + +// Table name for player state data in LMDB +const TABLE_NAME: &str = "playerdata"; + +#[derive(Clone)] +pub struct PlayerStateStorage { + storage_backend: Arc, +} + +impl PlayerStateStorage { + pub fn new(storage_backend: Arc) -> Self { + Self { storage_backend } + } + + pub fn save_player_state(&self, state: &PlayerData) -> Result<(), PlayerDataError> { + if !self.storage_backend.table_exists(TABLE_NAME.to_string())? { + self.storage_backend.create_table(TABLE_NAME.to_string())?; + } + let key = state.uuid; + + let encoded = yazi::compress( + &bitcode::encode(state), + yazi::Format::Zlib, + CompressionLevel::BestSpeed, + )?; + self.storage_backend + .upsert(TABLE_NAME.to_string(), key, encoded)?; + Ok(()) + } + + pub fn load_player_state(&self, key: u128) -> Result, PlayerDataError> { + if let Some(bytes) = self.storage_backend.get(TABLE_NAME.to_string(), key)? 
{ + let (data, _) = yazi::decompress(bytes.as_slice(), yazi::Format::Zlib)?; + let player: PlayerData = bitcode::decode(&data) + .map_err(|_| StorageError::ReadError("Failed to decode PlayerState".into()))?; + Ok(Some(player)) + } else { + Ok(None) + } + } +} diff --git a/src/lib/world/Cargo.toml b/src/lib/world/Cargo.toml index d2852e7f..9a35e44b 100644 --- a/src/lib/world/Cargo.toml +++ b/src/lib/world/Cargo.toml @@ -32,6 +32,7 @@ rand = { workspace = true } yazi = { workspace = true } ferrumc-threadpool = { workspace = true } lz4_flex = { workspace = true } +ferrumc-playerstate = { workspace = true } [[bench]] name = "world_bench" diff --git a/src/lib/world/src/lib.rs b/src/lib/world/src/lib.rs index 029f268e..795e729a 100644 --- a/src/lib/world/src/lib.rs +++ b/src/lib/world/src/lib.rs @@ -12,6 +12,7 @@ use crate::errors::WorldError; use deepsize::DeepSizeOf; use ferrumc_config::server_config::get_global_config; use ferrumc_general_purpose::paths::get_root_path; +use ferrumc_playerstate::storage::PlayerStateStorage; use ferrumc_storage::lmdb::LmdbBackend; use moka::sync::Cache; use std::fs::create_dir_all; @@ -23,8 +24,9 @@ use tracing::{error, trace, warn}; #[derive(Clone)] pub struct World { - storage_backend: LmdbBackend, + storage_backend: Arc, cache: Cache<(i32, i32, String), Arc>, + pub players_state: PlayerStateStorage, } fn check_config_validity() -> Result<(), WorldError> { @@ -88,8 +90,10 @@ impl World { if backend_path.is_relative() { backend_path = get_root_path().join(backend_path); } - let storage_backend = - LmdbBackend::initialize(Some(backend_path)).expect("Failed to initialize database"); + let storage_backend = Arc::new( + LmdbBackend::initialize(Some(backend_path)).expect("Failed to initialize database"), + ); + let players_state = PlayerStateStorage::new(Arc::clone(&storage_backend)); if get_global_config().database.cache_ttl != 0 && get_global_config().database.cache_capacity == 0 @@ -112,6 +116,7 @@ impl World { World { storage_backend, cache, + players_state, } } } From cae8c5499ef80266a4d72b2d4df95facdd23cdfe Mon Sep 17 00:00:00 2001 From: unknown Date: Sat, 23 Aug 2025 18:11:40 -0300 Subject: [PATCH 02/11] fix: cargo fmt and cargo audit --- src/bin/src/systems/new_connections.rs | 2 +- src/lib/core/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bin/src/systems/new_connections.rs b/src/bin/src/systems/new_connections.rs index c57f2c9b..4f132484 100644 --- a/src/bin/src/systems/new_connections.rs +++ b/src/bin/src/systems/new_connections.rs @@ -1,12 +1,12 @@ use bevy_ecs::prelude::{Commands, Res, Resource}; use crossbeam_channel::Receiver; -use ferrumc_core::{chunks::chunk_receiver::ChunkReceiver, data::player::PlayerData}; use ferrumc_core::conn::keepalive::KeepAliveTracker; use ferrumc_core::transform::grounded::OnGround; use ferrumc_core::transform::position::Position; use ferrumc_core::transform::rotation::Rotation; use ferrumc_inventories::hotbar::Hotbar; use ferrumc_inventories::inventory::Inventory; +use ferrumc_core::{chunks::chunk_receiver::ChunkReceiver, data::player::PlayerData}; use ferrumc_net::connection::NewConnection; use ferrumc_state::GlobalStateResource; use std::time::Instant; diff --git a/src/lib/core/src/lib.rs b/src/lib/core/src/lib.rs index bc34bb1e..9c041dd3 100644 --- a/src/lib/core/src/lib.rs +++ b/src/lib/core/src/lib.rs @@ -4,8 +4,8 @@ pub mod errors; pub mod chunks; pub mod collisions; pub mod conn; +pub mod data; pub mod identity; pub mod mq; pub mod state; pub mod transform; -pub mod data; From 
2eeab0e879a6693619dd99c1129ab9aa227153d3 Mon Sep 17 00:00:00 2001 From: unknown Date: Sat, 23 Aug 2025 18:19:04 -0300 Subject: [PATCH 03/11] fix: cargo clippy --- .../packet_handlers/play_packets/player_loaded.rs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/bin/src/packet_handlers/play_packets/player_loaded.rs b/src/bin/src/packet_handlers/play_packets/player_loaded.rs index 194f4823..e182a151 100644 --- a/src/bin/src/packet_handlers/play_packets/player_loaded.rs +++ b/src/bin/src/packet_handlers/play_packets/player_loaded.rs @@ -63,14 +63,12 @@ pub fn handle( } } } - } else { - if let Err(e) = state.0.world.players_state.save_player_state(&player_data) { - tracing::error!( - "Failed to save player state for {}: {:?}", - player_identity.username, - e - ); - } + } else if let Err(e) = state.0.world.players_state.save_player_state(&player_data) { + tracing::error!( + "Failed to save player state for {}: {:?}", + player_identity.username, + e + ); } let head_block = state.0.world.get_block_and_fetch( player_data.pos.x as i32, From 4f8db95f89aef8fa3b6f43f1b09070782922f650 Mon Sep 17 00:00:00 2001 From: unknown Date: Sun, 24 Aug 2025 17:06:34 -0300 Subject: [PATCH 04/11] feat: added sqlite support --- Cargo.toml | 1 + src/lib/player_state/src/storage.rs | 9 +- src/lib/storage/Cargo.toml | 5 +- src/lib/storage/src/database.rs | 29 +++ src/lib/storage/src/lib.rs | 2 + src/lib/storage/src/lmdb.rs | 192 ++++++++--------- src/lib/storage/src/sqlite.rs | 314 ++++++++++++++++++++++++++++ src/lib/world/src/db_functions.rs | 13 +- src/lib/world/src/importing.rs | 3 +- 9 files changed, 460 insertions(+), 108 deletions(-) create mode 100644 src/lib/storage/src/database.rs create mode 100644 src/lib/storage/src/sqlite.rs diff --git a/Cargo.toml b/Cargo.toml index c9721fcb..3d0394e8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -188,6 +188,7 @@ lz4_flex = "0.11.5" # Database heed = "0.22.0" moka = "0.12.10" +rusqlite = { version = "0.37.0", features = ["bundled", "serde_json"] } # CLI clap = "4.5.45" diff --git a/src/lib/player_state/src/storage.rs b/src/lib/player_state/src/storage.rs index 92ad38ce..0f9a4f31 100644 --- a/src/lib/player_state/src/storage.rs +++ b/src/lib/player_state/src/storage.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use ferrumc_core::data::player::PlayerData; -use ferrumc_storage::{errors::StorageError, lmdb::LmdbBackend}; +use ferrumc_storage::{database::Database, errors::StorageError, lmdb::LmdbBackend}; use yazi::CompressionLevel; use crate::errors::PlayerDataError; @@ -21,7 +21,7 @@ impl PlayerStateStorage { pub fn save_player_state(&self, state: &PlayerData) -> Result<(), PlayerDataError> { if !self.storage_backend.table_exists(TABLE_NAME.to_string())? { - self.storage_backend.create_table(TABLE_NAME.to_string())?; + self.storage_backend.create_table(TABLE_NAME)?; } let key = state.uuid; @@ -30,13 +30,12 @@ impl PlayerStateStorage { yazi::Format::Zlib, CompressionLevel::BestSpeed, )?; - self.storage_backend - .upsert(TABLE_NAME.to_string(), key, encoded)?; + self.storage_backend.upsert(TABLE_NAME, key, encoded)?; Ok(()) } pub fn load_player_state(&self, key: u128) -> Result, PlayerDataError> { - if let Some(bytes) = self.storage_backend.get(TABLE_NAME.to_string(), key)? { + if let Some(bytes) = self.storage_backend.get(TABLE_NAME, key)? 
{ let (data, _) = yazi::decompress(bytes.as_slice(), yazi::Format::Zlib)?; let player: PlayerData = bitcode::decode(&data) .map_err(|_| StorageError::ReadError("Failed to decode PlayerState".into()))?; diff --git a/src/lib/storage/Cargo.toml b/src/lib/storage/Cargo.toml index 0485e12d..e9c9cc1b 100644 --- a/src/lib/storage/Cargo.toml +++ b/src/lib/storage/Cargo.toml @@ -13,7 +13,10 @@ rand = { workspace = true } heed = { workspace = true } page_size = { workspace = true } parking_lot = { workspace = true } - +rusqlite = { workspace = true } +serde = { workspace = true } +serde_derive = { workspace = true } +serde_json = { workspace = true } [dev-dependencies] criterion = { workspace = true } diff --git a/src/lib/storage/src/database.rs b/src/lib/storage/src/database.rs new file mode 100644 index 00000000..86fa77fa --- /dev/null +++ b/src/lib/storage/src/database.rs @@ -0,0 +1,29 @@ +use crate::errors::StorageError; + +pub trait Database { + type Key; + type Value; + + fn create_table(&self, table: &str) -> Result<(), StorageError>; + fn insert(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError>; + fn get(&self, table: &str, key: Self::Key) -> Result, StorageError>; + fn delete(&self, table: &str, key: Self::Key) -> Result<(), StorageError>; + fn update(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError>; + fn upsert(&self, table: &str, key: Self::Key, value: Self::Value) + -> Result; + fn batch_insert( + &self, + table: &str, + data: Vec<(Self::Key, Self::Value)>, + ) -> Result<(), StorageError>; + fn batch_get( + &self, + table: &str, + keys: Vec, + ) -> Result>, StorageError>; + fn batch_upsert( + &self, + table: &str, + data: Vec<(Self::Key, Self::Value)>, + ) -> Result<(), StorageError>; +} diff --git a/src/lib/storage/src/lib.rs b/src/lib/storage/src/lib.rs index 4e34e8d0..348f34b5 100644 --- a/src/lib/storage/src/lib.rs +++ b/src/lib/storage/src/lib.rs @@ -1,2 +1,4 @@ +pub mod database; pub mod errors; pub mod lmdb; +pub mod sqlite; diff --git a/src/lib/storage/src/lmdb.rs b/src/lib/storage/src/lmdb.rs index 0d652ede..b1dc9cba 100644 --- a/src/lib/storage/src/lmdb.rs +++ b/src/lib/storage/src/lmdb.rs @@ -1,8 +1,9 @@ +use crate::database::Database; use crate::errors::StorageError; use heed; use heed::byteorder::BigEndian; use heed::types::{Bytes, U128}; -use heed::{Database, Env, EnvOpenOptions, WithoutTls}; +use heed::{Env, EnvOpenOptions, WithoutTls}; use parking_lot::Mutex; use std::collections::HashMap; use std::path::PathBuf; @@ -60,11 +61,57 @@ impl LmdbBackend { } } - pub fn insert(&self, table: String, key: u128, value: Vec) -> Result<(), StorageError> { + pub fn exists(&self, table: String, key: u128) -> Result { + let env = self.env.lock(); + let ro_txn = env.read_txn()?; + let db: heed::Database, Bytes> = env + .open_database(&ro_txn, Some(&table))? 
+ .ok_or(StorageError::TableError("Table not found".to_string()))?; + Ok(db.get(&ro_txn, &key)?.is_some()) + } + + pub fn table_exists(&self, table: String) -> Result { + let env = self.env.lock(); + let ro_txn = env.read_txn()?; + let db = env.open_database::, Bytes>(&ro_txn, Some(&table))?; + Ok(db.is_some()) + } + + pub fn details(&self) -> String { + format!("LMDB (heed 0.20.5): {:?}", self.env.lock().info()) + } + + pub fn flush(&self) -> Result<(), StorageError> { + let env = self.env.lock(); + env.clear_stale_readers()?; + env.force_sync()?; + Ok(()) + } + + pub fn close(&self) -> Result<(), StorageError> { + self.flush()?; + Ok(()) + } +} + +impl Database for LmdbBackend { + type Key = u128; + type Value = Vec; + + fn create_table(&self, table: &str) -> Result<(), StorageError> { + let env = self.env.lock(); + let mut rw_txn = env.write_txn()?; + env.create_database::, Bytes>(&mut rw_txn, Some(table))?; + rw_txn.commit()?; + Ok(()) + } + + fn insert(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError> { let env = self.env.lock(); let mut rw_txn = env.write_txn()?; - let db: Database, Bytes> = - env.create_database(&mut rw_txn, Some(&table))?; + let db: heed::Database, Bytes> = + env.open_database(&rw_txn, Some(table))? + .ok_or(StorageError::TableError("Table not found".to_string()))?; if db.get(&rw_txn, &key)?.is_some() { return Err(StorageError::KeyExists(key as u64)); } @@ -73,12 +120,12 @@ impl LmdbBackend { Ok(()) } - pub fn get(&self, table: String, key: u128) -> Result>, StorageError> { + fn get(&self, table: &str, key: Self::Key) -> Result, StorageError> { let env = self.env.lock(); let ro_txn = env.read_txn()?; - let db: Database, Bytes> = env - .open_database(&ro_txn, Some(&table))? - .ok_or(StorageError::TableError("Table not found".to_string()))?; + let db: heed::Database, Bytes> = + env.open_database(&ro_txn, Some(table))? + .ok_or(StorageError::TableError("Table not found".to_string()))?; let value = db.get(&ro_txn, &key)?; if let Some(v) = value { Ok(Some(v.to_vec())) @@ -87,12 +134,12 @@ impl LmdbBackend { } } - pub fn delete(&self, table: String, key: u128) -> Result<(), StorageError> { + fn delete(&self, table: &str, key: Self::Key) -> Result<(), StorageError> { let env = self.env.lock(); let mut rw_txn = env.write_txn()?; - let db: Database, Bytes> = env - .open_database(&rw_txn, Some(&table))? - .ok_or(StorageError::TableError("Table not found".to_string()))?; + let db: heed::Database, Bytes> = + env.open_database(&rw_txn, Some(table))? + .ok_or(StorageError::TableError("Table not found".to_string()))?; if db.get(&rw_txn, &key)?.is_none() { return Err(StorageError::KeyNotFound(key as u64)); } @@ -101,12 +148,12 @@ impl LmdbBackend { Ok(()) } - pub fn update(&self, table: String, key: u128, value: Vec) -> Result<(), StorageError> { + fn update(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError> { let env = self.env.lock(); let mut rw_txn = env.write_txn()?; - let db: Database, Bytes> = env - .open_database(&rw_txn, Some(&table))? - .ok_or(StorageError::TableError("Table not found".to_string()))?; + let db: heed::Database, Bytes> = + env.open_database(&rw_txn, Some(table))? 
+ .ok_or(StorageError::TableError("Table not found".to_string()))?; if db.get(&rw_txn, &key)?.is_none() { return Err(StorageError::KeyNotFound(key as u64)); } @@ -115,27 +162,32 @@ impl LmdbBackend { Ok(()) } - pub fn upsert(&self, table: String, key: u128, value: Vec) -> Result { + fn upsert( + &self, + table: &str, + key: Self::Key, + value: Self::Value, + ) -> Result { let env = self.env.lock(); let mut rw_txn = env.write_txn()?; - let db: Database, Bytes> = env - .open_database(&rw_txn, Some(&table))? - .ok_or(StorageError::TableError("Table not found".to_string()))?; + let db: heed::Database, Bytes> = + env.open_database(&rw_txn, Some(table))? + .ok_or(StorageError::TableError("Table not found".to_string()))?; db.put(&mut rw_txn, &key, &value)?; rw_txn.commit()?; Ok(true) } - pub fn batch_upsert( + fn batch_upsert( &self, - table: String, - data: Vec<(u128, Vec)>, + table: &str, + data: Vec<(Self::Key, Self::Value)>, ) -> Result<(), StorageError> { let env = self.env.lock(); let mut rw_txn = env.write_txn()?; // Open or create the database for the given table - let db = env.create_database::, Bytes>(&mut rw_txn, Some(&table))?; + let db = env.create_database::, Bytes>(&mut rw_txn, Some(table))?; // Create a map of keys and their associated values let keymap: HashMap> = data.iter().map(|(k, v)| (*k, v)).collect(); @@ -161,34 +213,14 @@ impl LmdbBackend { Ok(()) } - pub fn exists(&self, table: String, key: u128) -> Result { - let env = self.env.lock(); - let ro_txn = env.read_txn()?; - let db: Database, Bytes> = env - .open_database(&ro_txn, Some(&table))? - .ok_or(StorageError::TableError("Table not found".to_string()))?; - Ok(db.get(&ro_txn, &key)?.is_some()) - } - - pub fn table_exists(&self, table: String) -> Result { - let env = self.env.lock(); - let ro_txn = env.read_txn()?; - let db = env.open_database::, Bytes>(&ro_txn, Some(&table))?; - Ok(db.is_some()) - } - - pub fn details(&self) -> String { - format!("LMDB (heed 0.20.5): {:?}", self.env.lock().info()) - } - - pub fn batch_insert( + fn batch_insert( &self, - table: String, - data: Vec<(u128, Vec)>, + table: &str, + data: Vec<(Self::Key, Self::Value)>, ) -> Result<(), StorageError> { let env = self.env.lock(); let mut rw_txn = env.write_txn()?; - let db = env.create_database::, Bytes>(&mut rw_txn, Some(&table))?; + let db = env.create_database::, Bytes>(&mut rw_txn, Some(table))?; let keymap: HashMap> = data.iter().map(|(k, v)| (*k, v)).collect(); let mut sorted_keys: Vec = keymap.keys().cloned().collect(); @@ -204,16 +236,16 @@ impl LmdbBackend { Ok(()) } - pub fn batch_get( + fn batch_get( &self, - table: String, - keys: Vec, - ) -> Result>>, StorageError> { + table: &str, + keys: Vec, + ) -> Result>, StorageError> { let env = self.env.lock(); let ro_txn = env.read_txn()?; - let db: Database, Bytes> = env - .open_database(&ro_txn, Some(&table))? - .ok_or(StorageError::TableError("Table not found".to_string()))?; + let db: heed::Database, Bytes> = + env.open_database(&ro_txn, Some(table))? 
+ .ok_or(StorageError::TableError("Table not found".to_string()))?; let mut values = Vec::new(); for key in keys { let value = db.get(&ro_txn, &key)?; @@ -225,26 +257,6 @@ impl LmdbBackend { } Ok(values) } - - pub fn flush(&self) -> Result<(), StorageError> { - let env = self.env.lock(); - env.clear_stale_readers()?; - env.force_sync()?; - Ok(()) - } - - pub fn create_table(&self, table: String) -> Result<(), StorageError> { - let env = self.env.lock(); - let mut rw_txn = env.write_txn()?; - env.create_database::, Bytes>(&mut rw_txn, Some(&table))?; - rw_txn.commit()?; - Ok(()) - } - - pub fn close(&self) -> Result<(), StorageError> { - self.flush()?; - Ok(()) - } } #[cfg(test)] @@ -266,13 +278,11 @@ mod tests { let path = tempdir().unwrap().keep(); { let backend = LmdbBackend::initialize(Some(path.clone())).unwrap(); - backend.create_table("test_table".to_string()).unwrap(); + backend.create_table("test_table").unwrap(); let key = 12345678901234567890u128; let value = vec![1, 2, 3, 4, 5]; - backend - .insert("test_table".to_string(), key, value.clone()) - .unwrap(); - let retrieved_value = backend.get("test_table".to_string(), key).unwrap(); + backend.insert("test_table", key, value.clone()).unwrap(); + let retrieved_value = backend.get("test_table", key).unwrap(); assert_eq!(retrieved_value, Some(value)); } remove_dir_all(path).unwrap(); @@ -283,16 +293,14 @@ mod tests { let path = tempdir().unwrap().keep(); { let backend = LmdbBackend::initialize(Some(path.clone())).unwrap(); - backend.create_table("test_table".to_string()).unwrap(); + backend.create_table("test_table").unwrap(); let data = vec![ (12345678901234567890u128, vec![1, 2, 3]), (12345678901234567891u128, vec![4, 5, 6]), ]; - backend - .batch_insert("test_table".to_string(), data.clone()) - .unwrap(); + backend.batch_insert("test_table", data.clone()).unwrap(); for (key, value) in data { - let retrieved_value = backend.get("test_table".to_string(), key).unwrap(); + let retrieved_value = backend.get("test_table", key).unwrap(); assert_eq!(retrieved_value, Some(value)); } } @@ -304,7 +312,7 @@ mod tests { let path = tempdir().unwrap().keep(); { let backend = LmdbBackend::initialize(Some(path.clone())).unwrap(); - backend.create_table("test_table".to_string()).unwrap(); + backend.create_table("test_table").unwrap(); let mut threads = vec![]; for thread_iter in 0..10 { let handle = std::thread::spawn({ @@ -313,9 +321,7 @@ mod tests { for iter in 0..100 { let key = hash_2_to_u128(iter, thread_iter); let value = vec![rand::random::(); 10]; - backend - .insert("test_table".to_string(), key, value) - .unwrap(); + backend.insert("test_table", key, value).unwrap(); } } }); @@ -333,14 +339,12 @@ mod tests { let path = tempdir().unwrap().keep(); { let backend = LmdbBackend::initialize(Some(path.clone())).unwrap(); - backend.create_table("test_table".to_string()).unwrap(); + backend.create_table("test_table").unwrap(); for thread_iter in 0..10 { for iter in 0..100 { let value = vec![rand::random::(); 10]; let key = hash_2_to_u128(iter, thread_iter); - backend - .insert("test_table".to_string(), key, value) - .unwrap(); + backend.insert("test_table", key, value).unwrap(); } } let mut threads = vec![]; @@ -350,7 +354,7 @@ mod tests { move || { for iter in 0..100 { let key = hash_2_to_u128(iter, thread_iter); - let _ = backend.get("test_table".to_string(), key).unwrap(); + let _ = backend.get("test_table", key).unwrap(); } } }); diff --git a/src/lib/storage/src/sqlite.rs b/src/lib/storage/src/sqlite.rs new file mode 100644 index 
00000000..7ce2e5d8 --- /dev/null +++ b/src/lib/storage/src/sqlite.rs @@ -0,0 +1,314 @@ +use rusqlite::{params, params_from_iter, Connection}; +use serde::{de::DeserializeOwned, Serialize}; +use serde_json::Value; +use std::{collections::HashMap, marker::PhantomData, path::PathBuf}; + +use crate::{database::Database, errors::StorageError}; + +// TODO: Implement proper error mapping +impl From for StorageError { + fn from(err: rusqlite::Error) -> Self { + match err { + _ => StorageError::DatabaseError(err.to_string()), + } + } +} + +pub struct SqliteDatabase { + store_path: PathBuf, + _marker: PhantomData, +} + +impl SqliteDatabase { + pub fn initialize( + store_path: Option, + storage_name: &str, + ) -> Result { + let Some(checked_path) = store_path else { + return Err(StorageError::InvalidPath); + }; + if !checked_path.exists() { + std::fs::create_dir_all(&checked_path)?; + } + Ok(Self { + store_path: checked_path.join(storage_name), + _marker: PhantomData, + }) + } + + fn open_conn(&self) -> Result { + Ok(Connection::open(self.store_path.as_path())?) + } +} + +impl Database for SqliteDatabase +where + T: Serialize + DeserializeOwned, +{ + type Key = u128; + type Value = T; + + fn create_table(&self, table: &str) -> Result<(), StorageError> { + let conn = self.open_conn()?; + let sql = format!( + "CREATE TABLE IF NOT EXISTS \"{}\" (key TEXT PRIMARY KEY, value JSON NOT NULL)", + table + ); + conn.execute(&sql, [])?; + Ok(()) + } + + fn insert(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError> { + let conn = self.open_conn()?; + let json_val: Value = + serde_json::to_value(&value).map_err(|e| StorageError::DatabaseError(e.to_string()))?; + let sql = format!("INSERT INTO \"{}\" (key, value) VALUES (?1, ?2)", table); + conn.execute(&sql, params![key.to_string(), json_val])?; + Ok(()) + } + + fn get(&self, table: &str, key: Self::Key) -> Result, StorageError> { + let conn = self.open_conn()?; + let sql = format!("SELECT value FROM \"{}\" WHERE key = ?1 LIMIT 1", table); + let mut stmt = conn.prepare(&sql)?; + let mut rows = stmt.query(params![key.to_string()])?; + if let Some(row) = rows.next()? 
{ + let json_val: Value = row.get(0)?; + let v: T = serde_json::from_value(json_val) + .map_err(|e| StorageError::DatabaseError(e.to_string()))?; + Ok(Some(v)) + } else { + Ok(None) + } + } + + fn delete(&self, table: &str, key: Self::Key) -> Result<(), StorageError> { + let conn = self.open_conn()?; + let sql = format!("DELETE FROM \"{}\" WHERE key = ?1", table); + conn.execute(&sql, params![key.to_string()])?; + Ok(()) + } + + fn update(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError> { + let conn = self.open_conn()?; + let json_val: Value = + serde_json::to_value(&value).map_err(|e| StorageError::DatabaseError(e.to_string()))?; + let sql = format!("UPDATE \"{}\" SET value = ?1 WHERE key = ?2", table); + conn.execute(&sql, params![json_val, key.to_string()])?; + Ok(()) + } + + fn upsert( + &self, + table: &str, + key: Self::Key, + value: Self::Value, + ) -> Result { + let conn = self.open_conn()?; + let json_val: Value = + serde_json::to_value(&value).map_err(|e| StorageError::DatabaseError(e.to_string()))?; + let sql = format!( + "INSERT INTO \"{t}\" (key, value) VALUES (?1, ?2) + ON CONFLICT(key) DO UPDATE SET value = excluded.value", + t = table + ); + conn.execute(&sql, params![key.to_string(), json_val])?; + Ok(true) + } + + fn batch_insert( + &self, + table: &str, + data: Vec<(Self::Key, Self::Value)>, + ) -> Result<(), StorageError> { + if data.is_empty() { + return Ok(()); + } + let mut conn = self.open_conn()?; + let tx = conn.transaction()?; + let sql = format!("INSERT INTO \"{}\" (key, value) VALUES (?1, ?2)", table); + { + let mut stmt = tx.prepare(&sql)?; + for (k, v) in data { + let json_val: Value = serde_json::to_value(&v) + .map_err(|e| StorageError::DatabaseError(e.to_string()))?; + stmt.execute(params![k.to_string(), json_val])?; + } + } + tx.commit()?; + Ok(()) + } + + fn batch_get( + &self, + table: &str, + keys: Vec, + ) -> Result>, StorageError> { + if keys.is_empty() { + return Ok(Vec::new()); + } + let conn = self.open_conn()?; + let placeholders = std::iter::repeat("?") + .take(keys.len()) + .collect::>() + .join(","); + let query_sql = format!( + "SELECT key, value FROM \"{}\" WHERE key IN ({})", + table, placeholders + ); + let mut stmt = conn.prepare(&query_sql)?; + let key_strings = keys.iter().map(|k| k.to_string()); + let mut rows = stmt.query(params_from_iter(key_strings))?; + + let mut found: HashMap = HashMap::new(); + while let Some(row) = rows.next()? 
{ + let key_str: String = row.get(0)?; + let json_val: serde_json::Value = row.get(1)?; + found.insert(key_str, json_val); + } + + let mut result = Vec::with_capacity(keys.len()); + for k in keys { + let ks = k.to_string(); + if let Some(json_val) = found.remove(&ks) { + let v: T = serde_json::from_value(json_val) + .map_err(|e| StorageError::DatabaseError(e.to_string()))?; + result.push(Some(v)); + } else { + result.push(None); + } + } + + Ok(result) + } + + fn batch_upsert( + &self, + table: &str, + data: Vec<(Self::Key, Self::Value)>, + ) -> Result<(), StorageError> { + if data.is_empty() { + return Ok(()); + } + let mut conn = self.open_conn()?; + let tx = conn.transaction()?; + let sql = format!( + "INSERT INTO \"{t}\" (key, value) VALUES (?1, ?2) + ON CONFLICT(key) DO UPDATE SET value = excluded.value", + t = table + ); + { + let mut stmt = tx.prepare(&sql)?; + for (k, v) in data { + let json_val: Value = serde_json::to_value(&v) + .map_err(|e| StorageError::DatabaseError(e.to_string()))?; + stmt.execute(params![k.to_string(), json_val])?; + } + } + tx.commit()?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::fs::remove_dir_all; + + use super::*; + use serde::Deserialize; + use tempfile::tempdir; + + #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)] + struct TestData { + id: u32, + name: String, + } + + fn setup_db() -> (SqliteDatabase, String, tempfile::TempDir) { + let dir = tempdir().unwrap(); + let db_path = dir.path().join("test.db"); + let db: SqliteDatabase = + SqliteDatabase::initialize(Some(db_path.clone()), "test.db").unwrap(); + let table = "test_table".to_string(); + db.create_table(&table).unwrap(); + (db, table, dir) + } + + #[test] + fn test_insert_and_get() { + let (db, table, db_path) = setup_db(); + + let data1 = TestData { + id: 1, + name: "Alice".into(), + }; + let data2 = TestData { + id: 2, + name: "Bob".into(), + }; + + db.insert(&table, 1001, data1.clone()).unwrap(); + db.insert(&table, 1002, data2.clone()).unwrap(); + + assert_eq!(db.get(&table, 1001).unwrap(), Some(data1)); + assert_eq!(db.get(&table, 1002).unwrap(), Some(data2)); + assert_eq!(db.get(&table, 9999).unwrap(), None); + + remove_dir_all(db_path).unwrap(); + } + + #[test] + fn test_update() { + let (db, table, db_path) = setup_db(); + + let data = TestData { + id: 1, + name: "Alice".into(), + }; + db.insert(&table, 1001, data.clone()).unwrap(); + + let updated = TestData { + id: 1, + name: "Alice Updated".into(), + }; + db.update(&table, 1001, updated.clone()).unwrap(); + + assert_eq!(db.get(&table, 1001).unwrap(), Some(updated)); + remove_dir_all(db_path).unwrap(); + } + + #[test] + fn test_upsert() { + let (db, table, db_path) = setup_db(); + + let data = TestData { + id: 1, + name: "Alice".into(), + }; + db.upsert(&table, 1001, data.clone()).unwrap(); + assert_eq!(db.get(&table, 1001).unwrap(), Some(data.clone())); + + let updated = TestData { + id: 1, + name: "Alice Upserted".into(), + }; + db.upsert(&table, 1001, updated.clone()).unwrap(); + assert_eq!(db.get(&table, 1001).unwrap(), Some(updated)); + remove_dir_all(db_path).unwrap(); + } + + #[test] + fn test_delete() { + let (db, table, db_path) = setup_db(); + + let data = TestData { + id: 1, + name: "Alice".into(), + }; + db.insert(&table, 1001, data.clone()).unwrap(); + + db.delete(&table, 1001).unwrap(); + assert_eq!(db.get(&table, 1001).unwrap(), None); + remove_dir_all(db_path).unwrap(); + } +} diff --git a/src/lib/world/src/db_functions.rs b/src/lib/world/src/db_functions.rs index c2af0de2..c62866d1 100644 --- 
a/src/lib/world/src/db_functions.rs +++ b/src/lib/world/src/db_functions.rs @@ -5,6 +5,7 @@ use crate::errors::WorldError::CorruptedChunkData; use crate::warn; use crate::World; use ferrumc_config::server_config::get_global_config; +use ferrumc_storage::database::Database; use std::hash::Hasher; use std::sync::Arc; use tracing::trace; @@ -119,7 +120,7 @@ impl World { pub(crate) fn save_chunk_internal(world: &World, chunk: &Chunk) -> Result<(), WorldError> { if !world.storage_backend.table_exists("chunks".to_string())? { - world.storage_backend.create_table("chunks".to_string())?; + world.storage_backend.create_table("chunks")?; } let as_bytes = yazi::compress( &bitcode::encode(chunk), @@ -127,9 +128,7 @@ pub(crate) fn save_chunk_internal(world: &World, chunk: &Chunk) -> Result<(), Wo CompressionLevel::BestSpeed, )?; let digest = create_key(chunk.dimension.as_str(), chunk.x, chunk.z); - world - .storage_backend - .upsert("chunks".to_string(), digest, as_bytes)?; + world.storage_backend.upsert("chunks", digest, as_bytes)?; Ok(()) } @@ -140,7 +139,7 @@ pub(crate) fn load_chunk_internal( dimension: &str, ) -> Result { let digest = create_key(dimension, x, z); - match world.storage_backend.get("chunks".to_string(), digest)? { + match world.storage_backend.get("chunks", digest)? { Some(compressed) => { let (data, checksum) = yazi::decompress(compressed.as_slice(), yazi::Format::Zlib)?; if get_global_config().database.verify_chunk_data { @@ -171,7 +170,7 @@ pub(crate) fn load_chunk_batch_internal( .collect(); world .storage_backend - .batch_get("chunks".to_string(), digests)? + .batch_get("chunks", digests)? .iter() .map(|chunk| match chunk { Some(compressed) => { @@ -215,7 +214,7 @@ pub(crate) fn delete_chunk_internal( dimension: &str, ) -> Result<(), WorldError> { let digest = create_key(dimension, x, z); - world.storage_backend.delete("chunks".to_string(), digest)?; + world.storage_backend.delete("chunks", digest)?; Ok(()) } diff --git a/src/lib/world/src/importing.rs b/src/lib/world/src/importing.rs index 2f37cbdc..a64e79b8 100644 --- a/src/lib/world/src/importing.rs +++ b/src/lib/world/src/importing.rs @@ -2,6 +2,7 @@ use crate::errors::WorldError; use crate::vanilla_chunk_format::VanillaChunk; use crate::World; use ferrumc_anvil::load_anvil_file; +use ferrumc_storage::database::Database; use ferrumc_threadpool::ThreadPool; use indicatif::{ProgressBar, ProgressStyle}; use rayon::prelude::*; @@ -51,7 +52,7 @@ impl World { progress.set_message("Setting up database and preparing import..."); - self.storage_backend.create_table("chunks".to_string())?; + self.storage_backend.create_table("chunks")?; let start = std::time::Instant::now(); From ed2e35387980878617cedd38c9a98b2b332ec7fc Mon Sep 17 00:00:00 2001 From: unknown Date: Sun, 24 Aug 2025 17:10:08 -0300 Subject: [PATCH 05/11] fix: cargo clippy --- src/lib/storage/src/sqlite.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/lib/storage/src/sqlite.rs b/src/lib/storage/src/sqlite.rs index 7ce2e5d8..c9cfa3e2 100644 --- a/src/lib/storage/src/sqlite.rs +++ b/src/lib/storage/src/sqlite.rs @@ -8,9 +8,7 @@ use crate::{database::Database, errors::StorageError}; // TODO: Implement proper error mapping impl From for StorageError { fn from(err: rusqlite::Error) -> Self { - match err { - _ => StorageError::DatabaseError(err.to_string()), - } + StorageError::DatabaseError(err.to_string()) } } @@ -148,8 +146,7 @@ where return Ok(Vec::new()); } let conn = self.open_conn()?; - let placeholders = std::iter::repeat("?") - 
.take(keys.len()) + let placeholders = std::iter::repeat_n("?", keys.len()) .collect::>() .join(","); let query_sql = format!( From 67a9bf56bf3cc55de4b7fec80a1e0269db9c8625 Mon Sep 17 00:00:00 2001 From: unknown Date: Sun, 24 Aug 2025 17:12:21 -0300 Subject: [PATCH 06/11] fix: benches test of storage --- src/lib/storage/src/benches/db.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/lib/storage/src/benches/db.rs b/src/lib/storage/src/benches/db.rs index a6d444e6..45c28c31 100644 --- a/src/lib/storage/src/benches/db.rs +++ b/src/lib/storage/src/benches/db.rs @@ -1,4 +1,4 @@ -use ferrumc_storage::lmdb::LmdbBackend; +use ferrumc_storage::{database::Database, lmdb::LmdbBackend}; use rand::Rng; use std::collections::HashSet; @@ -30,14 +30,14 @@ pub(crate) fn db_benches(c: &mut criterion::Criterion) { let db = LmdbBackend::initialize(Some(tempdir)).unwrap(); - db.create_table("insert_test".to_string()).unwrap(); + db.create_table("insert_test").unwrap(); let mut insert_group = c.benchmark_group("Insert"); insert_group.bench_function("512b".to_string(), |b| { b.iter(|| { db.insert( - "insert_test".to_string(), + "insert_test", generate_random_key(&mut used_keys), generate_random_data(512), ) @@ -48,7 +48,7 @@ pub(crate) fn db_benches(c: &mut criterion::Criterion) { insert_group.bench_function("1kb".to_string(), |b| { b.iter(|| { db.insert( - "insert_test".to_string(), + "insert_test", generate_random_key(&mut used_keys), generate_random_data(1024), ) @@ -59,7 +59,7 @@ pub(crate) fn db_benches(c: &mut criterion::Criterion) { insert_group.bench_function("4kb".to_string(), |b| { b.iter(|| { db.insert( - "insert_test".to_string(), + "insert_test", generate_random_key(&mut used_keys), generate_random_data(4096), ) @@ -71,20 +71,20 @@ pub(crate) fn db_benches(c: &mut criterion::Criterion) { let mut read_group = c.benchmark_group("Read"); - db.create_table("read_test".to_string()).unwrap(); + db.create_table("read_test").unwrap(); let keys_512b = (0..1000) .map(|_| generate_random_key(&mut used_keys)) .collect::>(); for key in keys_512b.iter() { - db.insert("read_test".to_string(), *key, generate_random_data(512)) + db.insert("read_test", *key, generate_random_data(512)) .unwrap(); } read_group.bench_function("512b".to_string(), |b| { b.iter(|| { - db.get("read_test".to_string(), select_random(keys_512b.clone())) + db.get("read_test", select_random(keys_512b.clone())) .unwrap(); }) }); @@ -94,13 +94,13 @@ pub(crate) fn db_benches(c: &mut criterion::Criterion) { .collect::>(); for key in keys_1kb.iter() { - db.insert("read_test".to_string(), *key, generate_random_data(1024)) + db.insert("read_test", *key, generate_random_data(1024)) .unwrap(); } read_group.bench_function("1kb".to_string(), |b| { b.iter(|| { - db.get("read_test".to_string(), select_random(keys_1kb.clone())) + db.get("read_test", select_random(keys_1kb.clone())) .unwrap(); }) }); @@ -110,13 +110,13 @@ pub(crate) fn db_benches(c: &mut criterion::Criterion) { .collect::>(); for key in keys_4kb.iter() { - db.insert("read_test".to_string(), *key, generate_random_data(4096)) + db.insert("read_test", *key, generate_random_data(4096)) .unwrap(); } read_group.bench_function("4kb".to_string(), |b| { b.iter(|| { - db.get("read_test".to_string(), select_random(keys_4kb.clone())) + db.get("read_test", select_random(keys_4kb.clone())) .unwrap(); }) }); From 4fff8d35a1f6c1edb0b80dd3b04b47a90f33b93a Mon Sep 17 00:00:00 2001 From: unknown Date: Sat, 13 Sep 2025 13:46:57 -0300 Subject: [PATCH 07/11] chore: 
changed to sqlite --- Cargo.toml | 12 ++- src/bin/Cargo.toml | 1 - src/bin/src/main.rs | 2 + .../play_packets/player_loaded.rs | 14 +-- src/lib/core/src/data/player.rs | 10 +-- src/lib/core/src/transform/position.rs | 2 +- src/lib/core/state/Cargo.toml | 1 + src/lib/core/state/src/lib.rs | 3 + src/lib/core/state/src/player_state.rs | 22 +++++ src/lib/player_state/Cargo.toml | 37 -------- src/lib/player_state/src/data.rs | 13 --- src/lib/player_state/src/errors.rs | 39 --------- src/lib/player_state/src/lib.rs | 2 - src/lib/player_state/src/storage.rs | 47 ---------- src/lib/storage/src/database.rs | 2 +- src/lib/storage/src/lmdb.rs | 2 +- src/lib/storage/src/sqlite.rs | 85 +++++++++++++------ src/lib/world/Cargo.toml | 2 +- src/lib/world/src/db_functions.rs | 2 +- src/lib/world/src/errors.rs | 22 +++++ src/lib/world/src/lib.rs | 15 ++-- src/lib/world/src/player_state.rs | 23 +++++ 22 files changed, 166 insertions(+), 192 deletions(-) create mode 100644 src/lib/core/state/src/player_state.rs delete mode 100644 src/lib/player_state/Cargo.toml delete mode 100644 src/lib/player_state/src/data.rs delete mode 100644 src/lib/player_state/src/errors.rs delete mode 100644 src/lib/player_state/src/lib.rs delete mode 100644 src/lib/player_state/src/storage.rs create mode 100644 src/lib/world/src/player_state.rs diff --git a/Cargo.toml b/Cargo.toml index 3d0394e8..d896d95f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,7 +36,6 @@ members = [ "src/lib/inventories", "src/lib/registry", "src/lib/scheduler", - "src/lib/player_state", ] #================== Lints ==================# @@ -106,7 +105,6 @@ ferrumc-scheduler = { path = "src/lib/scheduler" } ferrumc-state = { path = "src/lib/core/state" } ferrumc-storage = { path = "src/lib/storage" } ferrumc-text = { path = "src/lib/text" } -ferrumc-playerstate = { path = "src/lib/player_state" } ferrumc-threadpool = { path = "src/lib/utils/threadpool" } ferrumc-utils = { path = "src/lib/utils" } ferrumc-world = { path = "src/lib/world" } @@ -114,7 +112,15 @@ ferrumc-world-gen = { path = "src/lib/world_gen" } ferrumc-inventories = { path = "src/lib/inventories" } # Asynchronous -tokio = { version = "1.47.1", features = ["macros", "net", "rt", "sync", "time", "io-util", "test-util"], default-features = false } +tokio = { version = "1.47.1", features = [ + "macros", + "net", + "rt", + "sync", + "time", + "io-util", + "test-util", +], default-features = false } # Logging tracing = "0.1.41" diff --git a/src/bin/Cargo.toml b/src/bin/Cargo.toml index b3048dc2..c7c75772 100644 --- a/src/bin/Cargo.toml +++ b/src/bin/Cargo.toml @@ -33,7 +33,6 @@ ferrumc-threadpool = { workspace = true } ferrumc-inventories = { workspace = true } once_cell = { workspace = true } serde_json = { workspace = true } -ferrumc-playerstate = { workspace = true } tracing = { workspace = true } clap = { workspace = true, features = ["derive", "env"] } diff --git a/src/bin/src/main.rs b/src/bin/src/main.rs index 38731ff0..569591bf 100644 --- a/src/bin/src/main.rs +++ b/src/bin/src/main.rs @@ -6,6 +6,7 @@ use ferrumc_config::server_config::get_global_config; use ferrumc_config::whitelist::create_whitelist; use ferrumc_general_purpose::paths::get_root_path; use ferrumc_state::player_list::PlayerList; +use ferrumc_state::player_state::PlayerState; use ferrumc_state::{GlobalState, ServerState}; use ferrumc_threadpool::ThreadPool; use ferrumc_world::World; @@ -159,6 +160,7 @@ fn create_state(start_time: Instant) -> Result { world: World::new(&get_global_config().database.db_path), terrain_generator: 
WorldGenerator::new(0), shut_down: false.into(), + player_state: PlayerState::default(), players: PlayerList::default(), thread_pool: ThreadPool::new(), start_time, diff --git a/src/bin/src/packet_handlers/play_packets/player_loaded.rs b/src/bin/src/packet_handlers/play_packets/player_loaded.rs index e182a151..09fed5a1 100644 --- a/src/bin/src/packet_handlers/play_packets/player_loaded.rs +++ b/src/bin/src/packet_handlers/play_packets/player_loaded.rs @@ -29,7 +29,6 @@ pub fn handle( // Default player data *player_data = PlayerData::new( - player_identity.uuid.as_u128(), Position::default(), "overworld", ); @@ -38,7 +37,6 @@ pub fn handle( if let Ok(loaded) = state .0 .world - .players_state .load_player_state(player_identity.uuid.as_u128()) { match loaded { @@ -46,7 +44,7 @@ pub fn handle( *player_data = loaded_data; tracing::info!( "Loaded player state for {}: position=({}, {}, {}), dimension={}", - player_data.uuid, + player_identity.uuid.as_u128(), player_data.pos.x, player_data.pos.y, player_data.pos.z, @@ -54,19 +52,21 @@ pub fn handle( ); } None => { - if let Err(e) = state.0.world.players_state.save_player_state(&player_data) { + if let Err(e) = state.0.world.save_player_state(player_identity.uuid.as_u128(), &player_data) { tracing::error!( - "Failed to save player state for {}: {:?}", + "Failed to save player state for {} ({}): {:?}", player_identity.username, + player_identity.uuid.as_u128(), e ); } } } - } else if let Err(e) = state.0.world.players_state.save_player_state(&player_data) { + } else if let Err(e) = state.0.world.save_player_state(player_identity.uuid.as_u128(), &player_data) { tracing::error!( - "Failed to save player state for {}: {:?}", + "Failed to save player state for {} ({}): {:?}", player_identity.username, + player_identity.uuid.as_u128(), e ); } diff --git a/src/lib/core/src/data/player.rs b/src/lib/core/src/data/player.rs index 2915acd3..a5b66632 100644 --- a/src/lib/core/src/data/player.rs +++ b/src/lib/core/src/data/player.rs @@ -5,23 +5,23 @@ use serde::{Deserialize, Serialize}; use crate::transform::position::Position; // https://minecraft.fandom.com/wiki/Player.dat_format -#[derive(Serialize, Deserialize, Debug, Encode, Decode, Component, typename::TypeName)] +#[derive( + Serialize, Deserialize, Clone, Debug, Encode, Decode, Component, typename::TypeName, PartialEq, +)] pub struct PlayerData { - pub uuid: u128, pub pos: Position, pub dimension: String, } impl Default for PlayerData { fn default() -> Self { - Self::new(0, Position::default(), "overworld") + Self::new(Position::default(), "overworld") } } impl PlayerData { - pub fn new(uuid: u128, pos: Position, dimension: &str) -> Self { + pub fn new(pos: Position, dimension: &str) -> Self { Self { - uuid, pos, dimension: dimension.to_string(), } diff --git a/src/lib/core/src/transform/position.rs b/src/lib/core/src/transform/position.rs index 2ac8ed5b..642b24d6 100644 --- a/src/lib/core/src/transform/position.rs +++ b/src/lib/core/src/transform/position.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Display, Formatter}; use typename::TypeName; -#[derive(TypeName, Component, Serialize, Deserialize, Encode, Decode)] +#[derive(TypeName, Component, Serialize, Deserialize, Encode, Decode, Clone, PartialEq)] pub struct Position { pub x: f64, pub y: f64, diff --git a/src/lib/core/state/Cargo.toml b/src/lib/core/state/Cargo.toml index 19324db6..f53540c2 100644 --- a/src/lib/core/state/Cargo.toml +++ b/src/lib/core/state/Cargo.toml @@ -10,3 +10,4 @@ ferrumc-world-gen = { 
workspace = true } dashmap = { workspace = true } ferrumc-threadpool = { workspace = true } crossbeam-queue = { workspace = true } +ferrumc-core = { workspace = true } diff --git a/src/lib/core/state/src/lib.rs b/src/lib/core/state/src/lib.rs index e319f468..9f25d685 100644 --- a/src/lib/core/state/src/lib.rs +++ b/src/lib/core/state/src/lib.rs @@ -1,6 +1,8 @@ pub mod player_list; +pub mod player_state; use crate::player_list::PlayerList; +use crate::player_state::PlayerState; use bevy_ecs::prelude::Resource; use ferrumc_threadpool::ThreadPool; use ferrumc_world::World; @@ -12,6 +14,7 @@ use std::time::Instant; pub struct ServerState { pub world: World, pub terrain_generator: WorldGenerator, + pub player_state: PlayerState, pub shut_down: AtomicBool, pub players: PlayerList, // (UUID, Username) pub thread_pool: ThreadPool, diff --git a/src/lib/core/state/src/player_state.rs b/src/lib/core/state/src/player_state.rs new file mode 100644 index 00000000..41ab86c2 --- /dev/null +++ b/src/lib/core/state/src/player_state.rs @@ -0,0 +1,22 @@ +use bevy_ecs::entity::Entity; +use dashmap::DashMap; +use ferrumc_core::data::player::PlayerData; + +#[derive(Debug, Default)] +pub struct PlayerState { + player_data: DashMap, +} + +impl PlayerState { + pub fn is_connected(&self, entity: Entity) -> bool { + self.player_data.contains_key(&entity) + } + + pub fn disconnect(&self, entity: Entity) { + self.player_data.remove(&entity); + } + + pub fn connect(&self, entity: Entity, data: PlayerData) { + self.player_data.insert(entity, data); + } +} diff --git a/src/lib/player_state/Cargo.toml b/src/lib/player_state/Cargo.toml deleted file mode 100644 index 44ce01c2..00000000 --- a/src/lib/player_state/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "ferrumc-playerstate" -version = "0.1.0" -edition = "2024" - -[dependencies] -thiserror = { workspace = true } -ferrumc-storage = { workspace = true } -ferrumc-config = { workspace = true } -ferrumc-core = { workspace = true } -tracing = { workspace = true } -ferrumc-net-codec = { workspace = true } -serde = { workspace = true } -serde_derive = { workspace = true } -macro_rules_attribute = { workspace = true } -bitcode_derive = { workspace = true } -bitcode = { workspace = true } -deepsize = { workspace = true } -ferrumc-nbt = { workspace = true } -ferrumc-macros = { workspace = true } -ferrumc-anvil = { workspace = true } -rayon = { workspace = true } -ferrumc-general-purpose = { workspace = true } -lazy_static = { workspace = true } -bzip2 = { workspace = true } -serde_json = { workspace = true } -indicatif = { workspace = true } -wyhash = { workspace = true } -moka = { workspace = true, features = ["sync"] } -ahash = { workspace = true } -rand = { workspace = true } -yazi = { workspace = true } -ferrumc-threadpool = { workspace = true } -lz4_flex = { workspace = true } - -[lints] -workspace = true diff --git a/src/lib/player_state/src/data.rs b/src/lib/player_state/src/data.rs deleted file mode 100644 index df701ac7..00000000 --- a/src/lib/player_state/src/data.rs +++ /dev/null @@ -1,13 +0,0 @@ -// use bitcode::{Decode, Encode}; -// use serde::{Deserialize, Serialize}; - -// // https://minecraft.fandom.com/wiki/Player.dat_format -// #[derive(Serialize, Deserialize, Debug, Encode, Decode)] -// pub struct PlayerData { -// pub uuid: u128, -// pub username: String, -// pub x: f64, -// pub y: f64, -// pub z: f64, -// pub dimension: String, -// } diff --git a/src/lib/player_state/src/errors.rs b/src/lib/player_state/src/errors.rs deleted file mode 100644 index 
diff --git a/src/lib/player_state/Cargo.toml b/src/lib/player_state/Cargo.toml
deleted file mode 100644
index 44ce01c2..00000000
--- a/src/lib/player_state/Cargo.toml
+++ /dev/null
@@ -1,37 +0,0 @@
-[package]
-name = "ferrumc-playerstate"
-version = "0.1.0"
-edition = "2024"
-
-[dependencies]
-thiserror = { workspace = true }
-ferrumc-storage = { workspace = true }
-ferrumc-config = { workspace = true }
-ferrumc-core = { workspace = true }
-tracing = { workspace = true }
-ferrumc-net-codec = { workspace = true }
-serde = { workspace = true }
-serde_derive = { workspace = true }
-macro_rules_attribute = { workspace = true }
-bitcode_derive = { workspace = true }
-bitcode = { workspace = true }
-deepsize = { workspace = true }
-ferrumc-nbt = { workspace = true }
-ferrumc-macros = { workspace = true }
-ferrumc-anvil = { workspace = true }
-rayon = { workspace = true }
-ferrumc-general-purpose = { workspace = true }
-lazy_static = { workspace = true }
-bzip2 = { workspace = true }
-serde_json = { workspace = true }
-indicatif = { workspace = true }
-wyhash = { workspace = true }
-moka = { workspace = true, features = ["sync"] }
-ahash = { workspace = true }
-rand = { workspace = true }
-yazi = { workspace = true }
-ferrumc-threadpool = { workspace = true }
-lz4_flex = { workspace = true }
-
-[lints]
-workspace = true
diff --git a/src/lib/player_state/src/data.rs b/src/lib/player_state/src/data.rs
deleted file mode 100644
index df701ac7..00000000
--- a/src/lib/player_state/src/data.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-// use bitcode::{Decode, Encode};
-// use serde::{Deserialize, Serialize};
-
-// // https://minecraft.fandom.com/wiki/Player.dat_format
-// #[derive(Serialize, Deserialize, Debug, Encode, Decode)]
-// pub struct PlayerData {
-//     pub uuid: u128,
-//     pub username: String,
-//     pub x: f64,
-//     pub y: f64,
-//     pub z: f64,
-//     pub dimension: String,
-// }
diff --git a/src/lib/player_state/src/errors.rs b/src/lib/player_state/src/errors.rs
deleted file mode 100644
index 4250a446..00000000
--- a/src/lib/player_state/src/errors.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-use ferrumc_storage::errors::StorageError;
-use thiserror::Error;
-use yazi::Error;
-
-#[derive(Debug, Error)]
-pub enum PlayerDataError {
-    #[error("Compression error: {0}")]
-    CompressionError(String),
-    #[error("A database error occurred from the playerstate crate: {0}")]
-    DatabaseError(StorageError),
-    #[error("Some kind of IO error occurred: {0}")]
-    GenericIOError(String),
-}
-
-impl From<StorageError> for PlayerDataError {
-    fn from(err: StorageError) -> Self {
-        PlayerDataError::DatabaseError(err)
-    }
-}
-
-impl From<yazi::Error> for PlayerDataError {
-    fn from(e: yazi::Error) -> Self {
-        match e {
-            Error::Underflow => {
-                PlayerDataError::CompressionError("Underflow error during compression".to_string())
-            }
-            Error::InvalidBitstream => PlayerDataError::CompressionError(
-                "Invalid bitstream error during compression".to_string(),
-            ),
-            Error::Overflow => {
-                PlayerDataError::CompressionError("Overflow error during compression".to_string())
-            }
-            Error::Finished => {
-                PlayerDataError::CompressionError("Finished error during compression".to_string())
-            }
-            Error::Io(io_err) => PlayerDataError::GenericIOError(io_err.to_string()),
-        }
-    }
-}
diff --git a/src/lib/player_state/src/lib.rs b/src/lib/player_state/src/lib.rs
deleted file mode 100644
index dca9fe7f..00000000
--- a/src/lib/player_state/src/lib.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-pub mod errors;
-pub mod storage;
diff --git a/src/lib/player_state/src/storage.rs b/src/lib/player_state/src/storage.rs
deleted file mode 100644
index 0f9a4f31..00000000
--- a/src/lib/player_state/src/storage.rs
+++ /dev/null
@@ -1,47 +0,0 @@
-use std::sync::Arc;
-
-use ferrumc_core::data::player::PlayerData;
-use ferrumc_storage::{database::Database, errors::StorageError, lmdb::LmdbBackend};
-use yazi::CompressionLevel;
-
-use crate::errors::PlayerDataError;
-
-// Table name for player state data in LMDB
-const TABLE_NAME: &str = "playerdata";
-
-#[derive(Clone)]
-pub struct PlayerStateStorage {
-    storage_backend: Arc<LmdbBackend>,
-}
-
-impl PlayerStateStorage {
-    pub fn new(storage_backend: Arc<LmdbBackend>) -> Self {
-        Self { storage_backend }
-    }
-
-    pub fn save_player_state(&self, state: &PlayerData) -> Result<(), PlayerDataError> {
-        if !self.storage_backend.table_exists(TABLE_NAME.to_string())? {
-            self.storage_backend.create_table(TABLE_NAME)?;
-        }
-        let key = state.uuid;
-
-        let encoded = yazi::compress(
-            &bitcode::encode(state),
-            yazi::Format::Zlib,
-            CompressionLevel::BestSpeed,
-        )?;
-        self.storage_backend.upsert(TABLE_NAME, key, encoded)?;
-        Ok(())
-    }
-
-    pub fn load_player_state(&self, key: u128) -> Result<Option<PlayerData>, PlayerDataError> {
-        if let Some(bytes) = self.storage_backend.get(TABLE_NAME, key)? {
-            let (data, _) = yazi::decompress(bytes.as_slice(), yazi::Format::Zlib)?;
-            let player: PlayerData = bitcode::decode(&data)
-                .map_err(|_| StorageError::ReadError("Failed to decode PlayerState".into()))?;
-            Ok(Some(player))
-        } else {
-            Ok(None)
-        }
-    }
-}
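For reference, the storage crate deleted above persisted bitcode-encoded, zlib-compressed blobs into LMDB. A standalone sketch of that round trip with a made-up Sample type (only the bitcode and yazi calls mirror the deleted save_player_state/load_player_state; everything else here is illustrative):

    use bitcode::{Decode, Encode};

    #[derive(Encode, Decode, Debug, PartialEq)]
    struct Sample {
        uuid: u128,
        dimension: String,
    }

    // Encode and compress, then decompress and decode, as the deleted storage did.
    fn round_trip(value: &Sample) -> Result<Sample, yazi::Error> {
        let compressed = yazi::compress(
            &bitcode::encode(value),
            yazi::Format::Zlib,
            yazi::CompressionLevel::BestSpeed,
        )?;
        let (raw, _) = yazi::decompress(&compressed, yazi::Format::Zlib)?;
        Ok(bitcode::decode(&raw).expect("bitcode decode failed"))
    }

The SQLite replacement introduced later in this series trades this compact binary encoding for plain JSON rows, which are simpler to inspect but larger on disk.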
diff --git a/src/lib/storage/src/database.rs b/src/lib/storage/src/database.rs
index 86fa77fa..5ea3d96b 100644
--- a/src/lib/storage/src/database.rs
+++ b/src/lib/storage/src/database.rs
@@ -9,7 +9,7 @@ pub trait Database {
     fn get(&self, table: &str, key: Self::Key) -> Result<Option<Self::Value>, StorageError>;
     fn delete(&self, table: &str, key: Self::Key) -> Result<(), StorageError>;
     fn update(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError>;
-    fn upsert(&self, table: &str, key: Self::Key, value: Self::Value)
+    fn upsert(&self, table: &str, key: Self::Key, value: &Self::Value)
         -> Result<bool, StorageError>;
     fn batch_insert(
         &self,
diff --git a/src/lib/storage/src/lmdb.rs b/src/lib/storage/src/lmdb.rs
index b1dc9cba..d6c86ea0 100644
--- a/src/lib/storage/src/lmdb.rs
+++ b/src/lib/storage/src/lmdb.rs
@@ -166,7 +166,7 @@ impl Database for LmdbBackend {
         &self,
         table: &str,
         key: Self::Key,
-        value: Self::Value,
+        value: &Self::Value,
     ) -> Result<bool, StorageError> {
         let env = self.env.lock();
         let mut rw_txn = env.write_txn()?;
diff --git a/src/lib/storage/src/sqlite.rs b/src/lib/storage/src/sqlite.rs
index c9cfa3e2..44b4799a 100644
--- a/src/lib/storage/src/sqlite.rs
+++ b/src/lib/storage/src/sqlite.rs
@@ -12,6 +12,7 @@ impl From for StorageError {
     }
 }
+#[derive(Debug, Clone)]
 pub struct SqliteDatabase<V> {
     store_path: PathBuf,
     _marker: PhantomData<V>,
 }
@@ -28,8 +29,9 @@
         if !checked_path.exists() {
             std::fs::create_dir_all(&checked_path)?;
         }
+        let checked_path = checked_path.join(storage_name);
         Ok(Self {
-            store_path: checked_path.join(storage_name),
+            store_path: checked_path,
             _marker: PhantomData,
         })
     }
@@ -100,7 +102,7 @@ where
         &self,
         table: &str,
         key: Self::Key,
-        value: Self::Value,
+        value: &Self::Value,
    ) -> Result<bool, StorageError> {
         let conn = self.open_conn()?;
         let json_val: Value =
@@ -215,17 +217,24 @@ mod tests {
     use serde::Deserialize;
     use tempfile::tempdir;
-    #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
+    #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
     struct TestData {
-        id: u32,
-        name: String,
+        pub pos: Position,
+        pub dimension: String,
+    }
+
+    #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
+    pub struct Position {
+        pub x: f64,
+        pub y: f64,
+        pub z: f64,
     }
 
     fn setup_db() -> (SqliteDatabase<TestData>, String, tempfile::TempDir) {
         let dir = tempdir().unwrap();
-        let db_path = dir.path().join("test.db");
+        let db_path = dir.path();
         let db: SqliteDatabase<TestData> =
-            SqliteDatabase::initialize(Some(db_path.clone()), "test.db").unwrap();
+            SqliteDatabase::initialize(Some(PathBuf::from(db_path)), "test.db").unwrap();
         let table = "test_table".to_string();
         db.create_table(&table).unwrap();
         (db, table, dir)
     }
@@ -234,14 +243,19 @@ mod tests {
     #[test]
     fn test_insert_and_get() {
         let (db, table, db_path) = setup_db();
 
+        let pos = Position {
+            x: 0.0,
+            y: 64.0,
+            z: 0.0,
+        };
         let data1 = TestData {
-            id: 1,
-            name: "Alice".into(),
+            dimension: "Nether".into(),
+            pos: pos.clone(),
         };
         let data2 = TestData {
-            id: 2,
-            name: "Bob".into(),
+            dimension: "Overworld".into(),
+            pos: pos.clone(),
         };
 
         db.insert(&table, 1001, data1.clone()).unwrap();
@@ -258,16 +272,21 @@
     fn test_update() {
         let (db, table, db_path) = setup_db();
 
+        let pos = Position {
+            x: 0.0,
+            y: 64.0,
+            z: 0.0,
+        };
         let data = TestData {
-            id: 1,
-            name: "Alice".into(),
+            dimension: "Nether".into(),
+            pos: pos.clone(),
         };
-        db.insert(&table, 1001, data.clone()).unwrap();
-
         let updated = TestData {
-            id: 1,
-            name: "Alice Updated".into(),
+            dimension: "Overworld".into(),
+            pos: pos.clone(),
         };
+        db.insert(&table, 1001, data.clone()).unwrap();
+
         db.update(&table, 1001, updated.clone()).unwrap();
 
         assert_eq!(db.get(&table, 1001).unwrap(), Some(updated));
@@ -278,18 +297,23 @@
     fn test_upsert() {
         let (db, table, db_path) = setup_db();
 
+        let pos = Position {
+            x: 0.0,
+            y: 64.0,
+            z: 0.0,
+        };
         let data = TestData {
-            id: 1,
-            name: "Alice".into(),
+            dimension: "Nether".into(),
+            pos: pos.clone(),
         };
-        db.upsert(&table, 1001, data.clone()).unwrap();
-        assert_eq!(db.get(&table, 1001).unwrap(), Some(data.clone()));
-
         let updated = TestData {
-            id: 1,
-            name: "Alice Upserted".into(),
+            dimension: "Overworld".into(),
+            pos: pos.clone(),
         };
-        db.upsert(&table, 1001, updated.clone()).unwrap();
+        db.upsert(&table, 1001, &data.clone()).unwrap();
+        assert_eq!(db.get(&table, 1001).unwrap(), Some(data.clone()));
+
+        db.upsert(&table, 1001, &updated.clone()).unwrap();
         assert_eq!(db.get(&table, 1001).unwrap(), Some(updated));
         remove_dir_all(db_path).unwrap();
     }
@@ -298,11 +322,16 @@
     fn test_delete() {
         let (db, table, db_path) = setup_db();
 
+        let pos = Position {
+            x: 0.0,
+            y: 64.0,
+            z: 0.0,
+        };
         let data = TestData {
-            id: 1,
-            name: "Alice".into(),
+            dimension: "Nether".into(),
+            pos: pos.clone(),
         };
-        db.insert(&table, 1001, data.clone()).unwrap();
+        db.insert(&table, 1001, data).unwrap();
         db.delete(&table, 1001).unwrap();
 
         assert_eq!(db.get(&table, 1001).unwrap(), None);
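The reworked SQLite backend stores any Serialize + Deserialize value as a JSON column keyed by an integer, and upsert now borrows the value, which is what the adjusted tests above exercise. A hedged round-trip sketch outside the test module (the directory handling, file name, and helper function are made up; it assumes initialize and the trait methods all surface StorageError):

    use ferrumc_core::data::player::PlayerData;
    use ferrumc_storage::database::Database;
    use ferrumc_storage::errors::StorageError;
    use ferrumc_storage::sqlite::SqliteDatabase;
    use std::path::PathBuf;

    fn json_round_trip(dir: PathBuf, uuid: u128) -> Result<Option<PlayerData>, StorageError> {
        // initialize() now joins the file name onto the directory itself (see the path fix above).
        let db: SqliteDatabase<PlayerData> = SqliteDatabase::initialize(Some(dir), "playerdata.db")?;
        db.create_table("playerdata")?;

        // upsert takes &Self::Value, so the caller keeps ownership of the record it just wrote.
        db.upsert("playerdata", uuid, &PlayerData::default())?;
        db.get("playerdata", uuid)
    }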
diff --git a/src/lib/world/Cargo.toml b/src/lib/world/Cargo.toml
index 9a35e44b..df544d92 100644
--- a/src/lib/world/Cargo.toml
+++ b/src/lib/world/Cargo.toml
@@ -32,7 +32,7 @@ rand = { workspace = true }
 yazi = { workspace = true }
 ferrumc-threadpool = { workspace = true }
 lz4_flex = { workspace = true }
-ferrumc-playerstate = { workspace = true }
+ferrumc-core = { workspace = true }
 
 [[bench]]
 name = "world_bench"
diff --git a/src/lib/world/src/db_functions.rs b/src/lib/world/src/db_functions.rs
index c62866d1..a46bbf48 100644
--- a/src/lib/world/src/db_functions.rs
+++ b/src/lib/world/src/db_functions.rs
@@ -128,7 +128,7 @@ pub(crate) fn save_chunk_internal(world: &World, chunk: &Chunk) -> Result<(), WorldError> {
         CompressionLevel::BestSpeed,
     )?;
     let digest = create_key(chunk.dimension.as_str(), chunk.x, chunk.z);
-    world.storage_backend.upsert("chunks", digest, as_bytes)?;
+    world.storage_backend.upsert("chunks", digest, &as_bytes)?;
     Ok(())
 }
 
diff --git a/src/lib/world/src/errors.rs b/src/lib/world/src/errors.rs
index eb096648..2d5ab835 100644
--- a/src/lib/world/src/errors.rs
+++ b/src/lib/world/src/errors.rs
@@ -36,6 +36,8 @@ pub enum WorldError {
     ChunkNotFound,
     #[error("Anvil Decode Error: {0}")]
     AnvilDecodeError(AnvilError),
+    #[error("Player Data Error: {0}")]
+    PlayerDataError(PlayerDataError),
     #[error("Missing block mapping: {0}")]
     MissingBlockMapping(BlockData),
     #[error("Invalid memory map size: {0}")]
@@ -105,3 +107,23 @@ impl From for WorldError {
         }
     }
 }
+
+impl From<PlayerDataError> for WorldError {
+    fn from(e: PlayerDataError) -> Self {
+        WorldError::PlayerDataError(e)
+    }
+}
+
+#[derive(Debug, Error)]
+pub enum PlayerDataError {
+    #[error("A database error occurred from the playerstate crate: {0}")]
+    DatabaseError(StorageError),
+    #[error("Some kind of IO error occurred: {0}")]
+    GenericIOError(String),
+}
+
+impl From<StorageError> for PlayerDataError {
+    fn from(err: StorageError) -> Self {
+        PlayerDataError::DatabaseError(err)
+    }
+}
diff --git a/src/lib/world/src/lib.rs b/src/lib/world/src/lib.rs
index 795e729a..7306f5a3 100644
--- a/src/lib/world/src/lib.rs
+++ b/src/lib/world/src/lib.rs
@@ -5,15 +5,17 @@ pub mod edit_batch;
 pub mod edits;
 pub mod errors;
 mod importing;
+mod player_state;
 pub mod vanilla_chunk_format;
 
 use crate::chunk_format::Chunk;
 use crate::errors::WorldError;
 use deepsize::DeepSizeOf;
 use ferrumc_config::server_config::get_global_config;
+use ferrumc_core::data::player::PlayerData;
 use ferrumc_general_purpose::paths::get_root_path;
-use ferrumc_playerstate::storage::PlayerStateStorage;
 use ferrumc_storage::lmdb::LmdbBackend;
+use ferrumc_storage::sqlite::SqliteDatabase;
 use moka::sync::Cache;
 use std::fs::create_dir_all;
 use std::path::{Path, PathBuf};
@@ -25,8 +27,8 @@ use tracing::{error, trace, warn};
 #[derive(Clone)]
 pub struct World {
     storage_backend: Arc<LmdbBackend>,
+    player_state_backend: SqliteDatabase<PlayerData>,
     cache: Cache<(i32, i32, String), Arc<Chunk>>,
-    pub players_state: PlayerStateStorage,
 }
 
 fn check_config_validity() -> Result<(), WorldError> {
@@ -91,9 +93,12 @@
             backend_path = get_root_path().join(backend_path);
         }
         let storage_backend = Arc::new(
-            LmdbBackend::initialize(Some(backend_path)).expect("Failed to initialize database"),
+            LmdbBackend::initialize(Some(backend_path.clone()))
+                .expect("Failed to initialize database"),
         );
-        let players_state = PlayerStateStorage::new(Arc::clone(&storage_backend));
+
+        let player_state_backend = SqliteDatabase::initialize(Some(backend_path), "playerlist.db")
+            .expect("Failed to initialize storage backend");
 
         if get_global_config().database.cache_ttl != 0
             && get_global_config().database.cache_capacity == 0
@@ -116,7 +121,7 @@
         World {
             storage_backend,
             cache,
-            players_state,
+            player_state_backend,
         }
     }
 }
diff --git a/src/lib/world/src/player_state.rs b/src/lib/world/src/player_state.rs
new file mode 100644
index 00000000..119acb69
--- /dev/null
+++ b/src/lib/world/src/player_state.rs
@@ -0,0 +1,23 @@
+use ferrumc_core::data::player::PlayerData;
+use ferrumc_storage::database::Database;
+
+use crate::{errors::PlayerDataError, World};
+
+// Table name for player state data in SQLite
+const TABLE_NAME: &str = "playerdata";
+
+impl World {
+    pub fn save_player_state(&self, key: u128, state: &PlayerData) -> Result<(), PlayerDataError> {
+        self.player_state_backend.create_table(TABLE_NAME)?;
+        self.player_state_backend.upsert(TABLE_NAME, key, state)?;
+        Ok(())
+    }
+
+    pub fn load_player_state(&self, key: u128) -> Result<Option<PlayerData>, PlayerDataError> {
+        if let Some(player) = self.player_state_backend.get(TABLE_NAME, key)? {
+            Ok(Some(player))
+        } else {
+            Ok(None)
+        }
+    }
+}
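With the SQLite handle now owned by World, callers only see save_player_state and load_player_state. A short usage sketch for a hypothetical first-join path (the helper name is made up; the two methods, the error type, and PlayerData::default() are the ones this series defines):

    use ferrumc_core::data::player::PlayerData;
    use ferrumc_world::errors::PlayerDataError;
    use ferrumc_world::World;

    // Load persisted state if present, otherwise persist a default record for next time.
    fn load_or_init(world: &World, uuid: u128) -> Result<PlayerData, PlayerDataError> {
        match world.load_player_state(uuid)? {
            Some(saved) => Ok(saved),
            None => {
                let fresh = PlayerData::default();
                world.save_player_state(uuid, &fresh)?;
                Ok(fresh)
            }
        }
    }

Because errors.rs above also adds From<PlayerDataError> for WorldError, a caller that returns WorldError can forward the same calls with the ? operator.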
From d9b0bfa8d33fc126845002901bb513390cab662d Mon Sep 17 00:00:00 2001
From: unknown
Date: Sat, 13 Sep 2025 13:59:51 -0300
Subject: [PATCH 08/11] fix: cargo audit error

---
 Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Cargo.toml b/Cargo.toml
index d896d95f..895c501f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -124,7 +124,7 @@ tokio = { version = "1.47.1", features = [
 
 # Logging
 tracing = "0.1.41"
-tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
+tracing-subscriber = { version = ">=0.3.20", features = ["env-filter"] }
 tracing-appender = "0.2.3"
 log = "0.4.27"
 console-subscriber = "0.4.1"

From 400d746dc5cd62941f14432cf0121402f7b70a6e Mon Sep 17 00:00:00 2001
From: unknown
Date: Sat, 13 Sep 2025 14:01:19 -0300
Subject: [PATCH 09/11] fix: cargo fmt

---
 .../play_packets/player_loaded.rs       | 17 +++++++++++------
 src/bin/src/systems/new_connections.rs  |  2 +-
 src/lib/storage/src/database.rs         |  8 ++++++--
 3 files changed, 18 insertions(+), 9 deletions(-)

diff --git a/src/bin/src/packet_handlers/play_packets/player_loaded.rs b/src/bin/src/packet_handlers/play_packets/player_loaded.rs
index 09fed5a1..e74ebfb9 100644
--- a/src/bin/src/packet_handlers/play_packets/player_loaded.rs
+++ b/src/bin/src/packet_handlers/play_packets/player_loaded.rs
@@ -28,10 +28,7 @@ pub fn handle(
         }
 
         // Default player data
-        *player_data = PlayerData::new(
-            Position::default(),
-            "overworld",
-        );
+        *player_data = PlayerData::new(Position::default(), "overworld");
 
         // Save the player's position in the world
         if let Ok(loaded) = state
@@ -52,7 +49,11 @@ pub fn handle(
                     );
                 }
                 None => {
-                    if let Err(e) = state.0.world.save_player_state(player_identity.uuid.as_u128(), &player_data) {
+                    if let Err(e) = state
+                        .0
+                        .world
+                        .save_player_state(player_identity.uuid.as_u128(), &player_data)
+                    {
                         tracing::error!(
                             "Failed to save player state for {} ({}): {:?}",
                             player_identity.username,
@@ -62,7 +63,11 @@ pub fn handle(
                 }
             }
         }
-    } else if let Err(e) = state.0.world.save_player_state(player_identity.uuid.as_u128(), &player_data) {
+    } else if let Err(e) = state
+        .0
+        .world
+        .save_player_state(player_identity.uuid.as_u128(), &player_data)
+    {
         tracing::error!(
             "Failed to save player state for {} ({}): {:?}",
             player_identity.username,
diff --git a/src/bin/src/systems/new_connections.rs b/src/bin/src/systems/new_connections.rs
index 4f132484..2615a30e 100644
--- a/src/bin/src/systems/new_connections.rs
+++ b/src/bin/src/systems/new_connections.rs
@@ -4,9 +4,9 @@ use ferrumc_core::conn::keepalive::KeepAliveTracker;
 use ferrumc_core::transform::grounded::OnGround;
 use ferrumc_core::transform::position::Position;
 use ferrumc_core::transform::rotation::Rotation;
+use ferrumc_core::{chunks::chunk_receiver::ChunkReceiver, data::player::PlayerData};
 use ferrumc_inventories::hotbar::Hotbar;
 use ferrumc_inventories::inventory::Inventory;
-use ferrumc_core::{chunks::chunk_receiver::ChunkReceiver, data::player::PlayerData};
 use ferrumc_net::connection::NewConnection;
 use ferrumc_state::GlobalStateResource;
 use std::time::Instant;
diff --git a/src/lib/storage/src/database.rs b/src/lib/storage/src/database.rs
index 5ea3d96b..bd487c4f 100644
--- a/src/lib/storage/src/database.rs
+++ b/src/lib/storage/src/database.rs
@@ -9,8 +9,12 @@ pub trait Database {
     fn get(&self, table: &str, key: Self::Key) -> Result<Option<Self::Value>, StorageError>;
     fn delete(&self, table: &str, key: Self::Key) -> Result<(), StorageError>;
     fn update(&self, table: &str, key: Self::Key, value: Self::Value) -> Result<(), StorageError>;
-    fn upsert(&self, table: &str, key: Self::Key, value: &Self::Value)
-        -> Result<bool, StorageError>;
+    fn upsert(
+        &self,
+        table: &str,
+        key: Self::Key,
+        value: &Self::Value,
+    ) -> Result<bool, StorageError>;
     fn batch_insert(
         &self,
         table: &str,

From b7bea431f10cb145e5da0e66b5273f7c7ee6f077 Mon Sep 17 00:00:00 2001
From: unknown
Date: Sat, 13 Sep 2025 14:04:29 -0300
Subject: [PATCH 10/11] fix: cargo clippy

---
 src/lib/storage/src/sqlite.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/lib/storage/src/sqlite.rs b/src/lib/storage/src/sqlite.rs
index 44b4799a..aaab27c2 100644
--- a/src/lib/storage/src/sqlite.rs
+++ b/src/lib/storage/src/sqlite.rs
@@ -106,7 +106,7 @@ where
     ) -> Result<bool, StorageError> {
         let conn = self.open_conn()?;
         let json_val: Value =
-            serde_json::to_value(&value).map_err(|e| StorageError::DatabaseError(e.to_string()))?;
+            serde_json::to_value(value).map_err(|e| StorageError::DatabaseError(e.to_string()))?;
         let sql = format!(
             "INSERT INTO \"{t}\" (key, value) VALUES (?1, ?2) ON CONFLICT(key) DO UPDATE SET value = excluded.value",

From a2b92fffc295723e9b5c2aea909f306089babc6c Mon Sep 17 00:00:00 2001
From: unknown
Date: Sat, 13 Sep 2025 14:06:14 -0300
Subject: [PATCH 11/11] fix: cargo clippy

---
 src/lib/storage/src/lmdb.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/lib/storage/src/lmdb.rs b/src/lib/storage/src/lmdb.rs
index d6c86ea0..24c9b416 100644
--- a/src/lib/storage/src/lmdb.rs
+++ b/src/lib/storage/src/lmdb.rs
@@ -173,7 +173,7 @@ impl Database for LmdbBackend {
         let db: heed::Database, Bytes> =
             env.open_database(&rw_txn, Some(table))?
                 .ok_or(StorageError::TableError("Table not found".to_string()))?;
-        db.put(&mut rw_txn, &key, &value)?;
+        db.put(&mut rw_txn, &key, value)?;
         rw_txn.commit()?;
         Ok(true)
     }