Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ libc = { version = "0.2.150", optional = true }
bincode = { version = "2.0.1", default-features = false, features = ["alloc", "derive", "std"], optional = true }
borsh = { version = "1.5", default-features = false, features = ["derive"] }
hex = { version = "0.4.3", optional = true }
rusqlite = { version = "0.31", features = ["bundled"], optional = true }

# optional wasm feature
wasm-bindgen = { version ="0.2.100", optional = true }
Expand All @@ -33,3 +34,4 @@ default = ["std", "extras"]
std = ["libc", "hex", "bincode"]
wasm = ["wasm-bindgen", "js-sys"]
extras = []
hash-idx = ["std", "rusqlite"]
92 changes: 89 additions & 3 deletions src/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ pub struct Database<H: NodeHasher> {
pub(crate) header: Arc<Mutex<DatabaseHeader>>,
pub(crate) file: Arc<Box<dyn StorageBackend>>,
pub config: Configuration<H>,
pub(crate) path: Option<String>,
}

#[derive(Clone, Encode, Decode, Debug, Eq, PartialEq, Hash)]
Expand Down Expand Up @@ -101,6 +102,10 @@ impl DatabaseHeader {

impl Database<Sha256Hasher> {
/// Opens (creating if necessary) the database file at `path` with the
/// standard configuration. Equivalent to calling [`Self::open_with_config`]
/// with `Configuration::standard()`.
pub fn open(path: &str) -> Result<Self> {
    let config = Configuration::standard();
    Self::open_with_config(path, config)
}

pub fn open_with_config(path: &str, config: Configuration<Sha256Hasher>) -> Result<Self> {
let mut opts = OpenOptions::new();
opts.read(true).write(true).create(true);

Expand All @@ -112,9 +117,10 @@ impl Database<Sha256Hasher> {
}

let file = opts.open(path).map_err(crate::Error::IO)?;
let config = Configuration::standard();
let backend = FileBackend::new(file)?;
Self::new(Box::new(backend), config)
let mut db = Self::new(Box::new(backend), config)?;
db.path = Some(path.to_string());
Ok(db)
}

pub fn open_read_only(path: &str) -> Result<Self> {
Expand All @@ -123,7 +129,9 @@ impl Database<Sha256Hasher> {
.open(path)
.map_err(crate::Error::IO)?;
let config = Configuration::standard();
Self::new(Box::new(FileBackend::read_only(file)), config)
let mut db = Self::new(Box::new(FileBackend::read_only(file)), config)?;
db.path = Some(path.to_string());
Ok(db)
}

pub fn memory() -> Result<Self> {
Expand All @@ -150,6 +158,7 @@ impl<H: NodeHasher> Database<H> {
header: Arc::new(Mutex::new(header)),
file: Arc::new(file),
config,
path: None,
};

if !has_header {
Expand Down Expand Up @@ -212,9 +221,86 @@ impl<H: NodeHasher> Database<H> {
*header = DatabaseHeader::new();
self.write_header(&header)?;
self.file.set_len(header.len())?;
Self::cleanup_hash_indexes(&self.path, 0);
Ok(())
}

/// Deletes hash index sidecar files whose root offset >= min_offset.
/// Pass min_offset=0 to delete all index files.
pub fn cleanup_hash_indexes(db_path: &Option<String>, min_offset: u64) {
let db_path = match db_path {
Some(p) => p,
None => return,
};
let path = std::path::Path::new(db_path);
let stem = match path.file_stem().and_then(|s| s.to_str()) {
Some(s) => s.to_string(),
None => return,
};
let parent = path.parent().unwrap_or(std::path::Path::new("."));
let prefix = format!("{}.", stem);
let suffix = ".hidx.sqlite";

if let Ok(entries) = std::fs::read_dir(parent) {
for entry in entries.flatten() {
let name = entry.file_name();
let name_str = match name.to_str() {
Some(s) => s,
None => continue,
};
if let Some(rest) = name_str.strip_prefix(&prefix) {
if let Some(offset_str) = rest.strip_suffix(suffix) {
if let Ok(offset) = offset_str.parse::<u64>() {
if offset >= min_offset {
let _ = std::fs::remove_file(entry.path());
}
}
}
}
}
}
}

/// Deletes all hash index sidecar files (`<stem>.<offset>.hidx.sqlite`) for
/// this database except those whose root offset matches one of the snapshots
/// in `keep`.
///
/// Best-effort: a missing path, unreadable directory, non-UTF-8 names, and
/// failed removals are silently ignored.
#[cfg(feature = "hash-idx")]
pub fn retain_hash_indexes(&self, keep: &[&ReadTransaction<H>]) {
    // In-memory databases carry no path and therefore have no sidecar files.
    let db_path = match &self.path {
        Some(p) => p,
        None => return,
    };
    let path = std::path::Path::new(db_path);
    let stem = match path.file_stem().and_then(|s| s.to_str()) {
        Some(s) => s,
        None => return,
    };
    // BUG FIX: for a bare file name like "db.mdx", `Path::parent()` returns
    // Some("") and `read_dir("")` fails, so the sweep silently skipped
    // databases opened in the current directory. Map the empty parent and
    // `None` to ".".
    let parent = match path.parent() {
        Some(p) if !p.as_os_str().is_empty() => p,
        _ => std::path::Path::new("."),
    };
    let prefix = format!("{}.", stem);
    let suffix = ".hidx.sqlite";

    // Root offsets that must survive the sweep.
    let keep_offsets: std::collections::HashSet<u64> =
        keep.iter().map(|tx| tx.root_offset()).collect();

    if let Ok(entries) = std::fs::read_dir(parent) {
        for entry in entries.flatten() {
            let name = entry.file_name();
            // Sidecar names are always valid UTF-8; skip anything that isn't.
            let name_str = match name.to_str() {
                Some(s) => s,
                None => continue,
            };
            // Only files shaped exactly like "<stem>.<u64>.hidx.sqlite" qualify.
            if let Some(offset_str) = name_str
                .strip_prefix(&prefix)
                .and_then(|rest| rest.strip_suffix(suffix))
            {
                if let Ok(offset) = offset_str.parse::<u64>() {
                    if !keep_offsets.contains(&offset) {
                        // Best-effort removal; a leftover stale index is harmless.
                        let _ = std::fs::remove_file(entry.path());
                    }
                }
            }
        }
    }
}

/// Starts a new write transaction borrowing this database for its lifetime.
pub fn begin_write(&self) -> Result<WriteTransaction<'_, H>> {
    let tx = WriteTransaction::new(self);
    Ok(tx)
}
Expand Down
7 changes: 7 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ pub type Result<T> = core::result::Result<T, Error>;
/// Tuning options for a database instance, parameterized by the node hasher.
#[derive(Clone, Debug)]
pub struct Configuration<Hasher: NodeHasher> {
    // Cache capacity; defaults to DEFAULT_CACHE_SIZE in `new()`.
    // NOTE(review): units (entries vs. bytes) not visible here — confirm at usage sites.
    pub cache_size: usize,
    // When true, hash index sidecar files are maintained automatically.
    // Presumably only honored when the `hash-idx` feature is enabled — verify consumers.
    pub auto_hash_index: bool,
    // Zero-sized marker binding this configuration to a concrete hasher type.
    _marker: PhantomData<Hasher>,
}

Expand Down Expand Up @@ -100,6 +101,7 @@ impl<Hasher: NodeHasher> Configuration<Hasher> {
/// Creates a configuration with the defaults: `DEFAULT_CACHE_SIZE` for the
/// cache and automatic hash indexing disabled.
pub fn new() -> Self {
    Self {
        auto_hash_index: false,
        cache_size: DEFAULT_CACHE_SIZE,
        _marker: PhantomData,
    }
}
Expand All @@ -108,6 +110,11 @@ impl<Hasher: NodeHasher> Configuration<Hasher> {
self.cache_size = size;
self
}

pub fn with_auto_hash_index(mut self, enabled: bool) -> Self {
self.auto_hash_index = enabled;
self
}
}

pub trait NodeHasher: Clone {
Expand Down
Loading
Loading