Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

*When editing this file, please respect a line length of 100.*

## [0.36.0] - 2026-05-13

### Added
- Added `streaming_decrypt_with_batch_size` and `DecryptionStream::new_with_batch_size` for
per-stream tuning of decrypt chunk fetch batches.
- Added `DEFAULT_STREAM_DECRYPT_BATCH_SIZE` and `stream_decrypt_batch_size` to expose the default
stream decrypt batch size.

### Changed
- `streaming_decrypt` continues to use `STREAM_DECRYPT_BATCH_SIZE`, but invalid values and `0` now
fall back to the default batch size.

## [0.35.0] - 2026-03-13

### Added
Expand Down
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ license = "GPL-3.0"
name = "self_encryption"
readme = "README.md"
repository = "https://github.com/maidsafe/self_encryption"
version = "0.35.0"
version = "0.36.0"

[features]
default = []
Expand Down
2 changes: 1 addition & 1 deletion nodejs/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ crate-type = ["cdylib"]
hex = "0.4.3"
napi = { version = "2.12.2", default-features = false, features = ["napi4", "napi6", "tokio_rt", "serde-json"] }
napi-derive = "2.12.2"
self_encryption = { version = "0.35.0", path = ".." }
self_encryption = { version = "0.36.0", path = ".." }

[build-dependencies]
napi-build = "2.0.1"
4 changes: 2 additions & 2 deletions src/data_map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ impl DataMap {
/// The algorithm requires this to be a sorted list to allow get_pad_iv_key to obtain the
/// correct pre-encryption hashes for decryption/encryption.
pub fn new(mut keys: Vec<ChunkInfo>) -> Self {
keys.sort_by(|a, b| a.index.cmp(&b.index));
keys.sort_by_key(|a| a.index);
Self {
chunk_identifiers: keys,
child: None,
Expand All @@ -57,7 +57,7 @@ impl DataMap {

/// Creates a new DataMap with a specified child value
pub fn with_child(mut keys: Vec<ChunkInfo>, child: usize) -> Self {
keys.sort_by(|a, b| a.index.cmp(&b.index));
keys.sort_by_key(|a| a.index);
Self {
chunk_identifiers: keys,
child: Some(child),
Expand Down
89 changes: 85 additions & 4 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ pub use xor_name::XorName;
pub use self::{
data_map::{ChunkInfo, DataMap},
error::{Error, Result},
stream_decrypt::{streaming_decrypt, DecryptionStream},
stream_decrypt::{streaming_decrypt, streaming_decrypt_with_batch_size, DecryptionStream},
stream_encrypt::{stream_encrypt, ChunkStream, EncryptionStream},
};
use bytes::Bytes;
Expand All @@ -124,13 +124,28 @@ pub use xor_name;
/// Batch size for streaming decrypt chunk fetching.
///
/// Can be overridden by the `STREAM_DECRYPT_BATCH_SIZE` environment variable.
/// Invalid values and `0` fall back to [`DEFAULT_STREAM_DECRYPT_BATCH_SIZE`].
pub static STREAM_DECRYPT_BATCH_SIZE: LazyLock<usize> = LazyLock::new(|| {
std::env::var("STREAM_DECRYPT_BATCH_SIZE")
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or(10)
.filter(|n| *n > 0)
.unwrap_or(DEFAULT_STREAM_DECRYPT_BATCH_SIZE)
});

/// Fallback number of chunks fetched per batch during streaming decryption,
/// used when no valid override is supplied via the environment.
pub const DEFAULT_STREAM_DECRYPT_BATCH_SIZE: usize = 10;

/// Read the current streaming decrypt batch size.
///
/// Lazily resolves the legacy `STREAM_DECRYPT_BATCH_SIZE` environment
/// variable on first access, falling back to
/// [`DEFAULT_STREAM_DECRYPT_BATCH_SIZE`]. Code that needs explicit
/// per-stream tuning should use [`streaming_decrypt_with_batch_size`]
/// instead of relying on this process-wide value.
pub fn stream_decrypt_batch_size() -> usize {
    let size: usize = *STREAM_DECRYPT_BATCH_SIZE;
    size
}

/// The minimum size (before compression) of data to be self-encrypted, defined as 3B.
pub const MIN_ENCRYPTABLE_BYTES: usize = 3 * MIN_CHUNK_SIZE;

Expand Down Expand Up @@ -547,8 +562,7 @@ where

if !missing_hashes.is_empty() {
let new_chunks = get_chunk_parallel(&missing_hashes)?;
for ((_i, hash), (_j, chunk_data)) in missing_hashes.iter().zip(new_chunks.into_iter())
{
for ((_i, hash), (_j, chunk_data)) in missing_hashes.iter().zip(new_chunks) {
let _ = chunk_cache.insert(*hash, chunk_data);
}
}
Expand Down Expand Up @@ -642,6 +656,7 @@ mod tests {
use crate::test_helpers::random_bytes;
use std::{
io::Write,
process::Command,
sync::{Arc, Mutex},
};
use tempfile::NamedTempFile;
Expand All @@ -668,6 +683,72 @@ mod tests {
DataMap::new(chunks)
}

/// Spawn the current test binary as a child process and assert that the
/// child observes `expected` from `stream_decrypt_batch_size()`.
///
/// A fresh process is required because the batch size is cached in a
/// lazily-initialised static, so the env var must be set before the
/// child first reads it.
///
/// `env_value`: value to set for `STREAM_DECRYPT_BATCH_SIZE` in the child,
/// or `None` to run with the variable removed entirely.
fn assert_stream_decrypt_batch_size_from_env(
    env_value: Option<&str>,
    expected: usize,
) -> Result<()> {
    // Re-run this same test binary, selecting only the ignored child test.
    let mut child = Command::new(std::env::current_exe()?);
    let _ = child
        .arg("--ignored")
        .arg("--exact")
        .arg("tests::stream_decrypt_batch_size_env_child")
        .arg("--nocapture")
        // Tell the child which batch size it should expect to observe.
        .env(
            "SELF_ENCRYPTION_STREAM_DECRYPT_BATCH_SIZE_EXPECTED",
            expected.to_string(),
        );

    if let Some(value) = env_value {
        let _ = child.env("STREAM_DECRYPT_BATCH_SIZE", value);
    } else {
        // Explicitly remove the variable so it cannot leak in from the
        // parent test environment.
        let _ = child.env_remove("STREAM_DECRYPT_BATCH_SIZE");
    }

    let output = child.output()?;
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);

    // The child's own assert_eq! failing makes its exit status non-zero.
    assert!(
        output.status.success(),
        "child env test failed\nstdout:\n{}\nstderr:\n{}",
        stdout,
        stderr
    );
    // Guard against the harness silently skipping the child test (e.g. if
    // its name changes): require the marker line it prints when it runs.
    assert!(
        stdout.contains("stream_decrypt_batch_size_env_child observed"),
        "child env test did not run\nstdout:\n{}\nstderr:\n{}",
        stdout,
        stderr
    );
    Ok(())
}

/// The env override must fall back to the default when the variable is
/// unset, non-numeric, or zero — and be honoured for a valid positive value.
#[test]
fn test_stream_decrypt_batch_size_env_fallbacks() -> Result<()> {
    let cases: [(Option<&str>, usize); 4] = [
        (None, DEFAULT_STREAM_DECRYPT_BATCH_SIZE),
        (Some("not-a-number"), DEFAULT_STREAM_DECRYPT_BATCH_SIZE),
        (Some("0"), DEFAULT_STREAM_DECRYPT_BATCH_SIZE),
        (Some("64"), 64),
    ];
    for (env_value, expected) in cases {
        assert_stream_decrypt_batch_size_from_env(env_value, expected)?;
    }
    Ok(())
}

/// Child-process half of the env-var tests: compares the value reported by
/// `stream_decrypt_batch_size()` against the expectation passed in via
/// `SELF_ENCRYPTION_STREAM_DECRYPT_BATCH_SIZE_EXPECTED`.
///
/// Marked `#[ignore]` so it only runs when spawned explicitly by
/// `assert_stream_decrypt_batch_size_from_env`.
#[test]
#[ignore]
fn stream_decrypt_batch_size_env_child() -> Result<()> {
    // Outside the parent harness the marker variable is absent; succeed
    // without checking anything rather than failing a normal test run.
    let Ok(raw) = std::env::var("SELF_ENCRYPTION_STREAM_DECRYPT_BATCH_SIZE_EXPECTED") else {
        return Ok(());
    };
    let expected = raw.parse::<usize>()?;

    let observed = stream_decrypt_batch_size();
    // The parent asserts on this exact marker line to prove the child ran.
    println!("stream_decrypt_batch_size_env_child observed {observed}");
    assert_eq!(observed, expected);
    Ok(())
}

#[test]
fn test_multiple_levels_of_shrinking() -> Result<()> {
// Create a temp file with random data
Expand Down
Loading
Loading