From a69452da5d644e2b7d087b1c5702926ba99a5c52 Mon Sep 17 00:00:00 2001
From: Ruediger Klaehn
Date: Mon, 8 Apr 2024 10:29:42 +0300
Subject: [PATCH] do not write u64_le size prefix

this makes the encoding fully incompatible with 0.12 and requires a new
version!
---
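With the u64_le prefix gone, the total data size has to travel out of band: the
receiver rebuilds the tree geometry via `BaoTree::new(size, block_size)` before
decoding. Below is a minimal sketch of the new calling convention using the sync
API. The manual 8-byte framing, the chunk-log-4 block size, and the use of
`PreOrderMemOutboard::create` are illustrative assumptions for this note, not
code from this patch:

```rust
use bao_tree::{
    io::{
        outboard::PreOrderMemOutboard,
        sync::{decode_ranges, encode_ranges_validated},
    },
    BaoTree, BlockSize, ChunkRanges,
};

fn main() {
    // Both sides agree on the block size ahead of time.
    let block_size = BlockSize::from_chunk_log(4);
    let data = vec![0u8; 100_000];

    // Sender: build an outboard and encode the requested ranges.
    // Note: no size prefix is written anymore.
    let outboard = PreOrderMemOutboard::create(&data, block_size);
    let ranges = ChunkRanges::all();
    let mut encoded = Vec::new();
    encode_ranges_validated(&data, &outboard, &ranges, &mut encoded).unwrap();

    // Ship the size explicitly, e.g. as a hypothetical 8-byte header
    // (this is exactly the u64_le prefix that 0.12 used to write for you).
    let mut wire = (data.len() as u64).to_le_bytes().to_vec();
    wire.extend_from_slice(&encoded);

    // Receiver: read the size first, then derive the tree geometry from it.
    let (size_bytes, body) = wire.split_at(8);
    let size = u64::from_le_bytes(size_bytes.try_into().unwrap());
    let mut ob = PreOrderMemOutboard {
        root: outboard.root,
        tree: BaoTree::new(size, block_size),
        data: Vec::new(),
    };
    let mut decoded = Vec::new();
    decode_ranges(std::io::Cursor::new(body), &ranges, &mut decoded, &mut ob).unwrap();
    assert_eq!(decoded, data);
}
```

The prefix was redundant for callers that already exchange the size in a
higher-level protocol; those callers now pay nothing, and everyone else frames
the size themselves as above.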
 examples/encode_decode_async.rs |  3 +-
 examples/encode_decode_sync.rs  |  2 +-
 src/io/fsm.rs                   | 38 ++++++++++--------------
 src/io/sync.rs                  | 38 ++++++++++++------------
 src/lib.rs                      |  5 ++--
 src/rec.rs                      |  7 +++--
 src/tests.rs                    | 52 +++++++++++++--------------------
 src/tests2.rs                   | 26 ++++++-----------
 8 files changed, 73 insertions(+), 98 deletions(-)

diff --git a/examples/encode_decode_async.rs b/examples/encode_decode_async.rs
index 3bd84c8..a9c7ede 100644
--- a/examples/encode_decode_async.rs
+++ b/examples/encode_decode_async.rs
@@ -16,7 +16,7 @@ const BLOCK_SIZE: BlockSize = BlockSize::from_chunk_log(4);
 #[tokio::main]
 async fn main() -> io::Result<()> {
     // The file we want to serve
-    let mut file = tokio::fs::File::open("video.mp4").await?;
+    let mut file = iroh_io::File::open("video.mp4".into()).await?;
     // Create an outboard for the file, using the current size
     let mut ob = PreOrderOutboard::<BytesMut>::create(&mut file, BLOCK_SIZE).await?;
     // Encode the first 100000 bytes of the file
@@ -24,7 +24,6 @@ async fn main() -> io::Result<()> {
     let ranges = round_up_to_chunks(&ranges);
     // Stream of data to client. Needs to implement `io::Write`. We just use a vec here.
     let mut to_client = BytesMut::new();
-    let file = iroh_io::File::open("video.mp4".into()).await?;
     encode_ranges_validated(file, &mut ob, &ranges, &mut to_client).await?;
 
     // Stream of data from client. Needs to implement `io::Read`. We just wrap the vec in a cursor.
diff --git a/examples/encode_decode_sync.rs b/examples/encode_decode_sync.rs
index a64825d..aa84568 100644
--- a/examples/encode_decode_sync.rs
+++ b/examples/encode_decode_sync.rs
@@ -35,7 +35,7 @@ fn main() -> io::Result<()> {
         root,
         data: vec![],
     };
-    decode_ranges(&ranges, from_server, &mut decoded, &mut ob)?;
+    decode_ranges(from_server, &ranges, &mut decoded, &mut ob)?;
 
     // the first 100000 bytes of the file should now be in `decoded`
     // in addition, the required part of the tree to validate that the data is
diff --git a/src/io/fsm.rs b/src/io/fsm.rs
index 0053b23..79903b9 100644
--- a/src/io/fsm.rs
+++ b/src/io/fsm.rs
@@ -20,7 +20,7 @@ use blake3::guts::parent_cv;
 use bytes::{Bytes, BytesMut};
 use iroh_io::AsyncStreamWriter;
 use smallvec::SmallVec;
-use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncWrite, AsyncWriteExt};
+use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
 
 pub use super::BaoContentItem;
 use crate::{
@@ -104,16 +104,11 @@ pub trait CreateOutboard {
     /// This requires the outboard to have a default implementation, which is
     /// the case for the memory implementations.
     #[allow(async_fn_in_trait)]
-    async fn create(
-        mut data: impl AsyncRead + AsyncSeek + Unpin,
-        block_size: BlockSize,
-    ) -> io::Result<Self>
+    async fn create(mut data: impl AsyncSliceReader, block_size: BlockSize) -> io::Result<Self>
     where
         Self: Default + Sized,
     {
-        use tokio::io::AsyncSeekExt;
-        let size = data.seek(io::SeekFrom::End(0)).await?;
-        data.rewind().await?;
+        let size = data.len().await?;
         Self::create_sized(data, size, block_size).await
     }
 
     /// create an outboard from a data source. This requires the outboard to
     /// have a default implementation, which is the case for the memory
     /// implementations.
     fn create_sized(
-        data: impl AsyncRead + Unpin,
+        data: impl AsyncSliceReader,
         size: u64,
         block_size: BlockSize,
     ) -> impl Future<Output = io::Result<Self>>
@@ -135,7 +130,7 @@ pub trait CreateOutboard {
     /// such as a file based one. It also does not require [AsyncSeek] on the data.
     ///
     /// It will however only include data up the the current tree size.
-    fn init_from(&mut self, data: impl AsyncRead + Unpin) -> impl Future<Output = io::Result<()>>;
+    fn init_from(&mut self, data: impl AsyncSliceReader) -> impl Future<Output = io::Result<()>>;
 }
 
 impl<'b, O: Outboard> Outboard for &'b mut O {
@@ -235,7 +230,7 @@ impl OutboardMut for PostOrderOutboard {
 
 impl CreateOutboard for PreOrderOutboard {
     async fn create_sized(
-        data: impl AsyncRead + Unpin,
+        data: impl AsyncSliceReader,
         size: u64,
         block_size: BlockSize,
     ) -> io::Result<Self>
@@ -250,7 +245,7 @@ impl CreateOutboard for PreOrderOutboard {
         Ok(res)
     }
 
-    async fn init_from(&mut self, data: impl AsyncRead + Unpin) -> io::Result<()> {
+    async fn init_from(&mut self, data: impl AsyncSliceReader) -> io::Result<()> {
         let mut this = self;
         let root = outboard(data, this.tree, &mut this).await?;
         this.root = root;
@@ -261,7 +256,7 @@ impl CreateOutboard for PostOrderOutboard {
     async fn create_sized(
-        data: impl AsyncRead + Unpin,
+        data: impl AsyncSliceReader,
         size: u64,
         block_size: BlockSize,
     ) -> io::Result<Self>
@@ -276,7 +271,7 @@ impl CreateOutboard for PostOrderOutboard {
         Ok(res)
     }
 
-    async fn init_from(&mut self, data: impl AsyncRead + Unpin) -> io::Result<()> {
+    async fn init_from(&mut self, data: impl AsyncSliceReader) -> io::Result<()> {
         let mut this = self;
         let root = outboard(data, this.tree, &mut this).await?;
         this.root = root;
@@ -477,8 +472,6 @@ where
 {
     let mut encoded = encoded;
     let tree = outboard.tree();
-    // write header
-    encoded.write(tree.size.to_le_bytes().as_slice()).await?;
     for item in tree.ranges_pre_order_chunks_iter_ref(ranges, 0) {
         match item {
             BaoChunk::Parent { node, .. } => {
@@ -530,8 +523,6 @@ where
     let mut encoded = encoded;
     let tree = outboard.tree();
     let ranges = truncate_ranges(ranges, tree.size());
-    // write header
-    encoded.write(tree.size.to_le_bytes().as_slice()).await?;
     for item in tree.ranges_pre_order_chunks_iter_ref(ranges, 0) {
         match item {
             BaoChunk::Parent {
@@ -648,7 +639,7 @@ fn read_parent(buf: &[u8]) -> (blake3::Hash, blake3::Hash) {
 /// Unlike [outboard_post_order], this will work with any outboard
 /// implementation, but it is not guaranteed that writes are sequential.
 pub async fn outboard(
-    data: impl AsyncRead + Unpin,
+    data: impl AsyncSliceReader,
     tree: BaoTree,
     mut outboard: impl OutboardMut,
 ) -> io::Result<blake3::Hash> {
@@ -660,11 +651,12 @@ pub async fn outboard(
 /// Internal helper for [outboard_post_order]. This takes a buffer of the chunk group size.
 async fn outboard_impl(
     tree: BaoTree,
-    mut data: impl AsyncRead + Unpin,
+    mut data: impl AsyncSliceReader,
     mut outboard: impl OutboardMut,
     buffer: &mut [u8],
 ) -> io::Result<blake3::Hash> {
     // do not allocate for small trees
+    let mut offset: u64 = 0;
     let mut stack = SmallVec::<[blake3::Hash; 10]>::new();
     debug_assert!(buffer.len() == tree.chunk_group_bytes());
     for item in tree.post_order_chunks_iter() {
@@ -682,9 +674,9 @@ async fn outboard_impl(
                 start_chunk,
                 ..
             } => {
-                let buf = &mut buffer[..size];
-                data.read_exact(buf).await?;
-                let hash = hash_subtree(start_chunk.0, buf, is_root);
+                let buf = data.read_at(offset, size).await?;
+                offset += size as u64;
+                let hash = hash_subtree(start_chunk.0, &buf, is_root);
                 stack.push(hash);
             }
         }
diff --git a/src/io/sync.rs b/src/io/sync.rs
index c7f63dd..b30815f 100644
--- a/src/io/sync.rs
+++ b/src/io/sync.rs
@@ -3,7 +3,7 @@
 //! The traits to perform positioned io are re-exported from
 //! [positioned-io](https://crates.io/crates/positioned-io).
 use std::{
-    io::{self, Read, Seek, Write},
+    io::{self, Read, Write},
     result,
 };
 
@@ -73,19 +73,23 @@ pub trait OutboardMut: Sized {
 /// In complex real applications, you might want to do this manually.
 pub trait CreateOutboard {
     /// Create an outboard from a data source.
-    fn create(mut data: impl Read + Seek, block_size: BlockSize) -> io::Result<Self>
+    fn create(data: impl ReadAt + Size, block_size: BlockSize) -> io::Result<Self>
     where
         Self: Default + Sized,
     {
-        let size = data.seek(io::SeekFrom::End(0))?;
-        data.rewind()?;
+        let Some(size) = data.size()? else {
+            return Err(io::Error::new(
+                io::ErrorKind::UnexpectedEof,
+                "unable to measure the size",
+            ));
+        };
         Self::create_sized(data, size, block_size)
     }
 
     /// create an outboard from a data source. This requires the outboard to
     /// have a default implementation, which is the case for the memory
     /// implementations.
-    fn create_sized(data: impl Read, size: u64, block_size: BlockSize) -> io::Result<Self>
+    fn create_sized(data: impl ReadAt, size: u64, block_size: BlockSize) -> io::Result<Self>
     where
         Self: Default + Sized;
 
@@ -96,7 +100,7 @@ pub trait CreateOutboard {
     /// such as a file based one. It also does not require [Seek] on the data.
     ///
     /// It will however only include data up the the current tree size.
-    fn init_from(&mut self, data: impl Read) -> io::Result<()>;
+    fn init_from(&mut self, data: impl ReadAt) -> io::Result<()>;
 }
 
 impl<O: OutboardMut> OutboardMut for &mut O {
@@ -172,7 +176,7 @@ impl OutboardMut for PreOrderOutboard {
 }
 
 impl CreateOutboard for PreOrderOutboard {
-    fn create_sized(data: impl Read, size: u64, block_size: BlockSize) -> io::Result<Self>
+    fn create_sized(data: impl ReadAt, size: u64, block_size: BlockSize) -> io::Result<Self>
     where
         Self: Default + Sized,
     {
@@ -186,7 +190,7 @@ impl CreateOutboard for PreOrderOutboard {
         Ok(res)
     }
 
-    fn init_from(&mut self, data: impl Read) -> io::Result<()> {
+    fn init_from(&mut self, data: impl ReadAt) -> io::Result<()> {
         let mut this = self;
         let root = outboard(data, this.tree, &mut this)?;
         this.root = root;
@@ -196,7 +200,7 @@ impl CreateOutboard for PreOrderOutboard {
 }
 
 impl CreateOutboard for PostOrderOutboard {
-    fn create_sized(data: impl Read, size: u64, block_size: BlockSize) -> io::Result<Self>
+    fn create_sized(data: impl ReadAt, size: u64, block_size: BlockSize) -> io::Result<Self>
     where
         Self: Default + Sized,
     {
@@ -210,7 +214,7 @@ impl CreateOutboard for PostOrderOutboard {
         Ok(res)
     }
 
-    fn init_from(&mut self, data: impl Read) -> io::Result<()> {
+    fn init_from(&mut self, data: impl ReadAt) -> io::Result<()> {
         let mut this = self;
         let root = outboard(data, this.tree, &mut this)?;
         this.root = root;
@@ -387,8 +391,6 @@ pub fn encode_ranges(
     let mut encoded = encoded;
     let tree = outboard.tree();
     let mut buffer = vec![0u8; tree.chunk_group_bytes()];
-    // write header
-    encoded.write_all(tree.size.to_le_bytes().as_slice())?;
     for item in tree.ranges_pre_order_chunks_iter_ref(ranges, 0) {
         match item {
             BaoChunk::Parent { node, .. } => {
@@ -431,8 +433,6 @@ pub fn encode_ranges_validated(
     let mut out_buf = Vec::new();
     // canonicalize ranges
     let ranges = truncate_ranges(ranges, tree.size());
-    // write header
-    encoded.write_all(tree.size.to_le_bytes().as_slice())?;
     for item in tree.ranges_pre_order_chunks_iter_ref(ranges, 0) {
         match item {
             BaoChunk::Parent {
@@ -504,8 +504,8 @@ pub fn encode_ranges_validated(
 /// If you do not want to update an outboard, use [super::outboard::EmptyOutboard] as
 /// the outboard.
 pub fn decode_ranges(
-    ranges: &ChunkRangesRef,
     encoded: R,
+    ranges: &ChunkRangesRef,
     mut target: W,
     mut outboard: O,
 ) -> std::result::Result<(), DecodeError>
@@ -533,7 +533,7 @@ where
 /// Unlike [outboard_post_order], this will work with any outboard
 /// implementation, but it is not guaranteed that writes are sequential.
 pub fn outboard(
-    data: impl Read,
+    data: impl ReadAt,
     tree: BaoTree,
     mut outboard: impl OutboardMut,
 ) -> io::Result<blake3::Hash> {
@@ -545,12 +545,13 @@ pub fn outboard(
 /// Internal helper for [outboard_post_order]. This takes a buffer of the chunk group size.
 fn outboard_impl(
     tree: BaoTree,
-    mut data: impl Read,
+    data: impl ReadAt,
     mut outboard: impl OutboardMut,
     buffer: &mut [u8],
 ) -> io::Result<blake3::Hash> {
     // do not allocate for small trees
     let mut stack = SmallVec::<[blake3::Hash; 10]>::new();
+    let mut offset = 0;
     debug_assert!(buffer.len() == tree.chunk_group_bytes());
     for item in tree.post_order_chunks_iter() {
         match item {
@@ -568,7 +569,8 @@ fn outboard_impl(
                 ..
             } => {
                 let buf = &mut buffer[..size];
-                data.read_exact(buf)?;
+                data.read_exact_at(offset, buf)?;
+                offset += size as u64;
                 let hash = hash_subtree(start_chunk.0, buf, is_root);
                 stack.push(hash);
             }
diff --git a/src/lib.rs b/src/lib.rs
index cab43ca..2a34163 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -65,13 +65,12 @@
 //! knowledge of the tree geometry (total data size and block size). A common
 //! way to get this information is to have the block size as a common parameter
 //! of both sides, and send the total data size as a prefix of the encoded data.
+//! E.g. the original bao crate uses a little endian u64 as the prefix.
 //!
 //! This function will perform validation in any case, there is no variant
 //! that skips validation since that would defeat the purpose of verified
 //! streaming.
 //!
-//! The original bao crate uses a little endian u64 as the prefix.
-//!
 //! ## Simple end to end example
 //!
 //! ```no_run
@@ -112,7 +111,7 @@
 //!     root,
 //!     data: vec![],
 //! };
-//! decode_ranges(&ranges, from_server, &mut decoded, &mut ob)?;
+//! decode_ranges(from_server, &ranges, &mut decoded, &mut ob)?;
 //!
 //! // the first 100000 bytes of the file should now be in `decoded`
 //! // in addition, the required part of the tree to validate that the data is
diff --git a/src/rec.rs b/src/rec.rs
index 5244729..dc30ee4 100644
--- a/src/rec.rs
+++ b/src/rec.rs
@@ -425,7 +425,6 @@ mod test_support {
     ) -> (Vec<u8>, blake3::Hash) {
         let mut res = Vec::new();
         let size = data.len() as u64;
-        res.extend_from_slice(&size.to_le_bytes());
         // canonicalize the ranges
         let ranges = truncate_ranges(ranges, size);
         let hash = encode_selected_rec(
@@ -534,7 +533,8 @@ mod tests {
         let chunk_start = ChunkNum::full_chunks(start as u64);
         let chunk_end = ChunkNum::chunks(end as u64).max(chunk_start + 1);
         let ranges = ChunkRanges::from(chunk_start..chunk_end);
-        let actual_encoded = encode_ranges_reference(&data, &ranges, BlockSize::ZERO).0;
+        let mut actual_encoded = encode_ranges_reference(&data, &ranges, BlockSize::ZERO).0;
+        actual_encoded.splice(..0, size.to_le_bytes().into_iter());
         prop_assert_eq!(expected_encoded, actual_encoded);
     }
 
@@ -549,7 +549,8 @@ mod tests {
         let chunk_start = ChunkNum::full_chunks(start as u64);
         let chunk_end = ChunkNum::chunks(end as u64).max(chunk_start + 1);
         let ranges = ChunkRanges::from(chunk_start..chunk_end);
-        let (encoded, hash) = encode_ranges_reference(&data, &ranges, BlockSize::ZERO);
+        let (mut encoded, hash) = encode_ranges_reference(&data, &ranges, BlockSize::ZERO);
+        encoded.splice(..0, size.to_le_bytes().into_iter());
         let bao_hash = bao::Hash::from(*hash.as_bytes());
         let mut decoder =
             bao::decode::SliceDecoder::new(Cursor::new(&encoded), &bao_hash, start as u64, 1);
diff --git a/src/tests.rs b/src/tests.rs
index f7055ef..41532e0 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -29,13 +29,6 @@ use super::{
     BaoTree, BlockSize, TreeNode,
 };
 
-fn read_len(mut from: impl std::io::Read) -> std::io::Result<u64> {
-    let mut buf = [0; 8];
-    from.read_exact(&mut buf)?;
-    let len = u64::from_le_bytes(buf);
-    Ok(len)
-}
-
 /// Compute the blake3 hash for the given data,
 ///
 /// using blake3_hash_inner which is used in hash_block.
@@ -50,7 +43,7 @@ fn bao_tree_blake3_impl(data: Vec<u8>) -> (blake3::Hash, blake3::Hash) {
 }
 
 /// Computes a reference pre order outboard using the bao crate (chunk_group_log = 0) and then flips it to a post-order outboard.
-fn post_order_outboard_reference(data: &[u8]) -> PostOrderMemOutboard {
+fn post_order_outboard_bao(data: &[u8]) -> PostOrderMemOutboard {
     let mut outboard = Vec::new();
     let cursor = Cursor::new(&mut outboard);
     let mut encoder = bao::encode::Encoder::new_outboard(cursor);
@@ -67,7 +60,7 @@ fn post_order_outboard_reference(data: &[u8]) -> PostOrderMemOutboard {
     pre.flip()
 }
 
-fn encode_slice_reference(data: &[u8], chunk_range: Range<ChunkNum>) -> (Vec<u8>, blake3::Hash) {
+fn encode_slice_bao(data: &[u8], chunk_range: Range<ChunkNum>) -> (Vec<u8>, blake3::Hash) {
     let (outboard, hash) = bao::encode::outboard(data);
     let slice_start = chunk_range.start.to_bytes();
     let slice_len = (chunk_range.end - chunk_range.start).to_bytes();
@@ -79,6 +72,7 @@ fn encode_slice_reference(data: &[u8], chunk_range: Range<ChunkNum>) -> (Vec<u8
     );
     let mut res = Vec::new();
     encoder.read_to_end(&mut res).unwrap();
+    res.splice(..8, []);
     let hash = blake3::Hash::from(*hash.as_bytes());
     (res, hash)
 }
@@ -88,7 +82,8 @@ fn bao_tree_encode_slice_comparison_impl(data: Vec<u8>, mut range: Range<ChunkN
 fn bao_tree_decode_slice_iter_impl(data: Vec<u8>, range: Range<u64>) {
+    let tree = BaoTree::new(data.len() as u64, BlockSize::ZERO);
     let range = ChunkNum(range.start)..ChunkNum(range.end);
-    let (encoded, root) = encode_slice_reference(&data, range.clone());
+    let (encoded, root) = encode_slice_bao(&data, range.clone());
     let expected = data;
     let ranges = ChunkRanges::from(range);
     let mut ec = Cursor::new(encoded);
-    for item in decode_ranges_into_chunks(root, BlockSize::ZERO, &mut ec, &ranges).unwrap() {
+    for item in decode_ranges_into_chunks(root, tree, &mut ec, &ranges).unwrap() {
         let (pos, slice) = item.unwrap();
         let pos = pos.try_into().unwrap();
         assert_eq!(expected[pos..pos + slice.len()], *slice);
@@ -131,21 +127,19 @@ fn bao_tree_decode_slice_iter_impl(data: Vec<u8>, range: Range<u64>) {
 
 #[cfg(feature = "tokio_fsm")]
 mod fsm_tests {
-    use tokio::io::AsyncReadExt;
 
     use super::*;
     use crate::{io::fsm::*, rec::make_test_data};
 
     /// range is a range of chunks. Just using u64 for convenience in tests
     async fn bao_tree_decode_slice_fsm_impl(data: Vec<u8>, range: Range<u64>) {
+        let tree = BaoTree::new(data.len() as u64, BlockSize::ZERO);
         let range = ChunkNum(range.start)..ChunkNum(range.end);
-        let (encoded, root) = encode_slice_reference(&data, range.clone());
+        let (encoded, root) = encode_slice_bao(&data, range.clone());
         let expected = data;
         let ranges = ChunkRanges::from(range);
-        let mut encoded = Cursor::new(encoded);
-        let size = encoded.read_u64_le().await.unwrap();
-        let mut reading =
-            ResponseDecoder::new(root, ranges, BaoTree::new(size, BlockSize::ZERO), encoded);
+        let encoded = Cursor::new(encoded);
+        let mut reading = ResponseDecoder::new(root, ranges, tree, encoded);
         while let ResponseDecoderNext::More((next_state, item)) = reading.next().await {
             if let BaoContentItem::Leaf(Leaf { offset, data }) = item.unwrap() {
                 let pos = offset.try_into().unwrap();
@@ -182,7 +176,7 @@ mod fsm_tests {
 }
 
 fn bao_tree_outboard_comparison_impl(data: Vec<u8>) {
-    let post1 = post_order_outboard_reference(&data);
+    let post1 = post_order_outboard_bao(&data);
     // let (expected, expected_hash) = post_order_outboard_reference_2(&data);
     let post2 = PostOrderMemOutboard::create(&data, BlockSize::ZERO);
     assert_eq!(post1, post2);
@@ -234,9 +228,8 @@ fn bao_tree_slice_roundtrip_test(data: Vec<u8>, mut range: Range<ChunkNum>, blo
     let expected = data.clone();
     let mut all_ranges: range_collections::RangeSet<[u64; 2]> = RangeSet2::empty();
     let mut ec = Cursor::new(encoded);
-    for item in
-        decode_ranges_into_chunks(root, block_size, &mut ec, &ChunkRanges::from(range)).unwrap()
-    {
+    let tree = BaoTree::new(data.len() as u64, block_size);
+    for item in decode_ranges_into_chunks(root, tree, &mut ec, &ChunkRanges::from(range)).unwrap() {
         let (pos, slice) = item.unwrap();
         // compute all data ranges
         all_ranges |= RangeSet2::from(pos..pos + (slice.len() as u64));
@@ -509,12 +502,10 @@ fn test_pre_order_outboard_fast() {
 /// Decode encoded ranges given the root hash
 pub fn decode_ranges_into_chunks<'a>(
     root: blake3::Hash,
-    block_size: BlockSize,
-    mut encoded: impl Read + 'a,
+    tree: BaoTree,
+    encoded: impl Read + 'a,
     ranges: &'a ChunkRangesRef,
 ) -> std::io::Result<impl Iterator<Item = std::io::Result<(u64, Vec<u8>)>> + 'a> {
-    let size = read_len(&mut encoded)?;
-    let tree = BaoTree::new(size, block_size);
     let iter = DecodeResponseIter::new(root, tree, encoded, ranges);
     Ok(iter.filter_map(|item| match item {
         Ok(item) => {
@@ -691,7 +682,6 @@ fn encode_selected_reference(
     ranges: &ChunkRangesRef,
 ) -> (blake3::Hash, Vec<u8>) {
     let mut res = Vec::new();
-    res.extend_from_slice(&(data.len() as u64).to_le_bytes());
     let max_skip_level = block_size.to_u32();
     let ranges = truncate_ranges(ranges, data.len() as u64);
     let hash = encode_selected_rec(
@@ -725,15 +715,15 @@ fn encode_single_chunk_large() {
     // check the expected size for various ranges
     let ranges = ChunkRanges::from(..ChunkNum(1));
     let encoded = get_encoded(&ranges);
-    assert_eq!(encoded.len(), 8 + 15 * 64 + 1024);
+    assert_eq!(encoded.len(), 15 * 64 + 1024);
 
     let ranges = ChunkRanges::from(ChunkNum(1000)..ChunkNum(1001));
     let encoded = get_encoded(&ranges);
-    assert_eq!(encoded.len(), 8 + 15 * 64 + 1024);
+    assert_eq!(encoded.len(), 15 * 64 + 1024);
 
     let ranges = ChunkRanges::from(ChunkNum(3000)..ChunkNum(3001));
     let encoded = get_encoded(&ranges);
-    assert_eq!(encoded.len(), 8 + 15 * 64 + 1024);
+    assert_eq!(encoded.len(), 15 * 64 + 1024);
 }
 
 fn last_chunk(size: u64) -> Range<ChunkNum> {
@@ -1016,7 +1006,7 @@ proptest! {
     #[test]
     fn flip(len in 0usize..100000) {
         let data = make_test_data(len);
-        let post = post_order_outboard_reference(&data);
+        let post = post_order_outboard_bao(&data);
         prop_assert_eq!(&post, &post.flip().flip());
     }
 
diff --git a/src/tests2.rs b/src/tests2.rs
index a7f7606..d2e5874 100644
--- a/src/tests2.rs
+++ b/src/tests2.rs
@@ -12,7 +12,6 @@ use range_collections::{RangeSet2, RangeSetRef};
 use smallvec::SmallVec;
 use std::ops::Range;
 use test_strategy::proptest;
-use tokio::io::AsyncReadExt;
 
 use crate::io::outboard::PreOrderMemOutboard;
 use crate::io::BaoContentItem;
@@ -29,13 +28,6 @@ use crate::{
     BaoTree, BlockSize, ChunkNum, TreeNode,
 };
 
-fn read_len(mut from: impl std::io::Read) -> std::io::Result<u64> {
-    let mut buf = [0; 8];
-    from.read_exact(&mut buf)?;
-    let len = u64::from_le_bytes(buf);
-    Ok(len)
-}
-
 #[cfg(feature = "validate")]
 use futures_lite::StreamExt;
 
@@ -482,11 +474,11 @@ fn encode_decode_full_sync_impl(
     (Vec<u8>, PostOrderMemOutboard),
 ) {
     let ranges = ChunkRanges::all();
+    let size = outboard.tree.size;
     let mut encoded = Vec::new();
     crate::io::sync::encode_ranges_validated(data, &outboard, &ChunkRanges::all(), &mut encoded)
         .unwrap();
-    let mut encoded_read = std::io::Cursor::new(encoded);
-    let size = read_len(&mut encoded_read).unwrap();
+    let encoded_read = std::io::Cursor::new(encoded);
     let tree = BaoTree::new(size, outboard.tree().block_size());
     let mut decoded = Vec::new();
     let mut ob_res = PostOrderMemOutboard {
@@ -494,7 +486,7 @@ fn encode_decode_full_sync_impl(
         tree,
         data: vec![0; tree.outboard_size().try_into().unwrap()],
     };
-    crate::io::sync::decode_ranges(&ranges, encoded_read, &mut decoded, &mut ob_res).unwrap();
+    crate::io::sync::decode_ranges(encoded_read, &ranges, &mut decoded, &mut ob_res).unwrap();
 
     ((decoded, ob_res), (data.to_vec(), outboard))
 }
@@ -508,6 +500,7 @@ async fn encode_decode_full_fsm_impl(
     (Vec<u8>, PostOrderMemOutboard),
     (Vec<u8>, PostOrderMemOutboard),
 ) {
+    let size = outboard.tree.size;
     let mut outboard = outboard;
     let ranges = ChunkRanges::all();
     let mut encoded = Vec::new();
@@ -520,8 +513,7 @@ async fn encode_decode_full_fsm_impl(
     .await
     .unwrap();
 
-    let mut read_encoded = std::io::Cursor::new(encoded);
-    let size = read_encoded.read_u64_le().await.unwrap();
+    let read_encoded = std::io::Cursor::new(encoded);
     let mut ob_res = {
         let tree = BaoTree::new(size, outboard.tree().block_size());
         let root = outboard.root();
@@ -546,10 +538,10 @@ fn encode_decode_partial_sync_impl(
     ranges: &ChunkRangesRef,
 ) -> bool {
     let mut encoded = Vec::new();
+    let size = outboard.tree.size;
     crate::io::sync::encode_ranges_validated(data, &outboard, ranges, &mut encoded).unwrap();
     let expected_data = data;
-    let mut encoded_read = std::io::Cursor::new(encoded);
-    let size = read_len(&mut encoded_read).unwrap();
+    let encoded_read = std::io::Cursor::new(encoded);
     let tree = BaoTree::new(size, outboard.tree.block_size);
     let iter = crate::io::sync::DecodeResponseIter::new(outboard.root, tree, encoded_read, ranges);
     for item in iter {
@@ -585,6 +577,7 @@ async fn encode_decode_partial_fsm_impl(
     outboard: PostOrderMemOutboard,
     ranges: ChunkRanges,
 ) -> bool {
+    let size = outboard.tree.size;
     let mut encoded = Vec::new();
     let mut outboard = outboard;
     crate::io::fsm::encode_ranges_validated(
@@ -596,8 +589,7 @@ async fn encode_decode_partial_fsm_impl(
     .await
     .unwrap();
     let expected_data = data;
-    let mut encoded_read = std::io::Cursor::new(encoded);
-    let size = encoded_read.read_u64_le().await.unwrap();
+    let encoded_read = std::io::Cursor::new(encoded);
     let mut reading = crate::io::fsm::ResponseDecoder::new(
         outboard.root,
         ranges,
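On the fsm side, the receiving flow after this change is the same as in the
tests above: the `BaoTree` is built from the out-of-band size and handed to
`ResponseDecoder::new`, which no longer reads a length prefix from the stream.
A sketch under those assumptions (test-style `unwrap` error handling, the
chunk-log-4 block size as a shared parameter, and `blake3` assumed re-exported
by bao-tree):

```rust
use bao_tree::{
    blake3,
    io::fsm::{BaoContentItem, ResponseDecoder, ResponseDecoderNext},
    BaoTree, BlockSize, ChunkRanges,
};

/// Decode a full response whose total size arrived out of band.
async fn decode_response(root: blake3::Hash, size: u64, encoded: Vec<u8>) -> Vec<u8> {
    // The tree geometry comes from the out-of-band size, not from the stream.
    let tree = BaoTree::new(size, BlockSize::from_chunk_log(4));
    let mut decoded = vec![0u8; size as usize];
    let mut reading =
        ResponseDecoder::new(root, ChunkRanges::all(), tree, std::io::Cursor::new(encoded));
    while let ResponseDecoderNext::More((next_state, item)) = reading.next().await {
        // Leaves carry (offset, data); parents are only needed for validation.
        if let BaoContentItem::Leaf(leaf) = item.unwrap() {
            let start = leaf.offset as usize;
            decoded[start..start + leaf.data.len()].copy_from_slice(&leaf.data);
        }
        reading = next_state;
    }
    decoded
}
```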