Skip to content

Commit

Permalink
Array retrieve_ methods now return Vec<> instead of Box<[]>
Browse files Browse the repository at this point in the history
  • Loading branch information
LDeakin committed Jan 20, 2024
1 parent d08767d commit 3023a5c
Show file tree
Hide file tree
Showing 7 changed files with 53 additions and 68 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Changed
- **Breaking**: `Array` `retrieve_` methods now return `Vec<u8>`/`Vec<T>` instead of `Box<[u8]>`/`Box<[T]>`
  - This avoids a potential reallocation in `Vec::into_boxed_slice` (which shrinks capacity to length) when a decoded vector's capacity exceeds its length

## [0.10.0] - 2024-01-17

### Changed
Expand Down
1 change: 0 additions & 1 deletion src/array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -832,7 +832,6 @@ mod tests {
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, // 6
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, // 7
]
.into()
);
}

Expand Down
48 changes: 19 additions & 29 deletions src/array/array_async_readable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,13 +39,11 @@ macro_rules! array_async_retrieve_elements {
bytes.len() / core::mem::size_of::<T>(),
bytes.len(),
)
}
.into_boxed_slice())
})
} else {
let elements = safe_transmute::transmute_many_permissive::<T>(&bytes)
.map_err(TransmuteError::from)?
.to_vec()
.into_boxed_slice();
.to_vec();
Ok(elements)
}
}
Expand All @@ -66,7 +64,7 @@ macro_rules! array_async_retrieve_ndarray {
let length = elements.len();
ndarray::ArrayD::<T>::from_shape_vec(
super::iter_u64_to_usize($shape.iter()),
elements.into_vec(),
elements,
)
.map_err(|_| {
ArrayError::CodecError(crate::array::codec::CodecError::UnexpectedChunkDecodedSize(
Expand Down Expand Up @@ -104,10 +102,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
///
/// # Panics
/// Panics if the number of elements in the chunk exceeds `usize::MAX`.
pub async fn async_retrieve_chunk(
&self,
chunk_indices: &[u64],
) -> Result<Box<[u8]>, ArrayError> {
pub async fn async_retrieve_chunk(&self, chunk_indices: &[u64]) -> Result<Vec<u8>, ArrayError> {
let storage_handle = Arc::new(StorageHandle::new(&*self.storage));
let storage_transformer = self
.storage_transformers()
Expand All @@ -130,7 +125,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
let chunk_decoded_size =
chunk_representation.num_elements_usize() * chunk_representation.data_type().size();
if chunk_decoded.len() == chunk_decoded_size {
Ok(chunk_decoded.into_boxed_slice())
Ok(chunk_decoded)
} else {
Err(ArrayError::UnexpectedChunkDecodedSize(
chunk_decoded.len(),
Expand All @@ -139,9 +134,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
}
} else {
let fill_value = chunk_representation.fill_value().as_ne_bytes();
Ok(fill_value
.repeat(chunk_representation.num_elements_usize())
.into_boxed_slice())
Ok(fill_value.repeat(chunk_representation.num_elements_usize()))
}
}

Expand All @@ -157,7 +150,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
pub async fn async_retrieve_chunk_elements<T: TriviallyTransmutable + Send + Sync>(
&self,
chunk_indices: &[u64],
) -> Result<Box<[T]>, ArrayError> {
) -> Result<Vec<T>, ArrayError> {
array_async_retrieve_elements!(self, async_retrieve_chunk(chunk_indices))
}

Expand Down Expand Up @@ -195,10 +188,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
///
/// # Panics
/// Panics if the number of elements in the chunk exceeds `usize::MAX`.
pub async fn async_retrieve_chunks(
&self,
chunks: &ArraySubset,
) -> Result<Box<[u8]>, ArrayError> {
pub async fn async_retrieve_chunks(&self, chunks: &ArraySubset) -> Result<Vec<u8>, ArrayError> {
if chunks.dimensionality() != self.chunk_grid().dimensionality() {
return Err(ArrayError::InvalidArraySubset(
chunks.clone(),
Expand All @@ -211,7 +201,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
// Retrieve chunk bytes
let num_chunks = chunks.num_elements();
match num_chunks {
0 => Ok(vec![].into_boxed_slice()),
0 => Ok(vec![]),
1 => {
let chunk_indices = chunks.start();
self.async_retrieve_chunk(chunk_indices).await
Expand Down Expand Up @@ -242,7 +232,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
}
#[allow(clippy::transmute_undefined_repr)]
let output: Vec<u8> = unsafe { core::mem::transmute(output) };
Ok(output.into_boxed_slice())
Ok(output)
}
}
}
Expand All @@ -254,7 +244,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
pub async fn async_retrieve_chunks_elements<T: TriviallyTransmutable + Send + Sync>(
&self,
chunks: &ArraySubset,
) -> Result<Box<[T]>, ArrayError> {
) -> Result<Vec<T>, ArrayError> {
array_async_retrieve_elements!(self, async_retrieve_chunks(chunks))
}

Expand Down Expand Up @@ -337,7 +327,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
pub async fn async_retrieve_array_subset(
&self,
array_subset: &ArraySubset,
) -> Result<Box<[u8]>, ArrayError> {
) -> Result<Vec<u8>, ArrayError> {
if array_subset.dimensionality() != self.chunk_grid().dimensionality() {
return Err(ArrayError::InvalidArraySubset(
array_subset.clone(),
Expand All @@ -357,7 +347,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
// Retrieve chunk bytes
let num_chunks = chunks.num_elements();
match num_chunks {
0 => Ok(vec![].into_boxed_slice()),
0 => Ok(vec![]),
1 => {
let chunk_indices = chunks.start();
let chunk_subset = self.chunk_subset(chunk_indices).unwrap();
Expand All @@ -384,7 +374,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
.await?;
#[allow(clippy::transmute_undefined_repr)]
let output: Vec<u8> = unsafe { core::mem::transmute(output) };
Ok(output.into_boxed_slice())
Ok(output)
}
}
_ => {
Expand Down Expand Up @@ -481,7 +471,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
}
#[allow(clippy::transmute_undefined_repr)]
let output: Vec<u8> = unsafe { core::mem::transmute(output) };
Ok(output.into_boxed_slice())
Ok(output)
}
}
}
Expand All @@ -498,7 +488,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
pub async fn async_retrieve_array_subset_elements<T: TriviallyTransmutable + Send + Sync>(
&self,
array_subset: &ArraySubset,
) -> Result<Box<[T]>, ArrayError> {
) -> Result<Vec<T>, ArrayError> {
array_async_retrieve_elements!(self, async_retrieve_array_subset(array_subset))
}

Expand Down Expand Up @@ -539,7 +529,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
&self,
chunk_indices: &[u64],
chunk_subset: &ArraySubset,
) -> Result<Box<[u8]>, ArrayError> {
) -> Result<Vec<u8>, ArrayError> {
let chunk_representation = self.chunk_array_representation(chunk_indices)?;
if !chunk_subset.inbounds(chunk_representation.shape()) {
return Err(ArrayError::InvalidArraySubset(
Expand Down Expand Up @@ -567,7 +557,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
let total_size = decoded_bytes.iter().map(Vec::len).sum::<usize>();
let expected_size = chunk_subset.num_elements_usize() * self.data_type().size();
if total_size == chunk_subset.num_elements_usize() * self.data_type().size() {
Ok(decoded_bytes.concat().into_boxed_slice())
Ok(decoded_bytes.concat())
} else {
Err(ArrayError::UnexpectedChunkDecodedSize(
total_size,
Expand All @@ -588,7 +578,7 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits> Array<TStorage> {
&self,
chunk_indices: &[u64],
chunk_subset: &ArraySubset,
) -> Result<Box<[T]>, ArrayError> {
) -> Result<Vec<T>, ArrayError> {
array_async_retrieve_elements!(
self,
async_retrieve_chunk_subset(chunk_indices, chunk_subset)
Expand Down
3 changes: 1 addition & 2 deletions src/array/array_async_readable_writable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -244,8 +244,7 @@ impl<TStorage: ?Sized + AsyncReadableWritableStorageTraits> Array<TStorage> {
}

// Store the updated chunk
self.async_store_chunk(chunk_indices, chunk_bytes.into_vec())
.await
self.async_store_chunk(chunk_indices, chunk_bytes).await
}
} else {
Err(ArrayError::InvalidChunkGridIndicesError(
Expand Down
Loading

0 comments on commit 3023a5c

Please sign in to comment.