Fix unsafe_op_in_unsafe_fn lint
- Enable `unsafe_op_in_unsafe_fn` for all crates in workspace
- Fix some non-exhaustive safety invariants
- Start adding safety docs to internal unsafe
- Fix some other fmt/clippy inconsistencies noticed in Rust 2024
LDeakin committed Nov 28, 2024
1 parent 09d6aac commit 7da0d30
Showing 20 changed files with 210 additions and 142 deletions.
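
As background for the changes below, here is a minimal, self-contained sketch of what the `unsafe_op_in_unsafe_fn` lint asks for (the `read_at` function and its pointer arithmetic are invented for illustration, not taken from zarrs): with the lint enabled, the body of an `unsafe fn` is no longer treated as one implicit `unsafe` block, so each unsafe operation needs its own explicit `unsafe { }` block, typically justified by a `// SAFETY:` comment.

```rust
#![warn(unsafe_op_in_unsafe_fn)]

/// Reads the byte `offset` elements past `ptr`.
///
/// # Safety
/// `ptr.add(offset)` must be valid for reads.
unsafe fn read_at(ptr: *const u8, offset: usize) -> u8 {
    // Without the lint, this body is an implicit `unsafe` block; with it
    // enabled, the unsafe operation must be wrapped explicitly.
    // SAFETY: the caller guarantees `ptr.add(offset)` is valid for reads.
    unsafe { *ptr.add(offset) }
}

fn main() {
    let data = [1u8, 2, 3];
    // SAFETY: `data` has 3 elements, so offset 2 is in bounds.
    let last = unsafe { read_at(data.as_ptr(), 2) };
    assert_eq!(last, 3);
}
```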
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -10,6 +10,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed
- Bump `zfp-sys` to 0.3.0

### Fixed
- Fix `unsafe_op_in_unsafe_fn` lint

## [0.18.0] - 2024-11-23

### Announcements
1 change: 1 addition & 0 deletions Cargo.toml
@@ -18,6 +18,7 @@ unused_variables = "warn"
dead_code = "warn"
missing_docs = "warn"
unreachable_pub = "warn"
unsafe_op_in_unsafe_fn = "warn"

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -1 }
33 changes: 17 additions & 16 deletions zarrs/src/array.rs
@@ -785,22 +785,23 @@ impl<TStorage: ?Sized> Array<TStorage> {
/// # Errors
/// Returns a [`ArrayMetadataV2ToV3ConversionError`] if the metadata is not compatible with Zarr V3 metadata.
pub fn to_v3(self) -> Result<Self, ArrayMetadataV2ToV3ConversionError> {
if let ArrayMetadata::V2(metadata) = self.metadata {
let metadata: ArrayMetadata = array_metadata_v2_to_v3(&metadata)?.into();
Ok(Self {
storage: self.storage,
path: self.path,
data_type: self.data_type,
chunk_grid: self.chunk_grid,
chunk_key_encoding: self.chunk_key_encoding,
fill_value: self.fill_value,
codecs: self.codecs,
storage_transformers: self.storage_transformers,
dimension_names: self.dimension_names,
metadata,
})
} else {
Ok(self)
match self.metadata {
ArrayMetadata::V2(metadata) => {
let metadata: ArrayMetadata = array_metadata_v2_to_v3(&metadata)?.into();
Ok(Self {
storage: self.storage,
path: self.path,
data_type: self.data_type,
chunk_grid: self.chunk_grid,
chunk_key_encoding: self.chunk_key_encoding,
fill_value: self.fill_value,
codecs: self.codecs,
storage_transformers: self.storage_transformers,
dimension_names: self.dimension_names,
metadata,
})
}
ArrayMetadata::V3(_) => Ok(self),
}
}
}
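
The `to_v3` hunk above replaces an `if let ... else` with an exhaustive `match` over the metadata enum. A rough sketch of the motivation, using a hypothetical two-variant enum rather than the real `ArrayMetadata` type: if another variant is added later, the exhaustive `match` becomes a compile error until it is handled, whereas an `else` arm would silently absorb it.

```rust
// Hypothetical stand-in for ArrayMetadata, not the zarrs definition.
enum Metadata {
    V2(String),
    V3(String),
}

fn describe(metadata: &Metadata) -> String {
    // Exhaustive: adding a `V4` variant later is a compile error here,
    // forcing the conversion logic to be revisited.
    match metadata {
        Metadata::V2(s) => format!("v2: {s}"),
        Metadata::V3(s) => format!("v3: {s}"),
    }
}

fn main() {
    let m = Metadata::V2("array".to_string());
    println!("{}", describe(&m));
}
```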
66 changes: 37 additions & 29 deletions zarrs/src/array/array_async_readable.rs
@@ -360,25 +360,29 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits + 'static> Array<TStorage> {
if let Some(chunk_encoded) = chunk_encoded {
let chunk_encoded: Vec<u8> = chunk_encoded.into();
let chunk_representation = self.chunk_array_representation(chunk_indices)?;
self.codecs()
.decode_into(
Cow::Owned(chunk_encoded),
&chunk_representation,
unsafe {
self.codecs()
.decode_into(
Cow::Owned(chunk_encoded),
&chunk_representation,
output,
output_shape,
output_subset,
options,
)
.map_err(ArrayError::CodecError)
}
} else {
unsafe {
copy_fill_value_into(
self.data_type(),
self.fill_value(),
output,
output_shape,
output_subset,
options,
)
.map_err(ArrayError::CodecError)
} else {
copy_fill_value_into(
self.data_type(),
self.fill_value(),
output,
output_shape,
output_subset,
)
.map_err(ArrayError::CodecError)
}
}
}

@@ -790,14 +794,16 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits + 'static> Array<TStorage> {
&& chunk_subset.shape() == chunk_representation.shape_u64()
{
// Fast path if `chunk_subset` encompasses the whole chunk
self.async_retrieve_chunk_into(
chunk_indices,
output,
output_shape,
output_subset,
options,
)
.await
unsafe {
self.async_retrieve_chunk_into(
chunk_indices,
output,
output_shape,
output_subset,
options,
)
.await
}
} else {
let storage_handle = Arc::new(StorageHandle::new(self.storage.clone()));
let storage_transformer = self
@@ -809,13 +815,15 @@ impl<TStorage: ?Sized + AsyncReadableStorageTraits + 'static> Array<TStorage> {
self.chunk_key(chunk_indices),
));

Ok(self
.codecs
.clone()
.async_partial_decoder(input_handle, &chunk_representation, options)
.await?
.partial_decode_into(chunk_subset, output, output_shape, output_subset, options)
.await?)
unsafe {
self.codecs
.clone()
.async_partial_decoder(input_handle, &chunk_representation, options)
.await?
.partial_decode_into(chunk_subset, output, output_shape, output_subset, options)
.await?;
}
Ok(())
}
}

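
The last hunk above restructures the tail of the function: the fallible unsafe call is wrapped in an explicit `unsafe { }` block with `?` applied inside it, and the function then returns `Ok(())` on its own line instead of returning `Ok(...)` around the whole chain. A standalone sketch of that shape, with invented function and error names (not the zarrs APIs):

```rust
#![warn(unsafe_op_in_unsafe_fn)]

#[derive(Debug)]
struct DecodeError;

/// # Safety
/// `output` must be large enough to hold `input`.
unsafe fn decode_into(input: &[u8], output: &mut [u8]) -> Result<(), DecodeError> {
    if output.len() < input.len() {
        return Err(DecodeError);
    }
    output[..input.len()].copy_from_slice(input);
    Ok(())
}

/// # Safety
/// `output` must be large enough to hold `input`.
unsafe fn retrieve_into(input: &[u8], output: &mut [u8]) -> Result<(), DecodeError> {
    // SAFETY: the caller upholds the size requirement, which is forwarded as-is.
    unsafe {
        decode_into(input, output)?;
    }
    Ok(())
}

fn main() -> Result<(), DecodeError> {
    let mut buf = [0u8; 4];
    // SAFETY: `buf` has room for the 3 input bytes.
    unsafe { retrieve_into(&[1, 2, 3], &mut buf)? };
    assert_eq!(buf, [1, 2, 3, 0]);
    Ok(())
}
```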
51 changes: 35 additions & 16 deletions zarrs/src/array/array_sync_readable.rs
@@ -481,24 +481,27 @@ impl<TStorage: ?Sized + ReadableStorageTraits + 'static> Array<TStorage> {
if let Some(chunk_encoded) = chunk_encoded {
let chunk_encoded: Vec<u8> = chunk_encoded.into();
let chunk_representation = self.chunk_array_representation(chunk_indices)?;
self.codecs()
.decode_into(
unsafe {
self.codecs().decode_into(
Cow::Owned(chunk_encoded),
&chunk_representation,
output,
output_shape,
output_subset,
options,
)
.map_err(ArrayError::CodecError)
}
.map_err(ArrayError::CodecError)
} else {
copy_fill_value_into(
self.data_type(),
self.fill_value(),
output,
output_shape,
output_subset,
)
unsafe {
copy_fill_value_into(
self.data_type(),
self.fill_value(),
output,
output_shape,
output_subset,
)
}
.map_err(ArrayError::CodecError)
}
}
@@ -845,7 +848,15 @@ impl<TStorage: ?Sized + ReadableStorageTraits + 'static> Array<TStorage> {
&& chunk_subset.shape() == chunk_representation.shape_u64()
{
// Fast path if `chunk_subset` encompasses the whole chunk
self.retrieve_chunk_into(chunk_indices, output, output_shape, output_subset, options)
unsafe {
self.retrieve_chunk_into(
chunk_indices,
output,
output_shape,
output_subset,
options,
)
}
} else {
let storage_handle = Arc::new(StorageHandle::new(self.storage.clone()));
let storage_transformer = self
@@ -856,11 +867,19 @@ impl<TStorage: ?Sized + ReadableStorageTraits + 'static> Array<TStorage> {
self.chunk_key(chunk_indices),
));

Ok(self
.codecs
.clone()
.partial_decoder(input_handle, &chunk_representation, options)?
.partial_decode_into(chunk_subset, output, output_shape, output_subset, options)?)
unsafe {
self.codecs
.clone()
.partial_decoder(input_handle, &chunk_representation, options)?
.partial_decode_into(
chunk_subset,
output,
output_shape,
output_subset,
options,
)?;
}
Ok(())
}
}

30 changes: 15 additions & 15 deletions zarrs/src/array/chunk_grid.rs
@@ -372,7 +372,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
chunk_indices.len() == self.dimensionality()
&& array_shape.len() == self.dimensionality()
&& self.grid_shape(array_shape).is_ok_and(|chunk_grid_shape| {
chunk_grid_shape.map_or(false, |chunk_grid_shape| {
chunk_grid_shape.is_some_and(|chunk_grid_shape| {
std::iter::zip(chunk_indices, chunk_grid_shape)
.all(|(index, shape)| shape == 0 || *index < shape)
})
@@ -388,7 +388,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
/// See [`ChunkGridTraits::chunk_origin`].
///
/// # Safety
/// The length of `chunk_indices` must match the dimensionality of the chunk grid.
/// The length of `chunk_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn chunk_origin_unchecked(
&self,
chunk_indices: &[u64],
@@ -398,7 +398,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
/// See [`ChunkGridTraits::chunk_shape`].
///
/// # Safety
/// The length of `chunk_indices` must match the dimensionality of the chunk grid.
/// The length of `chunk_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn chunk_shape_unchecked(
&self,
chunk_indices: &[u64],
@@ -408,7 +408,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
/// See [`ChunkGridTraits::chunk_shape_u64`].
///
/// # Safety
/// The length of `chunk_indices` must match the dimensionality of the chunk grid.
/// The length of `chunk_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn chunk_shape_u64_unchecked(
&self,
chunk_indices: &[u64],
@@ -418,7 +418,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
/// See [`ChunkGridTraits::chunk_indices`].
///
/// # Safety
/// The length of `array_indices` must match the dimensionality of the chunk grid.
/// The length of `array_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn chunk_indices_unchecked(
&self,
array_indices: &[u64],
@@ -428,7 +428,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
/// See [`ChunkGridTraits::chunk_element_indices`].
///
/// # Safety
/// The length of `array_indices` must match the dimensionality of the chunk grid.
/// The length of `array_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn chunk_element_indices_unchecked(
&self,
array_indices: &[u64],
@@ -438,7 +438,7 @@ pub trait ChunkGridTraits: core::fmt::Debug + Send + Sync {
/// See [`ChunkGridTraits::subset`].
///
/// # Safety
/// The length of `chunk_indices` must match the dimensionality of the chunk grid.
/// The length of `chunk_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn subset_unchecked(
&self,
chunk_indices: &[u64],
array_shape: &[u64],
) -> Option<ArraySubset> {
debug_assert_eq!(self.dimensionality(), chunk_indices.len());
if let (Some(chunk_origin), Some(chunk_shape)) = (
self.chunk_origin_unchecked(chunk_indices, array_shape),
self.chunk_shape_u64_unchecked(chunk_indices, array_shape),
) {
Some(ArraySubset::new_with_start_shape_unchecked(
chunk_origin,
chunk_shape,
))
let chunk_origin =
// SAFETY: The length of `chunk_indices` and `array_shape` matches the dimensionality of the chunk grid
unsafe { self.chunk_origin_unchecked(chunk_indices, array_shape) };
let chunk_shape =
// SAFETY: The length of `chunk_indices` and `array_shape` matches the dimensionality of the chunk grid
unsafe { self.chunk_shape_u64_unchecked(chunk_indices, array_shape) };
if let (Some(chunk_origin), Some(chunk_shape)) = (chunk_origin, chunk_shape) {
Some(unsafe { ArraySubset::new_with_start_shape_unchecked(chunk_origin, chunk_shape) })
} else {
None
}
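
Besides the `unsafe` changes, the first hunk above (and a similar one in `rectangular.rs` below) swaps `map_or(false, ...)` for `Option::is_some_and`, the form newer clippy releases suggest for boolean predicates on an `Option`. A small equivalence check with arbitrary values:

```rust
fn main() {
    let some_shape: Option<u64> = Some(4);
    let no_shape: Option<u64> = None;

    // `is_some_and` reads as "is Some, and the value satisfies the predicate",
    // and is equivalent to `map_or(false, predicate)`.
    assert_eq!(
        some_shape.is_some_and(|s| s > 0),
        some_shape.map_or(false, |s| s > 0)
    );
    assert!(!no_shape.is_some_and(|s| s > 0));
}
```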
24 changes: 14 additions & 10 deletions zarrs/src/array/chunk_grid/rectangular.rs
@@ -130,7 +130,8 @@ impl ChunkGridTraits for RectangularChunkGrid {
RectangularChunkGridDimension::Varying(s) => {
let last_default = OffsetSize {
offset: 0,
size: NonZeroU64::new_unchecked(1),
// SAFETY: 1 is non-zero
size: unsafe { NonZeroU64::new_unchecked(1) },
};
let last = s.last().unwrap_or(&last_default);
if *array_shape == last.offset + last.size.get() {
@@ -222,7 +223,8 @@ impl ChunkGridTraits for RectangularChunkGrid {
RectangularChunkGridDimension::Varying(offsets_sizes) => {
let last_default = OffsetSize {
offset: 0,
size: NonZeroU64::new_unchecked(1),
// SAFETY: 1 is non-zero
size: unsafe { NonZeroU64::new_unchecked(1) },
};
let last = offsets_sizes.last().unwrap_or(&last_default);
if *index < last.offset + last.size.get() {
@@ -242,19 +244,21 @@ impl ChunkGridTraits for RectangularChunkGrid {
.collect()
}

/// # Safety
/// The length of `array_indices` and `array_shape` must match the dimensionality of the chunk grid.
unsafe fn chunk_element_indices_unchecked(
&self,
array_indices: &[u64],
array_shape: &[u64],
) -> Option<ArrayIndices> {
let chunk_indices = self.chunk_indices_unchecked(array_indices, array_shape);
let chunk_indices = unsafe { self.chunk_indices_unchecked(array_indices, array_shape) };
chunk_indices.and_then(|chunk_indices| {
self.chunk_origin_unchecked(&chunk_indices, array_shape)
.map(|chunk_start| {
std::iter::zip(array_indices, &chunk_start)
.map(|(i, s)| i - s)
.collect()
})
// SAFETY: The length of chunk_indices matches the dimensionality of the chunk grid
unsafe { self.chunk_origin_unchecked(&chunk_indices, array_shape) }.map(|chunk_start| {
std::iter::zip(array_indices, &chunk_start)
.map(|(i, s)| i - s)
.collect()
})
})
}

@@ -268,7 +272,7 @@ impl ChunkGridTraits for RectangularChunkGrid {
RectangularChunkGridDimension::Fixed(_) => true,
RectangularChunkGridDimension::Varying(offsets_sizes) => offsets_sizes
.last()
.map_or(false, |last| *array_index < last.offset + last.size.get()),
.is_some_and(|last| *array_index < last.offset + last.size.get()),
}
},
)
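
For the `NonZeroU64::new_unchecked(1)` hunks above, the fix is simply an explicit `unsafe` block plus a `SAFETY` comment stating that 1 is non-zero. A standalone sketch of that form next to a safe alternative (a checked constructor, at the cost of a runtime check):

```rust
use std::num::NonZeroU64;

fn main() {
    // The lint-compliant form used in the commit: explicit block, SAFETY comment.
    // SAFETY: 1 is non-zero.
    let unchecked = unsafe { NonZeroU64::new_unchecked(1) };

    // Safe alternatives that avoid `unsafe` entirely.
    let checked = NonZeroU64::new(1).expect("1 is non-zero");
    assert_eq!(unchecked, checked);
    assert_eq!(NonZeroU64::MIN, checked);
}
```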
2 changes: 1 addition & 1 deletion zarrs/src/array/codec/array_to_bytes/bytes/bytes_codec.rs
@@ -74,7 +74,7 @@ impl BytesCodec {
return Err(CodecError::UnsupportedDataType(
decoded_representation.data_type().clone(),
super::IDENTIFIER.to_string(),
))
));
}
DataTypeSize::Fixed(data_type_size) => {
let array_size = decoded_representation.num_elements() * data_type_size as u64;
@@ -56,7 +56,7 @@ impl ArrayPartialDecoderTraits for BytesPartialDecoder<'_> {
return Err(CodecError::UnsupportedDataType(
self.data_type().clone(),
super::IDENTIFIER.to_string(),
))
));
}
DataTypeSize::Fixed(data_type_size) => {
// Get byte ranges
@@ -166,7 +166,7 @@ impl AsyncArrayPartialDecoderTraits for AsyncBytesPartialDecoder {
return Err(CodecError::UnsupportedDataType(
self.data_type().clone(),
super::IDENTIFIER.to_string(),
))
));
}
DataTypeSize::Fixed(data_type_size) => array_subset
.byte_ranges(&chunk_shape, data_type_size)
