Skip to content

Commit 66515f1

Browse files
authored
Merge pull request #213 from mulimoen/bugfix/write-scalar
Add check for chunking of dataset for filters
2 parents 1d60252 + 0194da8 commit 66515f1

File tree

4 files changed

+26
-3
lines changed

4 files changed

+26
-3
lines changed

CHANGELOG.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616
- The `H5Type` derive macro now uses `proc-macro-error` to emit error messages.
1717
- MSRV is now `1.64.0` and Rust edition has now been bumped to 2021.
18-
- Types in ChunkInfo has been changed to match HDF5
18+
- Types in `ChunkInfo` have been changed to match HDF5.
1919

2020
### Fixed
2121

@@ -25,6 +25,7 @@
2525
- Fixed a bug where errors were only silenced on the main thread.
2626
- Fixed a memory leak when opening datasets.
2727
- Avoid creating unaligned references in `H5Type` derive macro.
28+
- Applying filters without chunking will now produce an explicit error.
2829

2930
## 0.8.1
3031

hdf5/src/hl/dataset.rs

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1066,4 +1066,22 @@ mod tests {
10661066
let e = SimpleExtents::new(&[1, 1, 100]);
10671067
assert_eq!(compute_chunk_shape(&e, 51), vec![1, 1, 100]);
10681068
}
1069+
1070+
#[test]
1071+
fn test_read_write_scalar() {
1072+
use crate::internal_prelude::*;
1073+
with_tmp_file(|file| {
1074+
if !crate::filters::deflate_available() {
1075+
return;
1076+
}
1077+
let val: f64 = 0.2;
1078+
let dataset = file.new_dataset::<f64>().deflate(3).create("foo");
1079+
assert_err_re!(dataset, "Filter requires dataset to be chunked");
1080+
1081+
let dataset = file.new_dataset::<f64>().create("foo").unwrap();
1082+
dataset.write_scalar(&val).unwrap();
1083+
let val_back = dataset.read_scalar().unwrap();
1084+
assert_eq!(val, val_back);
1085+
})
1086+
}
10691087
}

hdf5/src/hl/filters.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -519,9 +519,9 @@ impl Filter {
519519
}
520520
}
521521

522-
pub(crate) fn validate_filters(filters: &[Filter], type_class: H5T_class_t) -> Result<()> {
523-
const COMP_FILTER_IDS: &[H5Z_filter_t] = &[H5Z_FILTER_DEFLATE, H5Z_FILTER_SZIP, 32000, 32001];
522+
const COMP_FILTER_IDS: &[H5Z_filter_t] = &[H5Z_FILTER_DEFLATE, H5Z_FILTER_SZIP, 32000, 32001];
524523

524+
pub(crate) fn validate_filters(filters: &[Filter], type_class: H5T_class_t) -> Result<()> {
525525
let mut map: HashMap<H5Z_filter_t, &Filter> = HashMap::new();
526526
let mut comp_filter: Option<&Filter> = None;
527527

@@ -627,6 +627,7 @@ mod tests {
627627

628628
let mut b = DatasetCreate::build();
629629
b.set_filters(&pipeline);
630+
b.chunk(10);
630631
let plist = b.finish()?;
631632
assert_eq!(Filter::extract_pipeline(plist.id())?, pipeline);
632633

hdf5/src/hl/plist/dataset_create.rs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -582,6 +582,9 @@ impl DatasetCreateBuilder {
582582
}
583583

584584
fn populate_plist(&self, id: hid_t) -> Result<()> {
585+
if !self.filters.is_empty() {
586+
ensure!(self.chunk.is_some(), "Filter requires dataset to be chunked");
587+
}
585588
for filter in &self.filters {
586589
filter.apply_to_plist(id)?;
587590
}

0 commit comments

Comments (0)