diff --git a/lib/iris/tests/integration/netcdf/test_attributes.py b/lib/iris/tests/integration/netcdf/test_attributes.py
index ca32060c22..b24538079f 100644
--- a/lib/iris/tests/integration/netcdf/test_attributes.py
+++ b/lib/iris/tests/integration/netcdf/test_attributes.py
@@ -4,12 +4,7 @@
# See LICENSE in the root of the repository for full licensing details.
"""Integration tests for attribute-related loading and saving netcdf files."""
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip
-
from contextlib import contextmanager
-from unittest import mock
from cf_units import Unit
import pytest
@@ -17,21 +12,22 @@
import iris
from iris.cube import Cube, CubeList
from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION
+from iris.tests import _shared_utils
-class TestUmVersionAttribute(tests.IrisTest):
- def test_single_saves_as_global(self):
+class TestUmVersionAttribute:
+ def test_single_saves_as_global(self, tmp_path, request):
cube = Cube(
[1.0],
standard_name="air_temperature",
units="K",
attributes={"um_version": "4.3"},
)
- with self.temp_filename(".nc") as nc_path:
- iris.save(cube, nc_path)
- self.assertCDL(nc_path)
+ nc_path = tmp_path / "test.nc"
+ iris.save(cube, nc_path)
+ _shared_utils.assert_CDL(request, nc_path)
- def test_multiple_same_saves_as_global(self):
+ def test_multiple_same_saves_as_global(self, tmp_path, request):
cube_a = Cube(
[1.0],
standard_name="air_temperature",
@@ -44,11 +40,11 @@ def test_multiple_same_saves_as_global(self):
units="hPa",
attributes={"um_version": "4.3"},
)
- with self.temp_filename(".nc") as nc_path:
- iris.save(CubeList([cube_a, cube_b]), nc_path)
- self.assertCDL(nc_path)
+ nc_path = tmp_path / "test.nc"
+ iris.save(CubeList([cube_a, cube_b]), nc_path)
+ _shared_utils.assert_CDL(request, nc_path)
- def test_multiple_different_saves_on_variables(self):
+ def test_multiple_different_saves_on_variables(self, tmp_path, request):
cube_a = Cube(
[1.0],
standard_name="air_temperature",
@@ -61,19 +57,19 @@ def test_multiple_different_saves_on_variables(self):
units="hPa",
attributes={"um_version": "4.4"},
)
- with self.temp_filename(".nc") as nc_path:
- iris.save(CubeList([cube_a, cube_b]), nc_path)
- self.assertCDL(nc_path)
+ nc_path = tmp_path / "test.nc"
+ iris.save(CubeList([cube_a, cube_b]), nc_path)
+ _shared_utils.assert_CDL(request, nc_path)
@contextmanager
-def _patch_site_configuration():
+def _patch_site_configuration(mocker):
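+    # NOTE: 'mocker' (the pytest-mock fixture) must be passed in explicitly,
+    # since a plain @contextmanager helper cannot request fixtures itself.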
def cf_patch_conventions(conventions):
return ", ".join([conventions, "convention1, convention2"])
def update(config):
- config["cf_profile"] = mock.Mock(name="cf_profile")
- config["cf_patch"] = mock.Mock(name="cf_patch")
+ config["cf_profile"] = mocker.Mock(name="cf_profile")
+ config["cf_patch"] = mocker.Mock(name="cf_patch")
config["cf_patch_conventions"] = cf_patch_conventions
orig_site_config = iris.site_configuration.copy()
@@ -82,8 +78,8 @@ def update(config):
iris.site_configuration = orig_site_config
-class TestConventionsAttributes(tests.IrisTest):
- def test_patching_conventions_attribute(self):
+class TestConventionsAttributes:
+ def test_patching_conventions_attribute(self, tmp_path, mocker):
# Ensure that user defined conventions are wiped and those which are
# saved patched through site_config can be loaded without an exception
# being raised.
@@ -95,24 +91,24 @@ def test_patching_conventions_attribute(self):
)
# Patch the site configuration dictionary.
- with _patch_site_configuration(), self.temp_filename(".nc") as nc_path:
+ nc_path = tmp_path / "test.nc"
+ with _patch_site_configuration(mocker):
iris.save(cube, nc_path)
res = iris.load_cube(nc_path)
- self.assertEqual(
- res.attributes["Conventions"],
- "{}, {}, {}".format(CF_CONVENTIONS_VERSION, "convention1", "convention2"),
+ assert res.attributes["Conventions"] == "{}, {}, {}".format(
+ CF_CONVENTIONS_VERSION, "convention1", "convention2"
)
-class TestStandardName(tests.IrisTest):
- def test_standard_name_roundtrip(self):
+class TestStandardName:
+ def test_standard_name_roundtrip(self, tmp_path):
standard_name = "air_temperature detection_minimum"
cube = iris.cube.Cube(1, standard_name=standard_name)
- with self.temp_filename(suffix=".nc") as fout:
- iris.save(cube, fout)
- detection_limit_cube = iris.load_cube(fout)
- self.assertEqual(detection_limit_cube.standard_name, standard_name)
+ fout = tmp_path / "standard_name.nc"
+ iris.save(cube, fout)
+ detection_limit_cube = iris.load_cube(fout)
+ assert detection_limit_cube.standard_name == standard_name
class TestCalendar:
@@ -126,7 +122,3 @@ def test_calendar_roundtrip(self, tmp_path):
iris.save(self.cube, fout)
detection_limit_cube = iris.load_cube(fout)
assert detection_limit_cube.units == self.calendar
-
-
-if __name__ == "__main__":
- tests.main()
diff --git a/lib/iris/tests/integration/netcdf/test_aux_factories.py b/lib/iris/tests/integration/netcdf/test_aux_factories.py
index 4b4976bd18..20686045b6 100644
--- a/lib/iris/tests/integration/netcdf/test_aux_factories.py
+++ b/lib/iris/tests/integration/netcdf/test_aux_factories.py
@@ -4,17 +4,17 @@
# See LICENSE in the root of the repository for full licensing details.
"""Integration tests for aux-factory-related loading and saving netcdf files."""
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip
+import pytest
import iris
+from iris.tests import _shared_utils
from iris.tests import stock as stock
-@tests.skip_data
-class TestAtmosphereSigma(tests.IrisTest):
- def setUp(self):
+@_shared_utils.skip_data
+class TestAtmosphereSigma:
+ @pytest.fixture(autouse=True)
+ def _setup(self):
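+        # This autouse fixture runs before every test in the class, taking
+        # the place of the old unittest-style setUp().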
         # Modify stock cube so it is suitable to have an atmosphere sigma
# factory added to it.
cube = stock.realistic_4d_no_derived()
@@ -33,23 +33,24 @@ def setUp(self):
cube.add_aux_factory(factory)
self.cube = cube
- def test_save(self):
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(self.cube, filename)
- self.assertCDL(filename)
+ def test_save(self, request, tmp_path):
+ filename = tmp_path / "fn.nc"
+ iris.save(self.cube, filename)
+ _shared_utils.assert_CDL(request, filename)
- def test_save_load_loop(self):
+ def test_save_load_loop(self, tmp_path):
# Ensure that the AtmosphereSigmaFactory is automatically loaded
# when loading the file.
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(self.cube, filename)
- cube = iris.load_cube(filename, "air_potential_temperature")
- assert cube.coords("air_pressure")
+ filename = tmp_path / "fn.nc"
+ iris.save(self.cube, filename)
+ cube = iris.load_cube(filename, "air_potential_temperature")
+ assert cube.coords("air_pressure")
-@tests.skip_data
-class TestHybridPressure(tests.IrisTest):
- def setUp(self):
+@_shared_utils.skip_data
+class TestHybridPressure:
+ @pytest.fixture(autouse=True)
+ def _setup(self):
# Modify stock cube so it is suitable to have a
# hybrid pressure factory added to it.
cube = stock.realistic_4d_no_derived()
@@ -66,29 +67,27 @@ def setUp(self):
cube.add_aux_factory(factory)
self.cube = cube
- def test_save(self):
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(self.cube, filename)
- self.assertCDL(filename)
+ def test_save(self, request, tmp_path):
+ filename = tmp_path / "fn.nc"
+ iris.save(self.cube, filename)
+ _shared_utils.assert_CDL(request, filename)
- def test_save_load_loop(self):
+ def test_save_load_loop(self, tmp_path):
# Tests an issue where the variable names in the formula
# terms changed to the standard_names instead of the variable names
# when loading a previously saved cube.
- with (
- self.temp_filename(suffix=".nc") as filename,
- self.temp_filename(suffix=".nc") as other_filename,
- ):
- iris.save(self.cube, filename)
- cube = iris.load_cube(filename, "air_potential_temperature")
- iris.save(cube, other_filename)
- other_cube = iris.load_cube(other_filename, "air_potential_temperature")
- self.assertEqual(cube, other_cube)
-
-
-@tests.skip_data
-class TestSaveMultipleAuxFactories(tests.IrisTest):
- def test_hybrid_height_and_pressure(self):
+ filename = tmp_path / "fn.nc"
+ other_filename = tmp_path / "ofn.nc"
+ iris.save(self.cube, filename)
+ cube = iris.load_cube(filename, "air_potential_temperature")
+ iris.save(cube, other_filename)
+ other_cube = iris.load_cube(other_filename, "air_potential_temperature")
+ assert cube == other_cube
+
+
+@_shared_utils.skip_data
+class TestSaveMultipleAuxFactories:
+ def test_hybrid_height_and_pressure(self, request, tmp_path):
cube = stock.realistic_4d()
cube.add_aux_coord(
iris.coords.DimCoord(1200.0, long_name="level_pressure", units="hPa")
@@ -105,11 +104,11 @@ def test_hybrid_height_and_pressure(self):
cube.coord("surface_air_pressure"),
)
cube.add_aux_factory(factory)
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(cube, filename)
- self.assertCDL(filename)
+ filename = tmp_path / "fn.nc"
+ iris.save(cube, filename)
+ _shared_utils.assert_CDL(request, filename)
- def test_shared_primary(self):
+ def test_shared_primary(self, tmp_path):
cube = stock.realistic_4d()
factory = iris.aux_factory.HybridHeightFactory(
cube.coord("level_height"),
@@ -118,37 +117,29 @@ def test_shared_primary(self):
)
factory.rename("another altitude")
cube.add_aux_factory(factory)
- with (
- self.temp_filename(suffix=".nc") as filename,
- self.assertRaisesRegex(ValueError, "multiple aux factories"),
- ):
+ filename = tmp_path / "fn.nc"
+ with pytest.raises(ValueError, match="multiple aux factories"):
iris.save(cube, filename)
- def test_hybrid_height_cubes(self):
+ def test_hybrid_height_cubes(self, request, tmp_path):
hh1 = stock.simple_4d_with_hybrid_height()
hh1.attributes["cube"] = "hh1"
hh2 = stock.simple_4d_with_hybrid_height()
hh2.attributes["cube"] = "hh2"
sa = hh2.coord("surface_altitude")
sa.points = sa.points * 10
- with self.temp_filename(".nc") as fname:
- iris.save([hh1, hh2], fname)
- cubes = iris.load(fname, "air_temperature")
- cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"])
- self.assertCML(cubes)
+ filename = tmp_path / "fn.nc"
+ iris.save([hh1, hh2], filename)
+ cubes = iris.load(filename, "air_temperature")
+ cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"])
+ _shared_utils.assert_CML(request, cubes)
- def test_hybrid_height_cubes_on_dimension_coordinate(self):
+ def test_hybrid_height_cubes_on_dimension_coordinate(self, tmp_path):
hh1 = stock.hybrid_height()
hh2 = stock.hybrid_height()
sa = hh2.coord("surface_altitude")
sa.points = sa.points * 10
emsg = "Unable to create dimensonless vertical coordinate."
- with (
- self.temp_filename(".nc") as fname,
- self.assertRaisesRegex(ValueError, emsg),
- ):
- iris.save([hh1, hh2], fname)
-
-
-if __name__ == "__main__":
- tests.main()
+ filename = tmp_path / "fn.nc"
+ with pytest.raises(ValueError, match=emsg):
+ iris.save([hh1, hh2], filename)
diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py
index 9f4f272dd7..aa7b715912 100644
--- a/lib/iris/tests/integration/netcdf/test_coord_systems.py
+++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py
@@ -4,10 +4,6 @@
# See LICENSE in the root of the repository for full licensing details.
"""Integration tests for coord-system-related loading and saving netcdf files."""
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip
-
import warnings
import numpy as np
@@ -16,8 +12,8 @@
import iris
from iris.coords import DimCoord
from iris.cube import Cube
+from iris.tests import _shared_utils
from iris.tests import stock as stock
-from iris.tests._shared_utils import assert_CML
from iris.tests.stock.netcdf import ncgen_from_cdl
from iris.tests.unit.fileformats.netcdf.loader import test_load_cubes as tlc
@@ -172,7 +168,7 @@ def multi_cs_osgb_wkt():
"""
-@tests.skip_data
+@_shared_utils.skip_data
class TestCoordSystem:
@pytest.fixture(autouse=True)
def _setup(self):
@@ -182,11 +178,11 @@ def _setup(self):
def test_load_laea_grid(self, request):
cube = iris.load_cube(
- tests.get_data_path(
+ _shared_utils.get_data_path(
("NetCDF", "lambert_azimuthal_equal_area", "euro_air_temp.nc")
)
)
- assert_CML(request, cube, ("netcdf", "netcdf_laea.cml"))
+ _shared_utils.assert_CML(request, cube, ("netcdf", "netcdf_laea.cml"))
def test_load_datum_wkt(self, datum_wkt_cdl):
expected = "OSGB 1936"
diff --git a/lib/iris/tests/integration/netcdf/test_dataless.py b/lib/iris/tests/integration/netcdf/test_dataless.py
index 442777ce18..d254149039 100644
--- a/lib/iris/tests/integration/netcdf/test_dataless.py
+++ b/lib/iris/tests/integration/netcdf/test_dataless.py
@@ -16,7 +16,7 @@
class TestDataless:
@pytest.fixture(autouse=True)
- def setup(self, tmp_path_factory):
+ def _setup(self, tmp_path_factory):
ny, nx = 3, 4
self.testcube = Cube(
shape=(ny, nx),
diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py
index 4b8a5cbe5c..6473266e6f 100644
--- a/lib/iris/tests/integration/netcdf/test_delayed_save.py
+++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py
@@ -15,7 +15,7 @@
import iris
from iris.fileformats.netcdf._thread_safe_nc import default_fillvals
-import iris.tests
+from iris.tests import _shared_utils
from iris.tests.stock import realistic_4d
@@ -38,7 +38,7 @@ def all_saves_with_split_attrs(self):
def output_path(self, tmp_path):
# A temporary output netcdf-file path, **unique to each test call**.
self.temp_output_filepath = tmp_path / "tmp.nc"
- yield self.temp_output_filepath
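+        # No teardown is needed here, so a plain 'return' replaces 'yield'.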
+ return self.temp_output_filepath
@pytest.fixture(autouse=True, scope="module")
def all_vars_lazy(self):
@@ -118,8 +118,8 @@ def fix_array(array):
cube.add_cell_measure(cm, cm_dims)
return cube
- def test_realfile_loadsave_equivalence(self, save_is_delayed, output_path):
- input_filepath = iris.tests.get_data_path(
+ def test_realfile_loadsave_equivalence(self, save_is_delayed, output_path, request):
+ input_filepath = _shared_utils.get_data_path(
["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"]
)
original_cubes = iris.load(input_filepath)
diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py
index 21afb6bc8b..fa544a1d67 100644
--- a/lib/iris/tests/integration/netcdf/test_general.py
+++ b/lib/iris/tests/integration/netcdf/test_general.py
@@ -4,16 +4,9 @@
# See LICENSE in the root of the repository for full licensing details.
"""Integration tests for loading and saving netcdf files."""
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip
-
from itertools import repeat
import os.path
from pathlib import Path
-import shutil
-import tempfile
-from unittest import mock
import warnings
import dask
@@ -32,6 +25,7 @@
# netCDF4" check in "iris.tests.test_coding_standards.test_netcdf4_import()".
import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc
from iris.loading import LOAD_PROBLEMS
+from iris.tests import _shared_utils
import iris.warnings
nc = threadsafe_nc.netCDF4
@@ -39,49 +33,50 @@
from iris.tests.stock.netcdf import ncgen_from_cdl
-class TestLazySave(tests.IrisTest):
- @tests.skip_data
- def test_lazy_preserved_save(self):
- fpath = tests.get_data_path(
+class TestLazySave:
+ @_shared_utils.skip_data
+ def test_lazy_preserved_save(self, mocker, tmp_path):
+ fpath = _shared_utils.get_data_path(
("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc")
)
# While loading, "turn off" loading small variables as real data.
- with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0):
- acube = iris.load_cube(fpath, "air_temperature")
- self.assertTrue(acube.has_lazy_data())
+ mocker.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0)
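+        # mocker.patch is reverted automatically when the test finishes, so
+        # no 'with' block is required.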
+ acube = iris.load_cube(fpath, "air_temperature")
+ assert acube.has_lazy_data()
# Also check a coord with lazy points + bounds.
- self.assertTrue(acube.coord("forecast_period").has_lazy_points())
- self.assertTrue(acube.coord("forecast_period").has_lazy_bounds())
- with self.temp_filename(".nc") as nc_path:
- with Saver(nc_path, "NETCDF4") as saver:
- saver.write(acube)
+ assert acube.coord("forecast_period").has_lazy_points()
+ assert acube.coord("forecast_period").has_lazy_bounds()
+ nc_path = tmp_path / "tmp.nc"
+ with Saver(nc_path, "NETCDF4") as saver:
+ saver.write(acube)
# Check that cube data is not realised, also coord points + bounds.
- self.assertTrue(acube.has_lazy_data())
- self.assertTrue(acube.coord("forecast_period").has_lazy_points())
- self.assertTrue(acube.coord("forecast_period").has_lazy_bounds())
+ assert acube.has_lazy_data()
+ assert acube.coord("forecast_period").has_lazy_points()
+ assert acube.coord("forecast_period").has_lazy_bounds()
-@tests.skip_data
-class TestCellMeasures(tests.IrisTest):
- def setUp(self):
- self.fname = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc"))
+@_shared_utils.skip_data
+class TestCellMeasures:
+ @pytest.fixture(autouse=True)
+ def _setup(self):
+ self.fname = _shared_utils.get_data_path(("NetCDF", "ORCA2", "votemper.nc"))
def test_load_raw(self):
(cube,) = iris.load_raw(self.fname)
- self.assertEqual(len(cube.cell_measures()), 1)
- self.assertEqual(cube.cell_measures()[0].measure, "area")
+ assert len(cube.cell_measures()) == 1
+ assert cube.cell_measures()[0].measure == "area"
def test_load(self):
cube = iris.load_cube(self.fname)
- self.assertEqual(len(cube.cell_measures()), 1)
- self.assertEqual(cube.cell_measures()[0].measure, "area")
+ assert len(cube.cell_measures()) == 1
+ assert cube.cell_measures()[0].measure == "area"
def test_merge_cell_measure_aware(self):
(cube1,) = iris.load_raw(self.fname)
(cube2,) = iris.load_raw(self.fname)
cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
cubes = CubeList([cube1, cube2]).merge()
- self.assertEqual(len(cubes), 2)
+ assert len(cubes) == 2
def test_concatenate_cell_measure_aware(self):
(cube1,) = iris.load_raw(self.fname)
@@ -92,8 +87,8 @@ def test_concatenate_cell_measure_aware(self):
cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
cube2.coord("time").points = cube2.coord("time").points + 1
cubes = CubeList([cube1, cube2]).concatenate()
- self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims)
- self.assertEqual(len(cubes), 2)
+ assert cubes[0]._cell_measures_and_dims == cm_and_dims
+ assert len(cubes) == 2
def test_concatenate_cell_measure_match(self):
(cube1,) = iris.load_raw(self.fname)
@@ -103,55 +98,47 @@ def test_concatenate_cell_measure_match(self):
cube2 = cube2[:, :, 0, 0]
cube2.coord("time").points = cube2.coord("time").points + 1
cubes = CubeList([cube1, cube2]).concatenate()
- self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims)
- self.assertEqual(len(cubes), 1)
+ assert cubes[0]._cell_measures_and_dims == cm_and_dims
+ assert len(cubes) == 1
- def test_round_trip(self):
+ def test_round_trip(self, tmp_path):
(cube,) = iris.load(self.fname)
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(cube, filename, unlimited_dimensions=[])
- (round_cube,) = iris.load_raw(filename)
- self.assertEqual(len(round_cube.cell_measures()), 1)
- self.assertEqual(round_cube.cell_measures()[0].measure, "area")
+ filename = tmp_path / "tmp.nc"
+ iris.save(cube, filename, unlimited_dimensions=[])
+ (round_cube,) = iris.load_raw(filename)
+ assert len(round_cube.cell_measures()) == 1
+ assert round_cube.cell_measures()[0].measure == "area"
def test_print(self):
cube = iris.load_cube(self.fname)
printed = cube.__str__()
- self.assertIn(
- (
- "Cell measures:\n"
- " cell_area - - "
- " x x"
- ),
- printed,
- )
+ assert (
+ "Cell measures:\n"
+ " cell_area - - "
+ " x x"
+ ) in printed
-class TestCellMethod_unknown(tests.IrisTest):
- def test_unknown_method(self):
+class TestCellMethod_unknown:
+ def test_unknown_method(self, tmp_path_factory):
cube = Cube([1, 2], long_name="odd_phenomenon")
cube.add_cell_method(CellMethod(method="oddity", coords=("x",)))
- temp_dirpath = tempfile.mkdtemp()
- try:
- temp_filepath = os.path.join(temp_dirpath, "tmp.nc")
- iris.save(cube, temp_filepath)
- with warnings.catch_warnings(record=True) as warning_records:
- iris.load(temp_filepath)
- # Filter to get the warning we are interested in.
- warning_messages = [record.message for record in warning_records]
- warning_messages = [
- warn
- for warn in warning_messages
- if isinstance(warn, iris.warnings.IrisUnknownCellMethodWarning)
- ]
- self.assertEqual(len(warning_messages), 1)
- message = warning_messages[0].args[0]
- msg = (
- "NetCDF variable 'odd_phenomenon' contains unknown cell method 'oddity'"
- )
- self.assertIn(msg, message)
- finally:
- shutil.rmtree(temp_dirpath)
+ temp_dirpath = tmp_path_factory.mktemp("test")
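+        # pytest removes tmp_path_factory directories itself, so the previous
+        # try/finally with shutil.rmtree is no longer needed.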
+ temp_filepath = os.path.join(temp_dirpath, "tmp.nc")
+ iris.save(cube, temp_filepath)
+ with warnings.catch_warnings(record=True) as warning_records:
+ iris.load(temp_filepath)
+ # Filter to get the warning we are interested in.
+ warning_messages = [record.message for record in warning_records]
+ warning_messages = [
+ warn
+ for warn in warning_messages
+ if isinstance(warn, iris.warnings.IrisUnknownCellMethodWarning)
+ ]
+ assert len(warning_messages) == 1
+ message = warning_messages[0].args[0]
+ msg = "NetCDF variable 'odd_phenomenon' contains unknown cell method 'oddity'"
+ assert msg in message
def _get_scale_factor_add_offset(cube, datatype):
@@ -181,11 +168,11 @@ def _get_scale_factor_add_offset(cube, datatype):
return (scale_factor, add_offset)
-@tests.skip_data
-class TestPackedData(tests.IrisTest):
- def _single_test(self, datatype, CDLfilename, manual=False):
+@_shared_utils.skip_data
+class TestPackedData:
+ def _single_test(self, datatype, CDLfilename, request, tmp_path, manual=False):
# Read PP input file.
- file_in = tests.get_data_path(
+ file_in = _shared_utils.get_data_path(
(
"PP",
"cf_processing",
@@ -201,43 +188,48 @@ def _single_test(self, datatype, CDLfilename, manual=False):
else:
packspec = datatype
# Write Cube to netCDF file.
- with self.temp_filename(suffix=".nc") as file_out:
- iris.save(cube, file_out, packing=packspec)
- decimal = int(-np.log10(scale_factor))
- packedcube = iris.load_cube(file_out)
- # Check that packed cube is accurate to expected precision
- self.assertArrayAlmostEqual(cube.data, packedcube.data, decimal=decimal)
- # Check the netCDF file against CDL expected output.
- self.assertCDL(
- file_out,
- (
- "integration",
- "netcdf",
- "general",
- "TestPackedData",
- CDLfilename,
- ),
- )
+ file_out = tmp_path / "tmp.nc"
+ iris.save(cube, file_out, packing=packspec)
+ decimal = int(-np.log10(scale_factor))
+ packedcube = iris.load_cube(file_out)
+ # Check that packed cube is accurate to expected precision
+ _shared_utils.assert_array_almost_equal(
+ cube.data, packedcube.data, decimal=decimal
+ )
+ # Check the netCDF file against CDL expected output.
+ _shared_utils.assert_CDL(
+ request,
+ file_out,
+ (
+ "integration",
+ "netcdf",
+ "general",
+ "TestPackedData",
+ CDLfilename,
+ ),
+ )
- def test_single_packed_signed(self):
+ def test_single_packed_signed(self, request, tmp_path):
"""Test saving a single CF-netCDF file with packing."""
- self._single_test("i2", "single_packed_signed.cdl")
+ self._single_test("i2", "single_packed_signed.cdl", request, tmp_path)
- def test_single_packed_unsigned(self):
+ def test_single_packed_unsigned(self, request, tmp_path):
"""Test saving a single CF-netCDF file with packing into unsigned."""
- self._single_test("u1", "single_packed_unsigned.cdl")
+ self._single_test("u1", "single_packed_unsigned.cdl", request, tmp_path)
- def test_single_packed_manual_scale(self):
+ def test_single_packed_manual_scale(self, request, tmp_path):
"""Test saving a single CF-netCDF file.
File with packing with scale factor and add_offset set manually.
"""
- self._single_test("i2", "single_packed_manual.cdl", manual=True)
+ self._single_test(
+ "i2", "single_packed_manual.cdl", request, tmp_path, manual=True
+ )
- def _multi_test(self, CDLfilename, multi_dtype=False):
+ def _multi_test(self, CDLfilename, request, tmp_path, multi_dtype=False):
"""Test saving multiple packed cubes with pack_dtype list."""
# Read PP input file.
- file_in = tests.get_data_path(
+ file_in = _shared_utils.get_data_path(
("PP", "cf_processing", "abcza_pa19591997_daily_29.b.pp")
)
cubes = iris.load(file_in)
@@ -256,74 +248,77 @@ def _multi_test(self, CDLfilename, multi_dtype=False):
dtypes = repeat(packspec)
# Write Cube to netCDF file.
- with self.temp_filename(suffix=".nc") as file_out:
- iris.save(cubes, file_out, packing=packspec)
- # Check the netCDF file against CDL expected output.
- self.assertCDL(
- file_out,
- (
- "integration",
- "netcdf",
- "general",
- "TestPackedData",
- CDLfilename,
- ),
- )
- packedcubes = iris.load(file_out)
- packedcubes.sort(key=lambda cube: cube.cell_methods[0].method)
- for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes):
- if dtype:
- sf, ao = _get_scale_factor_add_offset(cube, dtype)
- decimal = int(-np.log10(sf))
- # Check that packed cube is accurate to expected precision
- self.assertArrayAlmostEqual(
- cube.data, packedcube.data, decimal=decimal
- )
- else:
- self.assertArrayEqual(cube.data, packedcube.data)
-
- def test_multi_packed_single_dtype(self):
+ file_out = tmp_path / "tmp.nc"
+ iris.save(cubes, file_out, packing=packspec)
+ # Check the netCDF file against CDL expected output.
+ _shared_utils.assert_CDL(
+ request,
+ file_out,
+ (
+ "integration",
+ "netcdf",
+ "general",
+ "TestPackedData",
+ CDLfilename,
+ ),
+ )
+ packedcubes = iris.load(file_out)
+ packedcubes.sort(key=lambda cube: cube.cell_methods[0].method)
+ for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes):
+ if dtype:
+ sf, ao = _get_scale_factor_add_offset(cube, dtype)
+ decimal = int(-np.log10(sf))
+ # Check that packed cube is accurate to expected precision
+ _shared_utils.assert_array_almost_equal(
+ cube.data, packedcube.data, decimal=decimal
+ )
+ else:
+ _shared_utils.assert_array_equal(cube.data, packedcube.data)
+
+ def test_multi_packed_single_dtype(self, request, tmp_path):
"""Test saving multiple packed cubes with the same pack_dtype."""
# Read PP input file.
- self._multi_test("multi_packed_single_dtype.cdl")
+ self._multi_test("multi_packed_single_dtype.cdl", request, tmp_path)
- def test_multi_packed_multi_dtype(self):
+ def test_multi_packed_multi_dtype(self, request, tmp_path):
"""Test saving multiple packed cubes with pack_dtype list."""
# Read PP input file.
- self._multi_test("multi_packed_multi_dtype.cdl", multi_dtype=True)
+ self._multi_test(
+ "multi_packed_multi_dtype.cdl", request, tmp_path, multi_dtype=True
+ )
-class TestScalarCube(tests.IrisTest):
- def test_scalar_cube_save_load(self):
+class TestScalarCube:
+ def test_scalar_cube_save_load(self, tmp_path):
cube = iris.cube.Cube(1, long_name="scalar_cube")
- with self.temp_filename(suffix=".nc") as fout:
- iris.save(cube, fout)
- scalar_cube = iris.load_cube(fout)
- self.assertEqual(scalar_cube.name(), "scalar_cube")
+ fout = tmp_path / "scalar_cube.nc"
+ iris.save(cube, fout)
+ scalar_cube = iris.load_cube(fout)
+ assert scalar_cube.name() == "scalar_cube"
-@tests.skip_data
-class TestConstrainedLoad(tests.IrisTest):
- filename = tests.get_data_path(
+@_shared_utils.skip_data
+class TestConstrainedLoad:
+ filename = _shared_utils.get_data_path(
("NetCDF", "label_and_climate", "A1B-99999a-river-sep-2070-2099.nc")
)
def test_netcdf_with_NameConstraint(self):
constr = iris.NameConstraint(var_name="cdf_temp_dmax_tmean_abs")
cubes = iris.load(self.filename, constr)
- self.assertEqual(len(cubes), 1)
- self.assertEqual(cubes[0].var_name, "cdf_temp_dmax_tmean_abs")
+ assert len(cubes) == 1
+ assert cubes[0].var_name == "cdf_temp_dmax_tmean_abs"
def test_netcdf_with_2_NameConstraints(self):
var_names = ["cdf_temp_dmax_tmean_abs", "temp_dmax_tmean_abs"]
constrs = [iris.NameConstraint(var_name=var_name) for var_name in var_names]
cubes = iris.load(self.filename, constrs)
- self.assertEqual(len(cubes), 2)
- self.assertEqual(sorted([cube.var_name for cube in cubes]), var_names)
+ assert len(cubes) == 2
+ assert sorted([cube.var_name for cube in cubes]) == var_names
def test_netcdf_with_no_constraint(self):
cubes = iris.load(self.filename)
- self.assertEqual(len(cubes), 3)
+ assert len(cubes) == 3
class TestSkippedCoord:
@@ -367,7 +362,8 @@ def create_nc_file(self, tmp_path):
def test_lat_not_loaded(self):
with pytest.warns(match="Not all file objects were parsed correctly"):
cube = iris.load_cube(self.nc_path)
- with pytest.raises(iris.exceptions.CoordinateNotFoundError):
+ msg = "Expected to find exactly 1"
+ with pytest.raises(iris.exceptions.CoordinateNotFoundError, match=msg):
_ = cube.coord("lat")
problems = LOAD_PROBLEMS.problems
assert isinstance(problems[-2].loaded, iris.coords.DimCoord)
@@ -375,11 +371,12 @@ def test_lat_not_loaded(self):
assert problems[-1].loaded.name() == "latitude"
-@tests.skip_data
-class TestDatasetAndPathLoads(tests.IrisTest):
- @classmethod
- def setUpClass(cls):
- cls.filepath = tests.get_data_path(
+@_shared_utils.skip_data
+class TestDatasetAndPathLoads:
+ @pytest.fixture(scope="class", autouse=True)
+ def _setup_class(self, request):
+ cls = request.cls
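+        # 'request.cls' is the test class itself, so attributes set here are
+        # shared by every test in the class -- the pytest counterpart of the
+        # old setUpClass classmethod.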
+ cls.filepath = _shared_utils.get_data_path(
["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"]
)
cls.phenom_id = "Carbon Dioxide"
@@ -390,27 +387,28 @@ def test_basic_load(self):
ds = nc.Dataset(self.filepath)
result = iris.load_cube(ds, self.phenom_id)
# It should still be open (!)
- self.assertTrue(ds.isopen())
+ assert ds.isopen()
ds.close()
# Check that result is just the same as a 'direct' load.
- self.assertEqual(self.expected, result)
+ assert self.expected == result
def test_path_string_load_same(self):
# Check that loading from a Path is the same as passing a filepath string.
# Apart from general utility, checks that we won't mistake a Path for a Dataset.
path = Path(self.filepath)
result = iris.load_cube(path, self.phenom_id)
- self.assertEqual(result, self.expected)
+ assert self.expected == result
-@tests.skip_data
-class TestDatasetAndPathSaves(tests.IrisTest):
- @classmethod
- def setUpClass(cls):
+@_shared_utils.skip_data
+class TestDatasetAndPathSaves:
+ @pytest.fixture(scope="class", autouse=True)
+ def _setup_class(self, request, tmp_path_factory):
+ cls = request.cls
# Create a temp directory for transient test files.
- cls.temp_dir = tempfile.mkdtemp()
- cls.testpath = tests.get_data_path(
+ cls.temp_dir = tmp_path_factory.mktemp("test")
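+        # (tmp_path_factory directories are cleaned up by pytest, which makes
+        # the old tearDownClass/shutil.rmtree unnecessary.)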
+ cls.testpath = _shared_utils.get_data_path(
["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"]
)
# Load some test data for save testing.
@@ -419,12 +417,7 @@ def setUpClass(cls):
testdata = sorted(testdata, key=lambda cube: cube.name())
cls.testdata = testdata
- @classmethod
- def tearDownClass(cls):
- # Destroy the temp directory.
- shutil.rmtree(cls.temp_dir)
-
- def test_basic_save(self):
+ def test_basic_save(self, request):
# test saving to a Dataset, in place of a filepath spec.
# NOTE that this requires 'compute=False', as delayed saves can only operate on
# a closed file.
@@ -433,7 +426,7 @@ def test_basic_save(self):
filepath_direct = f"{self.temp_dir}/tmp_direct.nc"
iris.save(self.testdata, filepath_direct)
# Check against test-specific CDL result file.
- self.assertCDL(filepath_direct)
+ _shared_utils.assert_CDL(request, filepath_direct)
# Save same data indirectly via a netcdf dataset.
filepath_indirect = f"{self.temp_dir}/tmp_indirect.nc"
@@ -444,19 +437,17 @@ def test_basic_save(self):
# Do some very basic sanity checks on the resulting Dataset.
# It should still be open (!)
- self.assertTrue(nc_dataset.isopen())
- self.assertEqual(
- ["time", "levelist", "latitude", "longitude"],
- list(nc_dataset.dimensions),
+ assert nc_dataset.isopen()
+ assert ["time", "levelist", "latitude", "longitude"] == list(
+ nc_dataset.dimensions
)
- self.assertEqual(
- ["co2", "time", "levelist", "latitude", "longitude", "lnsp"],
- list(nc_dataset.variables),
+ assert ["co2", "time", "levelist", "latitude", "longitude", "lnsp"] == list(
+ nc_dataset.variables,
)
nc_dataset.close()
# Check the saved file against the same CDL as the 'normal' save.
- self.assertCDL(filepath_indirect)
+ _shared_utils.assert_CDL(request, filepath_indirect)
# Confirm that cube content is however not yet written.
ds = nc.Dataset(filepath_indirect)
@@ -484,7 +475,7 @@ def test_computed_delayed_save__fail(self):
with pytest.raises(ValueError, match=msg):
iris.save(self.testdata, nc_dataset, saver="nc")
- def test_path_string_save_same(self):
+ def test_path_string_save_same(self, request):
# Ensure that save to a Path is the same as passing a filepath string.
# Apart from general utility, checks that we won't mistake a Path for a Dataset.
tempfile_fromstr = f"{self.temp_dir}/tmp_fromstr.nc"
@@ -492,18 +483,18 @@ def test_path_string_save_same(self):
tempfile_frompath = f"{self.temp_dir}/tmp_frompath.nc"
path = Path(tempfile_frompath)
iris.save(self.testdata, path)
- self.assertCDL(tempfile_fromstr)
- self.assertCDL(tempfile_frompath)
+ _shared_utils.assert_CDL(request, tempfile_fromstr)
+ _shared_utils.assert_CDL(request, tempfile_frompath)
-@tests.skip_data
-class TestWarningRepeats(tests.IrisTest):
+@_shared_utils.skip_data
+class TestWarningRepeats:
def test_warning_repeats(self):
"""Confirm Iris load does not break Python duplicate warning handling."""
# units.nc is designed for testing Iris' 'ignoring invalid units'
# warning; it contains two variables with invalid units, producing two
# unique warnings (due to two different messages).
- file_path = tests.get_data_path(("NetCDF", "testing", "units.nc"))
+ file_path = _shared_utils.get_data_path(("NetCDF", "testing", "units.nc"))
def _raise_warning() -> None:
# Contain in function so warning always has identical line number.
@@ -530,7 +521,3 @@ def _raise_warning() -> None:
_ = iris.load(file_path)
_raise_warning()
assert len(record) == 3
-
-
-if __name__ == "__main__":
- tests.main()
diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py
index b2b9b6d4e1..1f0183374c 100644
--- a/lib/iris/tests/integration/netcdf/test_self_referencing.py
+++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py
@@ -4,25 +4,20 @@
# See LICENSE in the root of the repository for full licensing details.
"""Integration tests for iris#3367 - loading a self-referencing NetCDF file."""
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip
-
-import os
-import tempfile
-from unittest import mock
-
import numpy as np
+import pytest
import iris
from iris.fileformats.netcdf import _thread_safe_nc
+from iris.tests import _shared_utils
from iris.warnings import IrisCfMissingVarWarning
-@tests.skip_data
-class TestCMIP6VolcelloLoad(tests.IrisTest):
- def setUp(self):
- self.fname = tests.get_data_path(
+@_shared_utils.skip_data
+class TestCMIP6VolcelloLoad:
+ @pytest.fixture(autouse=True)
+ def _setup(self):
+ self.fname = _shared_utils.get_data_path(
(
"NetCDF",
"volcello",
@@ -43,22 +38,18 @@ def test_cmip6_volcello_load_issue_3367(self):
"referenced by netCDF variable %r" % (areacello_str, volcello_str)
)
- with mock.patch("warnings.warn") as warn:
+ with pytest.warns(IrisCfMissingVarWarning, match=expected_msg):
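+            # pytest.warns applies 'match' as a regular-expression search on
+            # the warning message, replacing the mock-based call assertion.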
# ensure file loads without failure
cube = iris.load_cube(self.fname)
- warn.assert_has_calls(
- [mock.call(expected_msg, category=IrisCfMissingVarWarning)]
- )
# extra check to ensure correct variable was found
assert cube.standard_name == "ocean_volume"
-class TestSelfReferencingVarLoad(tests.IrisTest):
- def setUp(self):
- self.temp_dir_path = os.path.join(
- tempfile.mkdtemp(), "issue_3367_volcello_test_file.nc"
- )
+class TestSelfReferencingVarLoad:
+ @pytest.fixture(autouse=True)
+ def _setup(self, tmp_path):
+ self.temp_dir_path = tmp_path / "issue_3367_volcello_test_file.nc"
dataset = _thread_safe_nc.DatasetWrapper(self.temp_dir_path, "w")
dataset.createDimension("lat", 4)
@@ -110,17 +101,9 @@ def test_self_referencing_load_issue_3367(self):
"referenced by netCDF variable %r" % (areacello_str, volcello_str)
)
- with mock.patch("warnings.warn") as warn:
+ with pytest.warns(IrisCfMissingVarWarning, match=expected_msg):
# ensure file loads without failure
cube = iris.load_cube(self.temp_dir_path)
- warn.assert_called_with(expected_msg, category=IrisCfMissingVarWarning)
# extra check to ensure correct variable was found
assert cube.standard_name == "ocean_volume"
-
- def tearDown(self):
- os.remove(self.temp_dir_path)
-
-
-if __name__ == "__main__":
- tests.main()
diff --git a/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl b/lib/iris/tests/results/integration/netcdf/attributes/UmVersionAttribute/multiple_different_saves_on_variables.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl
rename to lib/iris/tests/results/integration/netcdf/attributes/UmVersionAttribute/multiple_different_saves_on_variables.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/UmVersionAttribute/multiple_same_saves_as_global.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl
rename to lib/iris/tests/results/integration/netcdf/attributes/UmVersionAttribute/multiple_same_saves_as_global.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/UmVersionAttribute/single_saves_as_global.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl
rename to lib/iris/tests/results/integration/netcdf/attributes/UmVersionAttribute/single_saves_as_global.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/AtmosphereSigma/save.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl
rename to lib/iris/tests/results/integration/netcdf/aux_factories/AtmosphereSigma/save.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/HybridPressure/save.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl
rename to lib/iris/tests/results/integration/netcdf/aux_factories/HybridPressure/save.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/SaveMultipleAuxFactories/hybrid_height_and_pressure.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl
rename to lib/iris/tests/results/integration/netcdf/aux_factories/SaveMultipleAuxFactories/hybrid_height_and_pressure.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/aux_factories/SaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/aux_factories/SaveMultipleAuxFactories/hybrid_height_cubes.cml
new file mode 100644
index 0000000000..4530b366ba
--- /dev/null
+++ b/lib/iris/tests/results/integration/netcdf/aux_factories/SaveMultipleAuxFactories/hybrid_height_cubes.cml
@@ -0,0 +1,79 @@
diff --git a/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml
deleted file mode 100644
index 975488f656..0000000000
--- a/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml
+++ /dev/null
@@ -1,131 +0,0 @@
diff --git a/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/basic_save.cdl b/lib/iris/tests/results/integration/netcdf/general/DatasetAndPathSaves/basic_save.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/basic_save.cdl
rename to lib/iris/tests/results/integration/netcdf/general/DatasetAndPathSaves/basic_save.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/path_string_save_same.cdl b/lib/iris/tests/results/integration/netcdf/general/DatasetAndPathSaves/path_string_save_same.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/general/TestDatasetAndPathSaves/path_string_save_same.cdl
rename to lib/iris/tests/results/integration/netcdf/general/DatasetAndPathSaves/path_string_save_same.cdl