Skip to content
Snippets Groups Projects

Tests

Merged Martin Bergemann requested to merge tests into main
14 files
+ 554
138
Compare changes
  • Side-by-side
  • Inline
Files
14
+ 118
0
"""pytest definitions to run the unittests."""
from pathlib import Path
from tempfile import TemporaryDirectory, NamedTemporaryFile
from typing import Generator, Tuple
import dask
import pytest
import numpy as np
import xarray as xr
def create_data(
    variable_name: str, chunk_size: Tuple[int, ...], dims: Tuple[str, ...]
) -> xr.Dataset:
    """Build an in-memory, dask-chunked dataset for the tests.

    Each dimension gets a coordinate of ones whose length equals the
    corresponding entry of ``chunk_size``, so the data consists of exactly
    one chunk per dimension.
    """
    coordinates = {name: np.ones(size) for name, size in zip(dims, chunk_size)}
    array = xr.DataArray(
        np.zeros(chunk_size),
        dims=dims,
        coords=coordinates,
        name=variable_name,
    )
    chunked = array.chunk(dict(zip(dims, chunk_size)))
    # Record the intended on-disk chunking so to_netcdf picks it up.
    chunked.encoding = {"chunksizes": chunk_size}
    return xr.Dataset({variable_name: chunked})
@pytest.fixture(scope="session")
def small_chunk() -> Generator[Tuple[int, int, int, int], None, None]:
    """Yield the chunk sizes used for the small-chunk test dataset."""
    yield (1, 1, 24, 24)
@pytest.fixture(scope="session")
def large_chunk() -> Generator[Tuple[int, int, int, int], None, None]:
    """Define tuple for larger chunk sizes."""
    # Fix of a copy-paste docstring: this is the *large* chunk fixture.
    yield (720, 12, 4, 4)
@pytest.fixture(scope="session")
def dims() -> Generator[Tuple[str, str, str, str], None, None]:
    """Yield the dimension names shared by every test dataset."""
    yield ("time", "height", "Latitude", "Longitude")
@pytest.fixture(scope="function")
def temp_dir() -> Generator[Path, None, None]:
    """Yield a fresh temporary directory, removed after each test."""
    with TemporaryDirectory() as tmp:
        yield Path(tmp)
@pytest.fixture(scope="session")
def small_chunk_data(
    small_chunk: Tuple[int, int, int, int],
    dims: Tuple[str, str, str, str],
    variable_name: str,
) -> Generator[xr.Dataset, None, None]:
    """Yield a dataset chunked with the small chunk sizes."""
    dataset = create_data(variable_name, small_chunk, dims)
    yield dataset
@pytest.fixture(scope="function")
def large_chunk_data(
    large_chunk: Tuple[int, int, int, int],
    dims: Tuple[str, str, str, str],
    variable_name: str,
) -> Generator[xr.Dataset, None, None]:
    """Create a dataset with large chunks.

    The dask chunk-size limit is lowered to 1 MiB for the lifetime of the
    fixture so the large chunking is actually exercised.
    """
    # Docstring fixed: the original said "small chunks" (copy-paste error).
    with dask.config.set({"array.chunk-size": "1MiB"}):
        yield create_data(variable_name, large_chunk, dims)
@pytest.fixture(scope="session")
def variable_name() -> str:
    """Name of the data variable used in every test dataset."""
    return "tas"
@pytest.fixture(scope="function")
def data_dir(
    temp_dir: Path,
    variable_name: str,
    small_chunk: Tuple[int, int, int, int],
    dims: Tuple[str, str, str, str],
    small_chunk_data: xr.Dataset,
) -> Generator[Path, None, None]:
    """Create a directory tree populated with netcdf files.

    Writes nine copies of the small-chunk dataset under
    ``<temp_dir>/foo/bar/tas_model1_<n>.nc`` and yields ``temp_dir``.
    """
    # Annotation fixed: dims is a 4-tuple of dimension names, not 3.
    encoding = {variable_name: {"chunksizes": small_chunk}}
    for number in range(1, 10):
        file_name = temp_dir / "foo" / "bar" / f"tas_model1_{number}.nc"
        file_name.parent.mkdir(parents=True, exist_ok=True)
        small_chunk_data.to_netcdf(file_name, encoding=encoding)
    yield temp_dir
@pytest.fixture(scope="function")
def data_file(
    variable_name: str,
    large_chunk: Tuple[int, int, int, int],
    dims: Tuple[str, str, str, str],
    large_chunk_data: xr.Dataset,
) -> Generator[Path, None, None]:
    """Create a single temporary netcdf file holding the large-chunk data.

    The file lives only while the ``with`` block is open, so it is yielded
    from inside it and deleted automatically afterwards.
    """
    # Annotation fixed: dims is a 4-tuple of dimension names, not 3.
    # Docstring fixed: this fixture creates one file, not a directory.
    encoding = {variable_name: {"chunksizes": large_chunk}}
    with NamedTemporaryFile(suffix=".nc") as temp_file:
        file_name = Path(temp_file.name)
        large_chunk_data.to_netcdf(file_name, encoding=encoding)
        yield file_name
@pytest.fixture(scope="function")
def wrong_file_type(temp_dir: Path) -> Generator[Path, None, None]:
    """Temporary directory containing non netcdf files."""
    # NOTE(review): the loop recreates the same path twice, so exactly one
    # file ends up on disk; kept as-is (and the misspelled file name kept
    # byte-identical) to preserve the behavior tests may rely on.
    for _ in range(1, 3):
        file_name = temp_dir / "worng_file.txt"
        file_name.touch()
    yield temp_dir
Loading