Skip to content
Snippets Groups Projects
Commit f0457aaa authored by Martin Bergemann's avatar Martin Bergemann :speech_balloon:
Browse files

Update tests

parent a7acbd8e
No related branches found
No related tags found
1 merge request!4Tests
Pipeline #19052 passed
......@@ -19,42 +19,42 @@ lint:
test_36:
<< : *py_test
before_script:
- conda create -q -n test python=3.6 pip -y
- conda run -n test python -m pip install .[test]
- conda create -q -p /tmp/test python=3.6 pip dask -y
- /tmp/test/bin/python -m pip install .[test]
script:
- conda run -n test pytest -vv
- /tmp/test/bin/python -m pytest -vv
test_37:
<< : *py_test
before_script:
- conda create -q -n test python=3.7 pip -y
- conda run -n test python -m pip install .[test]
- conda create -q -p /tmp/test python=3.7 pip dask -y
- /tmp/test/bin/python -m pip install .[test]
script:
- conda run -n test pytest -vv
- /tmp/test/bin/python -m pytest -vv
test_38:
<< : *py_test
before_script:
- conda create -q -n test python=3.8 pip
- conda run -n test python -m pip install .[test]
- conda create -q -p /tmp/test python=3.8 pip dask -y
- /tmp/test/bin/python -m pip install .[test]
script:
- conda run -n test pytest -vv
- /tmp/test/bin/python -m pytest -vv
test_39:
<< : *py_test
before_script:
- conda create -q -n test python=3.10 pip -y
- conda run -n test python -m pip install .[test]
- conda create -q -p /tmp/test python=3.9 pip dask -y
- /tmp/test/bin/python -m pip install .[test]
script:
- conda run -n test pytest -vv
- /tmp/test/bin/python -m pytest -vv
test_latest:
<< : *py_test
before_script:
- conda create -q -n test python=3.10 pip -y
- conda run -n test python -m pip install .[test]
- conda create -q -p /tmp/test python=3.10 pip dask -y
- /tmp/test/bin/python -m pip install .[test]
script:
- conda run -n test pytest -vv
- /tmp/test/bin/python -m pytest -vv
artifacts:
when: always
reports:
......
......@@ -97,7 +97,7 @@ def _save_dataset(
if not encoding and not override:
logger.debug("Chunk size already optimized for %s", file_name.name)
return
logger.debug("Saving file ot %s", file_name)
logger.debug("Saving file ot %s", str(file_name))
try:
dset.to_netcdf(
file_name,
......@@ -166,7 +166,7 @@ def rechunk_netcdf_file(
"""
input_path = Path(input_path).expanduser().absolute()
for input_file in _search_for_nc_files(input_path):
logger.info("Working on file: %s", input_file)
logger.info("Working on file: %s", str(input_file))
if output_path is None:
output_file = input_file
elif Path(output_path).expanduser().absolute().is_dir():
......@@ -176,7 +176,7 @@ def rechunk_netcdf_file(
output_file = Path(output_path)
output_file.parent.mkdir(exist_ok=True, parents=True)
try:
with xr.open_mfdataset(input_file, parallel=True) as nc_data:
with xr.open_mfdataset(str(input_file), parallel=True) as nc_data:
new_data, encoding = _rechunk_dataset(nc_data)
if encoding:
logger.debug(
......@@ -184,7 +184,9 @@ def rechunk_netcdf_file(
)
new_data = new_data.load()
except Exception as error:
logger.error("Error while processing file %s: %s", input_file, str(error))
logger.error(
"Error while processing file %s: %s", str(input_file), str(error)
)
continue
_save_dataset(
new_data,
......
0% Loading or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment