Skip to content
Snippets Groups Projects
Commit c2dfa06f authored by Sergey Kosukhin's avatar Sergey Kosukhin
Browse files

Extract distributed array test to a separate script.

parent 7380861c
No related branches found
No related tags found
2 merge requests!34Version 2.2.0,!13Consolidation with CDI-PIO (develop)
Showing with 65 additions and 5 deletions
......@@ -46,6 +46,7 @@ Makefile.in
/tests/pio_cksum_writer.test
/tests/pio_write_deco2d_run_parallel.test
/tests/pio_write_deco2d_run_serial
/tests/pio_write_dist_array.test
/tests/pio_write_run_parallel.test
/tests/pio_write_run_serial
/tests/test_byteswap_run
......
......@@ -693,6 +693,7 @@ AC_CONFIG_FILES([
tests/pio_cksum_writer.test
tests/pio_write_deco2d_run_parallel.test
tests/pio_write_deco2d_run_serial
tests/pio_write_dist_array.test
tests/pio_write_run_parallel.test
tests/pio_write_run_serial
tests/test_byteswap_run
......
......@@ -34,6 +34,7 @@ TESTS += \
pio_cksum_nc4.test \
pio_cksum_writer.test \
pio_write_deco2d_run_parallel.test \
pio_write_dist_array.test \
pio_write_run_parallel.test \
test_resource_copy_mpi_run.test
......@@ -49,7 +50,8 @@ pio_cksum_nc2.log: pio_cksum_nc.log
pio_cksum_nc4.log: pio_cksum_nc2.log
pio_cksum_writer.log: pio_cksum_nc4.log
pio_write_deco2d_run_parallel.log: pio_cksum_writer.log
pio_write_run_parallel.log: pio_write_deco2d_run_parallel.log
pio_write_dist_array.log: pio_write_deco2d_run_parallel.log
pio_write_run_parallel.log: pio_write_dist_array.log
test_resource_copy_mpi_run.log: pio_write_run_parallel.log
endif !with_concurrent_mpi_checks
......
......@@ -4,6 +4,7 @@ pio_write_args="-p PIO_ASYNCH -w 3"
mpi_task_num=7
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -4,6 +4,7 @@ pio_write_args="-p PIO_FPGUARD -w 3"
mpi_task_num=6
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -5,6 +5,7 @@ mpi_task_num=8
suffix=grb2
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -4,3 +4,4 @@ pio_write_args="-p PIO_MPI_FW_AT_ALL -w 2"
mpi_task_num=5
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
......@@ -4,3 +4,4 @@ pio_write_args="-p PIO_MPI_FW_AT_REBLOCK -w 2"
mpi_task_num=5
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
......@@ -4,3 +4,4 @@ pio_write_args="-p PIO_MPI_FW_ORDERED -w 2"
mpi_task_num=5
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
......@@ -4,6 +4,7 @@ pio_write_args="-p PIO_MPI -w 2"
mpi_task_num=4
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -5,6 +5,7 @@ pio_write_args="-w 2 -qtaxis-type=relative"
mpi_task_num=5
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -5,6 +5,7 @@ pio_write_args="-w 3 -qtaxis-type=relative"
mpi_task_num=8
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -5,6 +5,7 @@ pio_write_args="-w 3 -qtaxis-type=relative"
mpi_task_num=7
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
......@@ -5,6 +5,7 @@ pio_write_args="-p PIO_WRITER -w 4"
mpi_task_num=8
. ./pio_write_run_parallel.test
( . ./pio_write_dist_array.test ) || test $? -eq 77
#
# Local Variables:
# mode: sh
......
#!@SHELL@
#
# Template for tests/pio_write_dist_array.test (configure substitutes the
# @...@ placeholders).  Runs pio_write_parallel with the distributed-array
# (PPM dist grid) variation and verifies the two written files with
# cksum_read.  Sourced by the pio_cksum_* / pio_write_* driver scripts,
# which may pre-set ${suffix}, ${mpi_task_num}, ${pio_write_args} and
# ${tool_wrap} before sourcing.  Exit status 77 means "skip" (automake
# test-suite convention).
#
# Skip entirely when MPI support was not configured in
# (@ENABLE_MPI_FALSE@ expands to '#' when MPI is enabled).
@ENABLE_MPI_FALSE@exit 77
# Skip when the MPI launcher is the 'true' stub, i.e. no usable launcher.
test 'x@MPI_LAUNCH@' != xtrue || exit 77
# Skip unless PPM distributed-array support is available.
test 'x@HAVE_PPM_DIST_ARRAY@' = xyes || exit 77
# Output files are named after this script's basename.
prefix=${0##*/}
# Default to GRIB output unless the sourcing driver already set ${suffix}.
suffix=${suffix-grb}
# Skip when the requested output format was disabled at configure time;
# unknown suffixes are a hard error (exit 1), not a skip.
case ${suffix} in
grb)
test 'x@ENABLE_GRIB@' = xyes || exit 77
;;
grb2)
@HAVE_LIBGRIB_API_FALSE@exit 77
;;
nc|nc2)
test 'x@ENABLE_NETCDF@' = xyes || exit 77
;;
nc4)
test 'x@ENABLE_NETCDF@' = xyes || exit 77
test 'x@ENABLE_NC4@' = xyes || exit 77
;;
*)
echo "Unsupported suffix '${suffix}'" >&2
exit 1
;;
esac
# Driver may override the number of MPI tasks; default to 4.
mpi_task_num=${mpi_task_num-4}
# The pio_write variation under test: curvilinear grid backed by a
# distributed array.
variation='-qcreate-curvilinear-grid -quse-dist-grid'
echo "creating data with ${variation}" >&2
# Generate the test data in parallel under libtool so the uninstalled
# binary picks up its libraries.  ${tool_wrap} optionally inserts a
# wrapper (e.g. valgrind); -s 7 presumably selects a setup/seed for
# pio_write_parallel — confirm against that program's option parsing.
@top_builddir@/libtool --mode=execute \
@MPI_LAUNCH@ \
-n ${mpi_task_num} ${tool_wrap} ./pio_write_parallel -b ${prefix} -f ${suffix} ${pio_write_args} -s 7 ${variation} || exit 1
# Verify each written data file against its recorded checksum file.
echo "checking ${prefix}_0.${suffix}" >&2
@top_builddir@/libtool --mode=execute \
${tool_wrap} ./cksum_read ${prefix}_0.${suffix} ${prefix}_0.cksum || exit 1
echo "checking ${prefix}_1.${suffix}" >&2
@top_builddir@/libtool --mode=execute \
${tool_wrap} ./cksum_read ${prefix}_1.${suffix} ${prefix}_1.cksum || exit 1
#
# Local Variables:
# mode: sh
# End:
#
......@@ -49,10 +49,6 @@ for variation in "" $variations ; do
test_variation "$variation"
done
if test 'x@HAVE_PPM_DIST_ARRAY@' = xyes; then
test_variation "-qcreate-curvilinear-grid -quse-dist-grid"
fi
#
# Local Variables:
# mode: sh
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment