mpim-sw / libcdi / Commits / 7d075404

Commit 7d075404 authored Apr 12, 2011 by Thomas Jahns

Split out Fortran interface of PIO.

parent 1decd39a
Changes 15
.gitattributes
View file @ 7d075404
...
...
@@ -142,6 +142,7 @@ pioExamples/cdi_write_parallel.job -text
pioExamples/cdi_write_serial.job -text
pioExamples/myModel.c -text
pioExamples/myModel2003.F90 -text
pioExamples/pio_interface.inc -text
pioExamples/pio_write.F90 -text
pioExamples/write_nec_parallel.job -text
pioExamples/write_tornado_parallel.job -text
...
...
@@ -201,6 +202,8 @@ src/pio_dbuffer.c -text
src/pio_impl.h -text
src/pio_interface.c -text
src/pio_interface.h -text
src/pio_interface.inc -text
src/pio_interface_f.c -text
src/pio_interface_f.h -text
src/pio_mpinonb.c -text
src/pio_posixasynch.c -text
...
...
configure
View file @ 7d075404
...
...
@@ -24015,6 +24015,18 @@ if test x"$USE_MPI" = xyes; then :
$as_echo "#define USE_MPI 1" >>confdefs.h
for ac_func in MPI_Comm_f2c
do :
ac_fn_c_check_func "$LINENO" "MPI_Comm_f2c" "ac_cv_func_MPI_Comm_f2c"
if test "x$ac_cv_func_MPI_Comm_f2c" = x""yes; then :
cat >>confdefs.h <<_ACEOF
#define HAVE_MPI_COMM_F2C 1
_ACEOF
fi
done
fi
if test x"$USE_MPI" = xyes; then
USE_MPI_TRUE=
...
...
configure.ac
View file @ 7d075404
...
...
@@ -110,7 +110,9 @@ AC_ARG_ENABLE(mpi,AS_HELP_STRING([--enable-mpi],[Compile with MPI compiler [defa
AS_IF([test x"${enable_mpi}" = x"yes"],
[USE_MPI=yes])
AS_IF([test x"$USE_MPI" = xyes],
[AC_DEFINE([USE_MPI],[1],[parallel I/O requested and available])])
[AC_DEFINE([USE_MPI],[1],[parallel I/O requested and available])
AC_CHECK_FUNCS([MPI_Comm_f2c])
])
AM_CONDITIONAL([USE_MPI],[test x"$USE_MPI" = xyes])
# ----------------------------------------------------------------------
# Create the Fortran Interface via iso_c_binding module (Fortran 2003 Standard)
...
...
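The configure.ac hunk above adds an AC_CHECK_FUNCS test for MPI_Comm_f2c next to the USE_MPI define, and the comment that follows refers to building the Fortran interface through the Fortran 2003 iso_c_binding module. As a rough, hedged illustration of that mechanism (the module and routine names below are invented and not taken from libcdi):

MODULE pio_binding_sketch
  USE iso_c_binding, ONLY: c_int
  IMPLICIT NONE
  INTERFACE
     ! Hypothetical C routine exposed to Fortran; the real generated
     ! interface for libcdi/PIO is not shown in this commit view.
     SUBROUTINE example_pio_call(ival) BIND(C, name='example_pio_call')
       IMPORT :: c_int
       INTEGER(c_int), VALUE :: ival
     END SUBROUTINE example_pio_call
  END INTERFACE
END MODULE pio_binding_sketch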
pioExamples/Makefile.am
View file @ 7d075404
if USE_MPI
noinst_PROGRAMS = pio_write myModel myModel2003
endif
AM_CFLAGS = -I$(top_srcdir)/src
...
...
pioExamples/Makefile.in
View file @ 7d075404
...
...
@@ -34,8 +34,8 @@ PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
noinst_PROGRAMS = pio_write$(EXEEXT) myModel$(EXEEXT) \
	myModel2003$(EXEEXT)
@USE_MPI_TRUE@noinst_PROGRAMS = pio_write$(EXEEXT) myModel$(EXEEXT) \
@USE_MPI_TRUE@	myModel2003$(EXEEXT)
subdir = pioExamples
DIST_COMMON = README $(srcdir)/Makefile.am $(srcdir)/Makefile.in
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
...
...
pioExamples/pio_interface.inc 0 → 120000
View file @ 7d075404
../src/pio_interface.inc
\ No newline at end of file
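The entry above creates pioExamples/pio_interface.inc as a symbolic link (mode 120000) to src/pio_interface.inc, so the example programs can include the PIO Fortran declarations. The contents of that include file are not shown in this commit view; as a hedged sketch, an interface include of this kind typically carries declarations along these lines (the names simply mirror the calls made later in pio_write.F90):

! Hypothetical sketch only -- the actual src/pio_interface.inc is not shown here.
      INTEGER pioInit
      EXTERNAL pioInit
      EXTERNAL pioFinalize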
pioExamples/pio_write.F90
View file @ 7d075404
! mpxlf95_r -o cdi_write_parallel -I../src cdi_write_f.F90 -L../src -lcdi
!-qextname -L/sw/aix53/szip-2.1/lib -lsz -L/sw/aix61/netcdf-4.0.1-ibm/lib
! -lnetcdf -L/sw/aix61/hdf5-1.8.4-patch1/lib -lhdf5_hl -lhdf5 -lz
! TODO
! varId (:,:) RESTRICTED TO STREAMSIZES(1) module loadbalancing
! catch cases with nonsensical relation nnodes/nstreams/sizesstreams
...
...
@@ -11,55 +11,55 @@
!--------------------------------------------------------------------------
MODULE loadbalancing
CONTAINS
  SUBROUTINE mapProblems(problemSizes, problemMapping, nProblems, nCalculator)
    !... data dictionary
    IMPLICIT NONE
    INTEGER, INTENT(in)  :: nProblems
    INTEGER, INTENT(in)  :: problemSizes(nProblems)
    INTEGER, INTENT(out) :: problemMapping(nProblems)
    INTEGER, INTENT(in)  :: nCalculator
    INTEGER, PARAMETER :: INITVALUE1 = 99
    INTEGER, PARAMETER :: INITVALUE2 = 99
    INTEGER, PARAMETER :: TOLERANCE = 0
    INTEGER, PARAMETER :: NOINDEX = -1
    INTEGER, PARAMETER :: MIN = -99
    INTEGER :: sortedProblems(nProblems, 2)
    INTEGER :: load(nCalculator)
    REAL :: mean
    REAL :: capacityLeft
    INTEGER :: currCapacity, nextCapacity
    INTEGER :: currIndex, nextIndex
    INTEGER :: i, j
    !... init
    problemMapping = INITVALUE1
    load = INITVALUE2
    DO i = 1, nProblems
      sortedProblems(i, 1) = problemSizes(i)
      sortedProblems(i, 2) = i
    ENDDO
    CALL iQsort(sortedProblems)
    mean = SUM(problemSizes)/float(nCalculator)
    !... loop over problems, biggest problem first
    prbl: DO i = 1, nProblems
      !... 1. loop over PEs, find a capacity that fits
      fit: DO j = 1, nCalculator
        capacityLeft = ABS(mean - load(j) - sortedProblems(i, 1))
        IF (capacityLeft <= TOLERANCE) THEN
...
...
@@ -69,14 +69,14 @@ CONTAINS
          EXIT fit
        ENDIF
      ENDDO fit
      IF (problemMapping(sortedProblems(i, 2)) /= INITVALUE1) CYCLE prbl
      currCapacity = MIN
      currIndex = NOINDEX
      !... potential 2. loop over PEs, take PE with the least load
      leastload: DO j = 1, nCalculator
        nextIndex = j
        nextCapacity = mean - load(j)
...
...
@@ -85,29 +85,29 @@ CONTAINS
          currCapacity = nextCapacity
        ENDIF
      ENDDO leastload
      problemMapping(sortedProblems(i, 2)) = currIndex
      load(currIndex) = load(currIndex) + sortedProblems(i, 1)
    ENDDO prbl
  CONTAINS
    RECURSIVE SUBROUTINE iQsort(A)
      IMPLICIT NONE
      INTEGER, INTENT(inout) :: A(:,:)
      INTEGER :: pivot(1, 2), temp(1, 2)
      INTEGER :: n, pivotIndex, currIndex
      IF (SIZE(A, 2) /= 2) STOP 'MAIN: error: subroutine iQsort(A), dimension(A,2) has to be 2.'
      n = SIZE(A, 1)
      IF (n <= 1) RETURN
      pivot(1,:) = A(n,:)
      pivotIndex = n
      currIndex = 1
      DO WHILE (currIndex < pivotIndex)
        IF (A(currIndex, 1) < pivot(1, 1)) THEN
          temp = pivot
...
...
@@ -119,38 +119,38 @@ CONTAINS
          currIndex = currIndex + 1
        ENDIF
      ENDDO
      IF (pivotIndex /= 1) CALL iQsort(A(1:pivotIndex-1,:))
      IF (pivotIndex /= n) CALL iQsort(A(pivotIndex+1:n,:))
    END SUBROUTINE iQsort
  END SUBROUTINE mapProblems
END MODULE loadbalancing
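As an aside, before the diff continues with PROGRAM CDIWRITE, here is a minimal sketch of how the loadbalancing module above can be driven; the program name and the problem sizes are invented for illustration and this caller is not part of the commit:

PROGRAM mapproblems_demo
  USE loadbalancing
  IMPLICIT NONE
  INTEGER, PARAMETER :: nProblems = 5, nCalculator = 2
  INTEGER :: sizes(nProblems) = (/ 7, 3, 9, 1, 4 /)
  INTEGER :: mapping(nProblems)
  ! Distribute the five problem sizes over the two calculators.
  CALL mapProblems(sizes, mapping, nProblems, nCalculator)
  WRITE(0,*) 'problem -> calculator mapping:', mapping
END PROGRAM mapproblems_demo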
!---------------------------------------------------------
PROGRAM CDIWRITE
  USE loadbalancing
  IMPLICIT NONE
  INCLUDE 'cdi.inc'
#ifndef NOMPI
  include 'pio_interface.inc'
  INCLUDE 'mpif.h'
#endif
  INTEGER, PARAMETER :: dp = SELECTED_REAL_KIND(12, 307)
  INTEGER, PARAMETER :: MAXTYPES = 4
  INTEGER, PARAMETER :: MAXNODES = 249
  INTEGER, PARAMETER :: ddebug = 0
  INTEGER, PARAMETER :: NLON = 384   ! Number of longitudes 384
  INTEGER, PARAMETER :: NLAT = 192   ! Number of latitudes 192
  INTEGER, PARAMETER :: NLEV = 96    ! Number of levels 96
...
...
@@ -158,11 +158,11 @@ PROGRAM CDIWRITE
  INTEGER, PARAMETER :: NSTREAMS = 10   ! Number of files 7
  INTEGER, PARAMETER :: STREAMSIZES(NSTREAMS) = (/ 35, 35, 35, 35, 35, &
       35, 35, 35, 35, 35 /)
  CHARACTER*(*), PARAMETER :: STREAMNAMES(NSTREAMS) = (/ 'dat0.grb', &
       'dat1.grb', 'dat2.grb', 'dat3.grb', 'dat4.grb', 'dat5.grb', 'dat6.grb', &
       'dat7.grb', 'dat8.grb', 'dat9.grb' /)
  CHARACTER*(*), PARAMETER :: PTYPENAMES(7) = (/ &
       'PIO_NONE                       ', &
       'PIO_MPI_NONB                   ', &
...
...
@@ -171,37 +171,37 @@ PROGRAM CDIWRITE
       'PIO_POSIX_FPGUARD_SENDRECV     ', &
       'PIO_POSIX_FPGUARD_THREAD_REFUSE', &
       'PIO_POSIX_FPGUARD_THREAD       ' /)
  REAL(dp) :: LONS(NLON), LATS(NLAT), LEVS(NLEV)
  REAL(dp) :: var(NLON*NLAT*NLEV)
  INTEGER :: gridID, zaxisID, taxisID
  INTEGER, ALLOCATABLE :: vlistID(:), streamID(:)
  INTEGER, ALLOCATABLE :: varIDs(:,:)
  INTEGER :: sID, tsID, i, j, nmiss, status
  INTEGER :: pioComm, pioCollectorComm_NODE, rank, error
  INTEGER :: PTYPE
  INTEGER :: NNODES, nstreams_NODE, myNODE
  INTEGER, ALLOCATABLE :: streamsizes_NODE(:)
  CHARACTER(len=8), ALLOCATABLE :: streamnames_NODE(:)
  INTEGER, ALLOCATABLE :: streamfirsts_PE(:), streamlasts_PE(:)
  INTEGER :: collectingData
  INTEGER :: ncollectors_NODE
  REAL(dp) :: timerSTART, timerSTOP
  REAL(dp) :: sumOPEN, sumCLOSE, sumWRITE
  REAL(dp) :: sumMaxOPEN, sumMaxCLOSE, sumMaxWRITE
#ifndef NOMPI
  CALL MPI_INIT(error)
  CALL MPI_COMM_DUP(MPI_COMM_WORLD, pioComm, error)
  CALL MPI_COMM_RANK(pioComm, rank, error)
  CALL readArgs(PTYPE)
  IF (ddebug == 1) THEN
    WRITE(0,*) 'MAIN after readArgs: pe', rank, ', PTYPE=', PTYPE
  END IF
...
...
@@ -211,7 +211,7 @@ PROGRAM CDIWRITE
#endif
  collectingData = pioInit(PTYPE, pioComm, myNODE, NNODES, pioCollectorComm_NODE)
  IF (ddebug == 1) THEN
#ifndef NOMPI
    WRITE(0,*) 'MAIN after pioInit: pe', rank, ', NNODES=', NNODES, ', myNODE=', myNODE
...
...
@@ -220,13 +220,13 @@ PROGRAM CDIWRITE
         ', myNODE=', myNODE
#endif
  END IF
  IF (collectingData /= 1) THEN
    GO TO 1
  END IF
  CALL mapStreamsOnNodes
  IF (ddebug == 1) THEN
#ifndef NOMPI
    WRITE(0,*) 'MAIN after mapStreamsOnNodes: pe', rank, &
...
...
@@ -240,39 +240,39 @@ PROGRAM CDIWRITE
  END IF
  CALL mapVarsOnPes
  IF (ddebug == 1) THEN
#ifndef NOMPI
    WRITE(0,*) 'MAIN after mapVarsOnPes: pe', rank, ', streamfirsts_PE=', &
         streamfirsts_PE, 'streamlasts_PE=', streamlasts_PE
#else
    WRITE(0,*) 'MAIN after mapVarsOnPes: streamfirsts_PE=', &
         streamfirsts_PE, 'streamlasts_PE=', streamlasts_PE
#endif
  END IF
  CALL initCoords()
  ALLOCATE(vlistID(nstreams_NODE), stat=status)
  ALLOCATE(streamID(nstreams_NODE), stat=status)
  ALLOCATE(varIDs(streamsizes_NODE(1), nstreams_NODE), stat=status)
  ! Create a regular lon/lat grid
  gridID = gridCreate(GRID_LONLAT, NLON*NLAT)
  CALL gridDefXsize(gridID, NLON)
  CALL gridDefYsize(gridID, NLAT)
  CALL gridDefXvals(gridID, lons)
  CALL gridDefYvals(gridID, lats)
  ! Create a pressure level Z-axis
  zaxisID = zaxisCreate(ZAXIS_PRESSURE, NLEV)
  CALL zaxisDefLevels(zaxisID, levs)
  ! Create a variable list
  DO i = 1, nstreams_NODE
    vlistID(i) = vlistCreate()
  END DO
  ! Define the variables and the variable names
  DO i = 1, nstreams_NODE
    DO j = 1, streamsizes_NODE(1)
...
...
@@ -280,27 +280,27 @@ PROGRAM CDIWRITE
      CALL vlistDefVarName(vlistID(i), varIDs(j, i), "varname")
    END DO
  END DO
  ! Create a Time axis
  taxisID = taxisCreate(TAXIS_ABSOLUTE)
  ! Assign the Time axis to the variable list
  DO i = 1, nstreams_NODE
    CALL vlistDefTaxis(vlistID(i), taxisID)
  END DO
  sumOPEN  = 0.0
  sumCLOSE = 0.0
  sumWRITE = 0.0
#ifndef NOMPI
  timerSTART = MPI_WTIME()
#else
  CALL CPU_TIME(timerSTART)
#endif
  ! Open files and create a dataset in GRB format
  DO i = 1, nstreams_NODE
    streamID(i) = streamOpenWrite(streamnames_NODE(i), FILETYPE_GRB)
    IF (streamID(i) < 0) THEN
...
...
@@ -312,22 +312,22 @@ PROGRAM CDIWRITE
#endif
    END IF
  END DO
#ifndef NOMPI
  timerSTOP = MPI_WTIME()
#else
  CALL CPU_TIME(timerSTOP)
#endif
  sumOPEN = timerSTOP - timerSTART
  CALL initVar(var, 1, 1)
  ! Assign the variable list to the dataset
  DO i = 1, nstreams_NODE
    CALL streamDefVlist(streamID(i), vlistID(i))
  END DO
  nmiss = 0
  ! Loop over the number of time steps
...
...
@@ -345,18 +345,18 @@ PROGRAM CDIWRITE
#else
        CALL CPU_TIME(timerSTART)
#endif
        ! Write var
        CALL streamWriteVar(streamID(i), varIDs(sID, i), var, nmiss)
#ifndef NOMPI
        timerSTOP = MPI_WTIME()
#else
        CALL CPU_TIME(timerSTOP)
#endif
        sumWRITE = sumWRITE + timerSTOP - timerSTART
      END DO
    END DO
  END DO
...
...
@@ -367,56 +367,56 @@ PROGRAM CDIWRITE
  CALL CPU_TIME(timerSTART)
#endif
  ! Close the output stream
  DO i = 1, nstreams_NODE
    CALL streamClose(streamID(i))
  END DO
#ifndef NOMPI
  timerSTOP = MPI_WTIME()
#else
  CALL CPU_TIME(timerSTOP)
#endif
  sumCLOSE = timerSTOP - timerSTART
  ! Destroy the objects
  DO i = 1, nstreams_NODE
    CALL vlistDestroy(vlistID(i))
  END DO
  CALL taxisDestroy(taxisID)
  CALL zaxisDestroy(zaxisID)
  CALL gridDestroy(gridID)
  DEALLOCATE(streamsizes_NODE, stat=status)
  DEALLOCATE(streamnames_NODE, stat=status)
  DEALLOCATE(streamfirsts_PE, stat=status)
  DEALLOCATE(streamlasts_PE, stat=status)
  DEALLOCATE(vlistID, stat=status)
  DEALLOCATE(streamID, stat=status)
  DEALLOCATE(varIDs, stat=status)
1 CALL pioFinalize()
#ifndef NOMPI
  IF (ddebug == 1) THEN
    WRITE(0,*) 'MAIN : pe', rank, ' at label "finish"'
  END IF
  Call MPI_REDUCE(sumOPEN, sumMaxOPEN, 1, MPI_DOUBLE_PRECISION, &
       MPI_MAX, 0, pioComm, error)
  CALL MPI_REDUCE(sumWRITE, sumMaxWRITE, 1, MPI_DOUBLE_PRECISION, &
       MPI_MAX, 0, pioComm, error)
  CALL MPI_REDUCE(sumCLOSE, sumMaxCLOSE, 1, MPI_DOUBLE_PRECISION, &
       MPI_MAX, 0, pioComm, error)
  IF (rank == 0) THEN
    WRITE(0,*) 'MAIN: NNODES = ', NNODES, &
         ', ncollectors_NODE = ', ncollectors_NODE, &
         ', PTYPE = ', PTYPENAMES(PTYPE + 1)
    WRITE(0,*) '------------------------------------------------------------------'
    WRITE(0,*) 'MAIN: Maximum open time:', sumMaxOPEN, 's'
    WRITE(0,*) 'MAIN: Maximum write time:', sumMaxWRITE, 's'
...
...
@@ -425,72 +425,72 @@ PROGRAM CDIWRITE
  END IF
  FLUSH(0)
  CALL MPI_COMM_FREE(pioComm, error)
  CALL MPI_FINALIZE(error)
#else
  WRITE(0,*) 'MAIN: ptype = SERIAL'
  WRITE(0,*) '------------------------------------------------------------------'
  WRITE(0,*) 'MAIN: Open time:', sumOPEN, 's'
  WRITE(0,*) 'MAIN: Write time:', sumWRITE, 's'
  WRITE(0,*) 'MAIN: Close time:', sumCLOSE, 's'
  WRITE(0,*) '------------------------------------------------------------------'
  FLUSH(0)
#endif
CONTAINS
  !********************************************
#ifndef NOMPI
  SUBROUTINE readArgs(argMode)
    INTEGER, INTENT(OUT) :: argMode
    INTEGER :: numarg, inum, length
#ifndef __SX__
    INTEGER :: COMMAND_ARGUMENT_COUNT
#else
    INTEGER :: IARGC
#endif
    CHARACTER(len=32) :: arg
    IF (rank == 0) THEN