#include "CPP_EEOPTIONS.h"

CBOP
C     !ROUTINE: INI_PROCS

C     !INTERFACE:
      SUBROUTINE INI_PROCS

C     !DESCRIPTION:
C     *==========================================================*
C     | SUBROUTINE INI\_PROCS
C     | o Initialise multiple concurrent processes environment.
C     *==========================================================*
C     | Under MPI this routine calls various MPI service routines
C     | that map the model grid to MPI processes. The information
C     | is then stored in a common block for later use.
C     | Note: This routine can also be compiled with CPP
C     | directives set so that no multi-processing is initialised.
C     | This is OK and should work fine.
C     *==========================================================*

C     !USES:
      IMPLICIT NONE
C     === Global data ===
#include "SIZE.h"
#include "EEPARAMS.h"
#include "EESUPPORT.h"

#ifdef ALLOW_USE_MPI
C     !FUNCTIONS:

C     !LOCAL VARIABLES:
C     === Local variables ===
C     msgBuf         :: IO buffer
C     myThid         :: Dummy thread id
C     mpiRC          :: Error code reporting variable used with MPI.
C     mpiGridSpec    :: No. of processes in X and Y.
C     mpiPeriodicity :: Flag indicating XY periodicity to MPI.
C     arrElSize      :: Size of an array element in bytes used to define
C                       MPI datatypes for communication operations.
C     arrElSep       :: Separation in units of array elements between
C                       blocks to be communicated.
C     elCount        :: No. of blocks that are associated with MPI datatype.
C     elLen          :: Length of an MPI datatype in terms of preexisting
C                       datatype.
C     elStride       :: Distance between starting location of elements in
C                       an MPI datatype - can be bytes or datatype units.
      INTEGER mpiRC
      INTEGER mpiGridSpec(2)
      INTEGER mpiPeriodicity(2)
      INTEGER mpiLProcNam
      CHARACTER*(MPI_MAX_PROCESSOR_NAME) mpiProcNam
      INTEGER arrElSize
      INTEGER arrElSep
      INTEGER elCount
      INTEGER elLen
      INTEGER elStride
      INTEGER np, pId, itemp(2)
      INTEGER ierr
      INTEGER iTmp, jTmp
      CHARACTER*(23) fmtStr
#endif /* ALLOW_USE_MPI */
      CHARACTER*(MAX_LEN_MBUF) msgBuf
      INTEGER myThid
CEOP

C--   Default values set to single processor case
C     pid[W-SE] are the MPI process id of the neighbor processes.
C     A process can be its own neighbor!
      myThid      = 1
      myPid       = 0
      nProcs      = 1
      myPx        = 1
      myPy        = 1
      myXGlobalLo = 1
      myYGlobalLo = 1
      pidW        = 0
      pidE        = 0
      pidN        = 0
      pidS        = 0
c     errorMessageUnit    = 0
c     standardMessageUnit = 6

      IF ( usingMPI ) THEN
#ifdef ALLOW_USE_MPI
C--
C--   MPI style full multiple-process initialisation
C--   ==============================================

C--    Arrange MPI processes on a cartesian grid
C      Set variables indicating which MPI process is to the north,
C      south, east, west, south-west, south-east, north-west
C      and north-east of me e.g.
C
C      Plan view of model domain centered on process ME
C      ================================================
C
C            :         :         :        :
C            :         :         :        :
C            :         :         :        :
C       .....------------------------------.....
C            |         |         |        |
C            |  NW     |   N     |  NE    |
C            |         |         |        |
C       .....------------------------------.....
C            |         |         |        |
C            |  W      |   ME    |  E     |
C            |         |         |        |
C       .....------------------------------.....
C            |         |         |        |
C            |  SW     |   S     |  SE    |
C            |         |         |        |
C       .....------------------------------.....
C  Y         :         :         :        :
C / \        :         :         :        :
C  |         :         :         :        :
C  |
C  |----> X
C
C--    Set default MPI communicator to XY processor grid
       mpiGridSpec(1) = nPx
       mpiGridSpec(2) = nPy
C      Could be periodic in X and/or Y - set at run time or compile time!
       mpiPeriodicity(1) = _mpiTRUE_
       mpiPeriodicity(2) = _mpiTRUE_
#ifdef CAN_PREVENT_X_PERIODICITY
#ifndef ALWAYS_PREVENT_X_PERIODICITY
       IF ( notUsingXPeriodicity ) THEN
#endif
        mpiPeriodicity(1) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_X_PERIODICITY
       ENDIF
#endif
#endif /* CAN_PREVENT_X_PERIODICITY */
#ifdef  CAN_PREVENT_Y_PERIODICITY
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
       IF ( notUsingYPeriodicity ) THEN
#endif
        mpiPeriodicity(2) = _mpiFALSE_
#ifndef ALWAYS_PREVENT_Y_PERIODICITY
       ENDIF
#endif
#endif /* CAN_PREVENT_Y_PERIODICITY */
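C      The CPP flags above work as follows: CAN_PREVENT_X_PERIODICITY
C      makes the X wrap-around switchable at run time through the
C      notUsingXPeriodicity flag (presumably set via the "eedata"
C      namelist), while ALWAYS_PREVENT_X_PERIODICITY removes it
C      unconditionally; the Y flags behave the same way.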

       CALL MPI_CART_CREATE(
     I  MPI_COMM_MODEL,2,mpiGridSpec,mpiPeriodicity,_mpiTRUE_,
     O  mpiComm, mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_CART_CREATE return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF

C--    Get my location on the grid
       CALL MPI_CART_COORDS( mpiComm, mpiMyId, 2, mpiGridSpec, mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_CART_COORDS return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF
       myPid = mpiMyId
       mpiPx = mpiGridSpec(1)
       mpiPy = mpiGridSpec(2)
       mpiXGlobalLo = 1 + sNx*nSx*(mpiPx)
       mpiYGlobalLo = 1 + sNy*nSy*(mpiPy)
       myXGlobalLo  = mpiXGlobalLo
       myYGlobalLo  = mpiYGlobalLo
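C      Each process owns nSx*nSy tiles of sNx*sNy interior points, so
C      its global origin follows directly from its grid coordinates.
C      For example (illustrative numbers only): with sNx=30 and nSx=2,
C      the process at mpiPx=1 gets myXGlobalLo = 1 + 30*2*1 = 61.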

C--   To speed up mpi gather and scatter routines, myXGlobalLo
C     and myYGlobalLo from each process are transferred to
C     a common block array.  This allows process 0 to know
C     the location of the domains controlled by each process.
       DO np = 1, nPx*nPy
          itemp(1) = myXGlobalLo
          itemp(2) = myYGlobalLo
          pId = np - 1
          CALL MPI_BCAST(itemp, 2, MPI_INTEGER, pId,
     &         MPI_COMM_MODEL, ierr)
          mpi_myXGlobalLo(np) = itemp(1)
          mpi_myYGlobalLo(np) = itemp(2)
       ENDDO
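C      In pass np the process of rank np-1 acts as broadcast root, so
C      MPI_BCAST overwrites itemp on every other rank with the root's
C      tile origin; after the loop all ranks hold the full origin map.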

       myPx = mpiPx+1
       myPy = mpiPy+1
C--    Get MPI id for neighboring procs.
       mpiGridSpec(1) = mpiPx-1
       IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &   .AND. mpiGridSpec(1) .LT. 0 )
     &  mpiGridSpec(1) = nPx-1
       mpiGridSpec(2) = mpiPy

#ifdef ALLOW_NEST_CHILD
       IF ( useNEST_CHILD ) THEN
        IF ( mpiPeriodicity(1) .EQ. _mpiFALSE_
     &       .AND. mpiGridSpec(1) .LT. 0 )
     &       mpiGridSpec(1) = 0
       ENDIF
#endif /* ALLOW_NEST_CHILD */

       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidW , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_CART_RANK (pidW) return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF
       pidW = mpiPidW
       mpiGridSpec(1) = mpiPx+1
       IF ( mpiPeriodicity(1) .EQ. _mpiTRUE_
     &   .AND. mpiGridSpec(1) .GT. nPx-1 )
     &  mpiGridSpec(1) = 0
       mpiGridSpec(2) = mpiPy

#ifdef ALLOW_NEST_CHILD
       IF ( useNEST_CHILD ) THEN
        IF ( mpiPeriodicity(1) .EQ. _mpiFALSE_
     &    .AND. mpiGridSpec(1) .GT. nPx-1 )
     &     mpiGridSpec(1) = nPx-1
       ENDIF
#endif /* ALLOW_NEST_CHILD */

       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidE , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_CART_RANK (pidE) return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF
       pidE = mpiPidE
       mpiGridSpec(1) = mpiPx
       mpiGridSpec(2) = mpiPy-1
       IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &   .AND. mpiGridSpec(2) .LT. 0 )
     &  mpiGridSpec(2) = nPy - 1
       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidS , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_CART_RANK (pidS) return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF
       pidS = mpiPidS
       mpiGridSpec(1) = mpiPx
       mpiGridSpec(2) = mpiPy+1
       IF ( mpiPeriodicity(2) .EQ. _mpiTRUE_
     &   .AND. mpiGridSpec(2) .GT. nPy-1 )
     &  mpiGridSpec(2) = 0
       CALL MPI_CART_RANK( mpiComm, mpiGridSpec, mpiPidN , mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_CART_RANK (pidN) return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF
       pidN = mpiPidN

C--    Print summary of processor mapping on standard output
       CALL MPI_GET_PROCESSOR_NAME( mpiProcNam, mpiLProcNam, mpiRC )
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &        'S/R INI_PROCS: MPI_GET_PROCESSOR_NAME return code',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF
       WRITE(msgBuf,'(A)')
     &   '======= Starting MPI parallel Run ========='
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_BOTH , myThid )
       WRITE(msgBuf,'(A,I3,A,A)') ' My Processor Name (len:',
     &  mpiLProcNam, ' ) = ', mpiProcNam(1:mpiLProcNam)
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )
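C      The summary below computes its integer field widths at run time
C      (1 + INT(LOG10(n)) digits, with a floor value) so the output
C      lines up for any processor count; e.g. nPx*nPy = 1000 yields
C      I4 fields for the grid location.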
       iTmp = MAX(3,1 + INT(LOG10(DFLOAT(nPx*nPy))))
       WRITE(fmtStr,'(4(A,I1),A)')
     &  '(A,I',iTmp,',A,I',iTmp,',A,I',iTmp,',A,I',iTmp,',A)'
       WRITE(msgBuf,fmtStr) ' Located at (', mpiPx,',',mpiPy,
     &  ') on processor grid (0:',nPx-1,',0:',nPy-1,')'
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )
       iTmp = MAX(6,1 + INT(LOG10(DFLOAT(nPx*sNx*nSx))))
       jTmp = MAX(6,1 + INT(LOG10(DFLOAT(nPy*sNy*nSy))))
       WRITE(fmtStr,'(4(A,I1),A)')
     &  '(A,I',iTmp,',A,I',jTmp,',A,I',iTmp,',A,I',jTmp,',A)'
       WRITE(msgBuf,fmtStr) ' Origin at  (',
     &  mpiXGlobalLo,',',mpiYGlobalLo,
     &  ') on global grid (1:',nPx*sNx*nSx,',1:',nPy*sNy*nSy,')'
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )
       iTmp = MAX(4,1 + INT(LOG10(DFLOAT(nPx*nPy))))
       WRITE(fmtStr,'(2(A,I1),A)') '(A,I',iTmp,'.',iTmp,')'
       WRITE(msgBuf,fmtStr) ' North neighbor = processor ', mpiPidN
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )
       WRITE(msgBuf,fmtStr) ' South neighbor = processor ', mpiPidS
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )
       WRITE(msgBuf,fmtStr) '  East neighbor = processor ', mpiPidE
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )
       WRITE(msgBuf,fmtStr) '  West neighbor = processor ', mpiPidW
       CALL PRINT_MESSAGE( msgBuf, standardMessageUnit,
     &                     SQUEEZE_RIGHT , myThid )

C--    Create MPI types for transfer of array edges.
C--    Four and eight byte primitive (one block only) datatypes.
C--    These are common to all threads in the process.
C      Notes:
C      ======
C      1. The datatypes MPI_REAL4 and MPI_REAL8 are usually predefined.
C      If they are not defined code must be added to create them -
C      the MPI standard leaves optional whether they exist.
C      2. Per thread datatypes that handle all the edges for a thread
C      are defined based on the type defined here.

C--    xFace datatypes (east<-->west messages)
C--
C      xFace (y=constant) for XY arrays with real*4 declaration.
       arrElSep  = (sNx+OLx*2)
       elCount   = sNy+OLy*2
       elLen     = OLx
       elStride  = arrElSep
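C      Layout sketch (as I read the tile dimensioning): an xFace
C      message takes a strip of halo points OLx wide from each of the
C      sNy+2*OLy rows, and consecutive rows sit (sNx+2*OLx) elements
C      apart in memory; hence the vector type below of elCount blocks,
C      elLen elements each, with stride elStride in element units.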
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL,
     &                       mpiTypeXFaceBlock_xy_r4, mpiRC)
#else
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL4,
     &                       mpiTypeXFaceBlock_xy_r4, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF

C      xFace (y=constant) for XY arrays with real*8 declaration.
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_DOUBLE_PRECISION,
     &                       mpiTypeXFaceBlock_xy_r8, mpiRC)
#else
       CALL MPI_TYPE_VECTOR(elCount,elLen,elStride,MPI_REAL8,
     &                       mpiTypeXFaceBlock_xy_r8, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_VECTOR (mpiTypeXFaceBlock_xy_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xy_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xy_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF

C      xFace (y=constant) for XYZ arrays with real*4 declaration.
       arrElSize = 4
       arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
       elCount   = Nr
       elLen     = 1
       elStride  = arrElSize*arrElSep
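C      MPI_TYPE_HVECTOR measures its stride in bytes (unlike
C      MPI_TYPE_VECTOR), so the element separation is scaled by
C      arrElSize here; the 3-D type is then Nr copies of the 2-D
C      xFace type, one per vertical level.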
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &                        mpiTypeXFaceBlock_xy_r4,
     &                       mpiTypeXFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF

C      xFace (y=constant) for XYZ arrays with real*8 declaration.
       arrElSize = 8
       elStride  = arrElSize*arrElSep
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &                        mpiTypeXFaceBlock_xy_r8,
     &                       mpiTypeXFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeXFaceBlock_xyz_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeXFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeXFaceBlock_xyz_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF

C--    yFace datatypes (north<-->south messages)
C--
C      yFace (x=constant) for XY arrays with real*4 declaration
       elCount  = OLy*(sNx+OLx*2)
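C      In Fortran's column-major layout the x index varies fastest, so
C      OLy complete rows of (sNx+2*OLx) points form one contiguous
C      block of memory; MPI_TYPE_CONTIGUOUS is therefore sufficient
C      and no stride is needed for the 2-D yFace type.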
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL,
     &                          mpiTypeYFaceBlock_xy_r4, mpiRC)
#else
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL4,
     &                          mpiTypeYFaceBlock_xy_r4, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
C      yFace (x=constant) for XY arrays with real*8 declaration
#if (defined (TARGET_SGI) || defined (TARGET_AIX) || defined(TARGET_LAM))
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_DOUBLE_PRECISION,
     &                          mpiTypeYFaceBlock_xy_r8, mpiRC)
#else
       CALL MPI_TYPE_CONTIGUOUS(elCount,MPI_REAL8,
     &                          mpiTypeYFaceBlock_xy_r8, mpiRC)
#endif
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_CONTIGUOUS (mpiTypeYFaceBlock_xy_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xy_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xy_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
C      yFace (x=constant) for XYZ arrays with real*4 declaration
       arrElSize = 4
       arrElSep  = (sNx+OLx*2)*(sNy+OLy*2)
       elCount   = Nr
       elLen     = 1
       elStride  = arrElSize*arrElSep
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &                        mpiTypeYFaceBlock_xy_r4,
     &                       mpiTypeYFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r4, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r4)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
C      yFace (x=constant) for XYZ arrays with real*8 declaration
       arrElSize = 8
       elStride  = arrElSize*arrElSep
       CALL MPI_TYPE_HVECTOR(elCount,elLen,elStride,
     &                        mpiTypeYFaceBlock_xy_r8,
     &                       mpiTypeYFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_HVECTOR (mpiTypeYFaceBlock_xyz_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF
       CALL MPI_TYPE_COMMIT( mpiTypeYFaceBlock_xyz_r8, mpiRC)
       IF ( mpiRC .NE. MPI_SUCCESS ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(A,I5)')
     &   'S/R INI_PROCS: MPI_TYPE_COMMIT (mpiTypeYFaceBlock_xyz_r8)',
     &        mpiRC
        CALL PRINT_ERROR( msgBuf, myThid )
       ENDIF

C--    Assign MPI values used in generating unique tags for messages.
       mpiTagW    = 1
       mpiTagE    = 2
       mpiTagS    = 3
       mpiTagN    = 4

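C      The barrier below synchronises all processes, so that no rank
C      proceeds past initialisation before every rank has built its
C      datatypes and neighbor map.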
       CALL MPI_Barrier(MPI_COMM_MODEL,mpiRC)

#endif /* ALLOW_USE_MPI */
      ELSE
C--   Case without using MPI (usingMPI=F)

C--   The case without tile-communication (DISCONNECTED_TILES defined)
C     is not yet coded for multiple processes; for now, just stop if
C     this is a multi-process domain.
       IF ( nPx*nPy .NE. 1 ) THEN
        eeBootError = .TRUE.
        WRITE(msgBuf,'(2A,I6,A)') 'INI_PROCS: ',
     &    'needs MPI for multi-procs (nPx*nPy=', nPx*nPy, ') setup'
        CALL PRINT_ERROR( msgBuf, myThid )
        WRITE(msgBuf,'(2A)') 'INI_PROCS: ',
     &    'but presently usingMPI = False (in "eedata")'
        CALL PRINT_ERROR( msgBuf, myThid )
        GOTO 999
       ENDIF

C--   End if usingMPI
      ENDIF

 999  CONTINUE

      RETURN
      END