TABLE OF CONTENTS
- ABINIT/xmpi_allgather
- ABINIT/xmpi_allgather_char
- ABINIT/xmpi_allgather_dp1d
- ABINIT/xmpi_allgather_dp2d
- ABINIT/xmpi_allgather_dp3d
- ABINIT/xmpi_allgather_dp4d
- ABINIT/xmpi_allgather_int
- ABINIT/xmpi_allgather_int1d
- ABINIT/xmpi_allgather_int1d_1b
- ABINIT/xmpi_allgather_int2d
ABINIT/xmpi_allgather [ Functions ]
NAME
xmpi_allgather
FUNCTION
This module contains functions that call MPI routines when the code is compiled with the MPI CPP flags. xmpi_allgather is the generic function.
COPYRIGHT
Copyright (C) 2001-2024 ABINIT group (AR,XG) This file is distributed under the terms of the GNU General Public License, see ~ABINIT/COPYING or http://www.gnu.org/copyleft/gpl.txt .
SOURCE
ABINIT/xmpi_allgather_char [ Functions ]
NAME
xmpi_allgather_char
FUNCTION
Gathers data from all tasks and distributes it to all. Target: one-dimensional character(20) arrays.
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
charval= buffer array; recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_char(charval,recvbuf,comm,ier)

!Arguments-------------------------
 integer,intent(in) :: comm
 integer,intent(out) :: ier
 character(len=20),intent(inout) :: charval
 character(len=20), DEV_CONTARRD intent(inout) :: recvbuf(:)

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ichr
 character :: chbuf(20)
#endif

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf=charval
 else if (comm /= MPI_COMM_NULL) then
   ! Explode the string into a rank-1 character array so each element maps
   ! onto one MPI_CHARACTER, then gather on every process in comm.
   do ichr=1,20
     chbuf(ichr)=charval(ichr:ichr)
   end do
   call MPI_ALLGATHER(chbuf,20,MPI_CHARACTER,recvbuf,20,MPI_CHARACTER,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf=charval
#endif

end subroutine xmpi_allgather_char
ABINIT/xmpi_allgather_dp1d [ Functions ]
NAME
xmpi_allgather_dp1d
FUNCTION
Gathers data from all tasks and distributes it to all. Target: double precision one-dimensional arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_dp1d(xval,nelem,recvbuf,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(1:nelem)=xval(1:nelem)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem double-precision values from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(1:nelem)=xval(1:nelem)
#endif

end subroutine xmpi_allgather_dp1d
ABINIT/xmpi_allgather_dp2d [ Functions ]
NAME
xmpi_allgather_dp2d
FUNCTION
Gathers data from all tasks and distributes it to all. Target: double precision two-dimensional arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_dp2d(xval,nelem,recvbuf,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(:,:)=xval(:,:)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem double-precision values from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(:,:)=xval(:,:)
#endif

end subroutine xmpi_allgather_dp2d
ABINIT/xmpi_allgather_dp3d [ Functions ]
NAME
xmpi_allgather_dp3d
FUNCTION
Gathers data from all tasks and distributes it to all. Target: double precision three-dimensional arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_dp3d(xval,nelem,recvbuf,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(:,:,:)=xval(:,:,:)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem double-precision values from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(:,:,:)=xval(:,:,:)
#endif

end subroutine xmpi_allgather_dp3d
ABINIT/xmpi_allgather_dp4d [ Functions ]
NAME
xmpi_allgather_dp4d
FUNCTION
Gathers data from all tasks and distributes it to all. Target: double precision four-dimensional arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_dp4d(xval,nelem,recvbuf,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:,:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(:,:,:,:)=xval(:,:,:,:)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem double-precision values from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(:,:,:,:)=xval(:,:,:,:)
#endif

end subroutine xmpi_allgather_dp4d
ABINIT/xmpi_allgather_int [ Functions ]
NAME
xmpi_allgather_int
FUNCTION
Gathers data from all tasks and distributes it to all. Target: one-dimensional integer arrays.
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array; recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_int(xval,recvbuf,comm,ier)

!Arguments-------------------------
 integer,intent(inout) :: xval
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer, intent(in) :: comm
 integer,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering a scalar is a plain copy.
   recvbuf(1)=xval
 else if (comm /= MPI_COMM_NULL) then
   ! Wrap the scalar in a rank-1 constructor so the send buffer is an array.
   call MPI_ALLGATHER([xval],1,MPI_INTEGER,recvbuf,1,MPI_INTEGER,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(1)=xval
#endif

end subroutine xmpi_allgather_int
ABINIT/xmpi_allgather_int1d [ Functions ]
NAME
xmpi_allgather_int1d
FUNCTION
Gathers data from all tasks and distributes it to all. Target: one-dimensional integer arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_int1d(xval,nelem,recvbuf,comm,ier)

!Arguments-------------------------
 integer, DEV_CONTARRD intent(in) :: xval(:)
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(1:nelem)=xval(1:nelem)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem default integers from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_INTEGER,recvbuf,nelem,MPI_INTEGER,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(1:nelem)=xval(1:nelem)
#endif

end subroutine xmpi_allgather_int1d
ABINIT/xmpi_allgather_int1d_1b [ Functions ]
NAME
xmpi_allgather_int1d_1b
FUNCTION
Gathers data from all tasks and distributes it to all. Target: one-dimensional integer arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_int1d_1b(xval, nelem, recvbuf, comm, ier)

!Arguments-------------------------
 integer(c_int8_t), DEV_CONTARRD intent(in) :: xval(:)
 integer(c_int8_t), DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(1:nelem)=xval(1:nelem)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem one-byte integers (MPI_INTEGER1) from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_INTEGER1,recvbuf,nelem,MPI_INTEGER1,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(1:nelem)=xval(1:nelem)
#endif

end subroutine xmpi_allgather_int1d_1b
ABINIT/xmpi_allgather_int2d [ Functions ]
NAME
xmpi_allgather_int2d
FUNCTION
Gathers data from all tasks and distributes it to all. Target: two-dimensional integer arrays.
INPUTS
xval= buffer array; nelem= number of elements; comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
PARENTS
CHILDREN
mpi_allgather
SOURCE
subroutine xmpi_allgather_int2d(xval,nelem,recvbuf,comm,ier)

!Arguments-------------------------
 integer, DEV_CONTARRD intent(in) :: xval(:,:)
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:,:)
 integer ,intent(in) :: nelem,comm
 integer ,intent(out) :: ier

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm == MPI_COMM_SELF) then
   ! Single-task communicator: gathering reduces to a local copy.
   recvbuf(:,:)=xval(:,:)
 else if (comm /= MPI_COMM_NULL) then
   ! Gather nelem default integers from every process in comm.
   call MPI_ALLGATHER(xval,nelem,MPI_INTEGER,recvbuf,nelem,MPI_INTEGER,comm,ier)
 end if
#else
 ! Sequential build: behave like a one-process gather.
 recvbuf(:,:)=xval(:,:)
#endif

end subroutine xmpi_allgather_int2d