TABLE OF CONTENTS
- ABINIT/xmpi_scatterv
- ABINIT/xmpi_scatterv_dp
- ABINIT/xmpi_scatterv_dp2d
- ABINIT/xmpi_scatterv_dp3d
- ABINIT/xmpi_scatterv_dp4d
- ABINIT/xmpi_scatterv_int
- ABINIT/xmpi_scatterv_int2d
ABINIT/xmpi_scatterv [ Functions ]
NAME
xmpi_scatterv
FUNCTION
This module contains functions that call the corresponding MPI routine when the code is compiled with the MPI CPP flags. xmpi_scatterv is the generic function.
COPYRIGHT
Copyright (C) 2001-2024 ABINIT group (MT) This file is distributed under the terms of the GNU General Public License, see ~ABINIT/COPYING or http://www.gnu.org/copyleft/gpl.txt .
SOURCE
ABINIT/xmpi_scatterv_dp [ Functions ]
NAME
xmpi_scatterv_dp
FUNCTION
Scatters data from the root task to all tasks of the communicator. Target: one-dimensional real arrays.
INPUTS
xval= buffer array (send buffer, significant only on root) recvcount= number of received elements displs= relative offsets for outgoing data (array, significant only on root) sendcounts= number of sent elements (array, significant only on root) root= rank of sending process comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xmpi_scatterv_dp(xval,sendcounts,displs,recvbuf,recvcount,root,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer, DEV_CONTARRD intent(in) :: sendcounts(:),displs(:)
 integer,intent(in) :: recvcount,root,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: ioff

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_NULL .and. comm /= MPI_COMM_SELF) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_DOUBLE_PRECISION,recvbuf,recvcount,&
&    MPI_DOUBLE_PRECISION,root,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   ! Sequential fallback (self communicator or no MPI): direct copy from the
   ! send buffer, honoring the first displacement when one is provided.
   if (size(displs) > 0) then
     ioff = displs(1)
   else
     ioff = 0
   end if
   recvbuf(1:recvcount) = xval(ioff+1:ioff+recvcount)
#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_scatterv_dp
ABINIT/xmpi_scatterv_dp2d [ Functions ]
NAME
xmpi_scatterv_dp2d
FUNCTION
Scatters data from the root task to all tasks of the communicator. Target: two-dimensional real arrays.
INPUTS
xval= buffer array (send buffer, significant only on root) recvcount= number of received elements displs= relative offsets for outgoing data (array, significant only on root) sendcounts= number of sent elements (array, significant only on root) root= rank of sending process comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xmpi_scatterv_dp2d(xval,sendcounts,displs,recvbuf,recvcount,root,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:)
 integer, DEV_CONTARRD intent(in) :: sendcounts(:),displs(:)
 integer,intent(in) :: recvcount,root,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: ioff,ld1,ncol

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_NULL .and. comm /= MPI_COMM_SELF) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_DOUBLE_PRECISION,recvbuf,recvcount,&
&    MPI_DOUBLE_PRECISION,root,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   ! Sequential fallback: counts/displacements are expressed in elements;
   ! divide by the leading dimension to copy whole columns.
   ld1 = size(recvbuf,1)
   ncol = recvcount/ld1
   if (size(displs) > 0) then
     ioff = displs(1)/ld1
   else
     ioff = 0
   end if
   recvbuf(:,1:ncol) = xval(:,ioff+1:ioff+ncol)
#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_scatterv_dp2d
ABINIT/xmpi_scatterv_dp3d [ Functions ]
NAME
xmpi_scatterv_dp3d
FUNCTION
Scatters data from the root task to all tasks of the communicator. Target: three-dimensional real arrays.
INPUTS
xval= buffer array (send buffer, significant only on root) recvcount= number of received elements displs= relative offsets for outgoing data (array, significant only on root) sendcounts= number of sent elements (array, significant only on root) root= rank of sending process comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xmpi_scatterv_dp3d(xval,sendcounts,displs,recvbuf,recvcount,root,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:)
 integer, DEV_CONTARRD intent(in) :: sendcounts(:),displs(:)
 integer,intent(in) :: recvcount,root,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: ioff,nslice,plane_sz

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_NULL .and. comm /= MPI_COMM_SELF) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_DOUBLE_PRECISION,recvbuf,recvcount,&
&    MPI_DOUBLE_PRECISION,root,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   ! Sequential fallback: counts/displacements are expressed in elements;
   ! divide by the size of one (dim1 x dim2) plane to copy whole slices
   ! along the last dimension.
   plane_sz = size(recvbuf,1)*size(recvbuf,2)
   nslice = recvcount/plane_sz
   if (size(displs) > 0) then
     ioff = displs(1)/plane_sz
   else
     ioff = 0
   end if
   recvbuf(:,:,1:nslice) = xval(:,:,ioff+1:ioff+nslice)
#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_scatterv_dp3d
ABINIT/xmpi_scatterv_dp4d [ Functions ]
NAME
xmpi_scatterv_dp4d
FUNCTION
Scatters data from the root task to all tasks of the communicator. Target: four-dimensional real arrays.
INPUTS
xval= buffer array (send buffer, significant only on root) recvcount= number of received elements displs= relative offsets for outgoing data (array, significant only on root) sendcounts= number of sent elements (array, significant only on root) root= rank of sending process comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xmpi_scatterv_dp4d(xval,sendcounts,displs,recvbuf,recvcount,root,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:,:)
 integer, DEV_CONTARRD intent(in) :: sendcounts(:),displs(:)
 integer,intent(in) :: recvcount,root,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd,sz123

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_DOUBLE_PRECISION,recvbuf,recvcount,&
&    MPI_DOUBLE_PRECISION,root,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   ! Sequential fallback: counts/displacements are expressed in elements;
   ! convert them to slices of the last dimension by dividing by the size
   ! of one (dim1 x dim2 x dim3) sub-block.
   ! BUGFIX: this product previously used size(recvbuf,2) twice instead of
   ! size(recvbuf,2)*size(recvbuf,3), yielding wrong slice counts/offsets
   ! whenever dim2 /= dim3.
   sz123=size(recvbuf,1)*size(recvbuf,2)*size(recvbuf,3);cc=recvcount/sz123
   dd=0;if (size(displs)>0) dd=displs(1)/sz123
   recvbuf(:,:,:,1:cc)=xval(:,:,:,dd+1:dd+cc)
#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_scatterv_dp4d
ABINIT/xmpi_scatterv_int [ Functions ]
NAME
xmpi_scatterv_int
FUNCTION
Scatters data from the root task to all tasks of the communicator. Target: one-dimensional integer arrays.
INPUTS
xval= buffer array (send buffer, significant only on root) recvcount= number of received elements displs= relative offsets for outgoing data (array, significant only on root) sendcounts= number of sent elements (array, significant only on root) root= rank of sending process comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xmpi_scatterv_int(xval,sendcounts,displs,recvbuf,recvcount,root,comm,ier)

!Arguments-------------------------
 integer, DEV_CONTARRD intent(in) :: xval(:)
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer, DEV_CONTARRD intent(in) :: sendcounts(:),displs(:)
 integer,intent(in) :: recvcount,root,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: ioff

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_NULL .and. comm /= MPI_COMM_SELF) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_INTEGER,recvbuf,recvcount,&
&    MPI_INTEGER,root,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   ! Sequential fallback (self communicator or no MPI): direct copy from the
   ! send buffer, honoring the first displacement when one is provided.
   if (size(displs) > 0) then
     ioff = displs(1)
   else
     ioff = 0
   end if
   recvbuf(1:recvcount) = xval(ioff+1:ioff+recvcount)
#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_scatterv_int
ABINIT/xmpi_scatterv_int2d [ Functions ]
NAME
xmpi_scatterv_int2d
FUNCTION
Scatters data from the root task to all tasks of the communicator. Target: two-dimensional integer arrays.
INPUTS
xval= buffer array (send buffer, significant only on root) recvcount= number of received elements displs= relative offsets for outgoing data (array, significant only on root) sendcounts= number of sent elements (array, significant only on root) root= rank of sending process comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xmpi_scatterv_int2d(xval,sendcounts,displs,recvbuf,recvcount,root,comm,ier)

!Arguments-------------------------
 integer, DEV_CONTARRD intent(in) :: xval(:,:)
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:,:)
 integer, DEV_CONTARRD intent(in) :: sendcounts(:),displs(:)
 integer,intent(in) :: recvcount,root,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: ioff,ld1,ncol

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_NULL .and. comm /= MPI_COMM_SELF) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_INTEGER,recvbuf,recvcount,&
&    MPI_INTEGER,root,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   ! Sequential fallback: counts/displacements are expressed in elements;
   ! divide by the leading dimension to copy whole columns.
   ld1 = size(recvbuf,1)
   ncol = recvcount/ld1
   if (size(displs) > 0) then
     ioff = displs(1)/ld1
   else
     ioff = 0
   end if
   recvbuf(:,1:ncol) = xval(:,ioff+1:ioff+ncol)
#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_scatterv_int2d