TABLE OF CONTENTS
- ABINIT/xmpi_allgatherv_coeff2d
- ABINIT/xmpi_allgatherv_coeff2d_indx
- ABINIT/xmpi_allgatherv_dp
- ABINIT/xmpi_allgatherv_dp2d
- ABINIT/xmpi_allgatherv_dp3d
- ABINIT/xmpi_allgatherv_dp4d
- ABINIT/xmpi_allgatherv_dp5d
- ABINIT/xmpi_allgatherv_dp6d
- ABINIT/xmpi_allgatherv_int
- ABINIT/xmpi_allgatherv_int1_dp1
- ABINIT/xmpi_allgatherv_int2d
ABINIT/xmpi_allgatherv_coeff2d [ Functions ]
NAME
xmpi_allgatherv_coeff2d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: one-dimensional array of coeff2_type structures.
INPUTS
xval_in= coeff2_type array structure
comm= MPI communicator
OUTPUT
xval_out= coeff2_type array structure
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_coeff2d(xval_in,xval_out,comm,ierr)

!Arguments ------------------------------------
!scalars
 integer,intent(in) :: comm
 integer,intent(out) :: ierr
!arrays
 type(coeff2_type),intent(in) :: xval_in(:)
 type(coeff2_type),intent(out) :: xval_out(:)

!Local variables-------------------------------
!scalars
 integer :: ii,n1,n2
#if defined HAVE_MPI
 integer :: buf_int_size,buf_int_size_all,buf_dp_size,buf_dp_size_all
 integer :: i2,indx_int,indx_dp,nb,nb_out,nproc
#endif
!arrays
#if defined HAVE_MPI
 integer, allocatable :: buf_int(:),buf_int_all(:)
 integer, allocatable :: dimxval(:,:)
 real(dp),allocatable :: buf_dp(:),buf_dp_all(:)
#endif

! *************************************************************************

 ierr=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_NULL) then

   nproc=xmpi_comm_size(comm)
   nb = size(xval_in,1)

   if (comm==MPI_COMM_SELF.or.nproc==1) then
     do ii=1,nb
       n1=size(xval_in(ii)%value,1)
       n2=size(xval_in(ii)%value,2)
       if (allocated(xval_out(ii)%value)) then
         ABI_FREE(xval_out(ii)%value)
       end if
       ABI_STAT_MALLOC(xval_out(ii)%value,(n1,n2), ierr)
       if (ierr/= 0) call xmpi_abort(msg='error allocating xval_out%value in xmpi_allgatherv')
       xval_out(ii)%value=xval_in(ii)%value
     end do
     return
   end if

   buf_dp_size=0
   ABI_MALLOC(dimxval,(nb,2))
   do ii=1,nb
     dimxval(ii,1)=size(xval_in(ii)%value,dim=1)
     dimxval(ii,2)=size(xval_in(ii)%value,dim=2)
     buf_dp_size=buf_dp_size+dimxval(ii,1)*dimxval(ii,2)
   end do

   buf_int_size=2*nb
   ABI_STAT_MALLOC(buf_int,(buf_int_size), ierr)
   if (ierr/= 0) call xmpi_abort(msg='error allocating buf_int in xmpi_allgatherv')
   indx_int=1
   do ii=1,nb
     buf_int(indx_int  )=dimxval(ii,1)
     buf_int(indx_int+1)=dimxval(ii,2)
     indx_int=indx_int+2
   end do

   ABI_STAT_MALLOC(buf_dp,(buf_dp_size), ierr)
   if (ierr/= 0) call xmpi_abort(msg='error allocating buf_dp_size in xmpi_allgatherv')
   indx_dp=1
   do ii=1,nb
     n1=dimxval(ii,1); n2=dimxval(ii,2)
     do i2=1,n2
       buf_dp(indx_dp:indx_dp+n1-1)=xval_in(ii)%value(1:n1,i2)
       indx_dp=indx_dp+n1
     end do
   end do

   call xmpi_allgatherv(buf_int,buf_int_size,buf_dp,buf_dp_size,buf_int_all, &
&    buf_int_size_all,buf_dp_all,buf_dp_size_all,comm,ierr)

   nb_out=buf_int_size_all/2

   indx_int=1;indx_dp=1
   do ii=1,nb_out
     n1=buf_int_all(indx_int)
     n2=buf_int_all(indx_int+1)
     indx_int=indx_int+2
     if (allocated(xval_out(ii)%value)) then
       ABI_FREE(xval_out(ii)%value)
     end if
     ABI_STAT_MALLOC(xval_out(ii)%value,(n1,n2), ierr)
     if (ierr/= 0) call xmpi_abort(msg='error allocating xval_out%value in xmpi_allgatherv')
     do i2=1,n2
       xval_out(ii)%value(1:n1,i2)=buf_dp_all(indx_dp:indx_dp+n1-1)
       indx_dp=indx_dp+n1
     end do
   end do

   ABI_FREE(buf_dp_all)
   ABI_FREE(buf_int_all)
   ABI_FREE(buf_int)
   ABI_FREE(buf_dp)
   ABI_FREE(dimxval)

 end if

#else
 do ii=1,size(xval_in,1)
   n1=size(xval_in(ii)%value,1)
   n2=size(xval_in(ii)%value,2)
   if (allocated(xval_out(ii)%value)) then
     ABI_FREE(xval_out(ii)%value)
   end if
   ABI_STAT_MALLOC(xval_out(ii)%value,(n1,n2), ierr)
   if (ierr/= 0) call xmpi_abort(msg='error allocating xval_out%value in xmpi_allgatherv')
   xval_out(ii)%value=xval_in(ii)%value
 end do
#endif

end subroutine xmpi_allgatherv_coeff2d
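Because every coeff2_type element may carry a matrix of a different shape, the caller only provides the input array, a large enough output array and the communicator; shapes, counts and displacements are exchanged internally. A minimal usage sketch follows (not taken from the ABINIT sources: the demo_* subroutine and variable names are illustrative, and the use statement that makes coeff2_type available is omitted because its defining module is not shown in this listing):

  subroutine demo_gatherv_coeff2d(comm, my_coeffs, all_coeffs)
   use m_xmpi                                  ! assumed: provides the generic xmpi_allgatherv
   implicit none
   integer,intent(in) :: comm
   type(coeff2_type),intent(in)  :: my_coeffs(:)   ! local matrices, arbitrary shapes
   type(coeff2_type),intent(out) :: all_coeffs(:)  ! must be sized for the global total
   integer :: ierr

   ! Shapes and counts are exchanged internally; no recvcounts/displs are needed.
   call xmpi_allgatherv(my_coeffs, all_coeffs, comm, ierr)
   if (ierr /= 0) call xmpi_abort(msg='gather of coeff2_type data failed')
  end subroutine demo_gatherv_coeff2d

The gathered structures are stored in rank order: all of rank 0's entries first, then rank 1's, and so on.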
ABINIT/xmpi_allgatherv_coeff2d_indx [ Functions ]
NAME
xmpi_allgatherv_coeff2d_indx
FUNCTION
Gathers data from all tasks and delivers it to all. Target: one-dimensional array of coeff2_type structures; an indirect index is used to sort the gathered data.
INPUTS
xval_in= coeff2_type array structure
comm= MPI communicator
indx= gives the indexes of xval_in in xval_out; xval_in(i) will be transferred to xval_out(indx(i))
OUTPUT
xval_out= coeff2_type array structure
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_coeff2d_indx(xval_in,xval_out,comm,indx,ierr)

!Arguments ------------------------------------
!scalars
 integer,intent(in) :: comm
 integer,intent(out) :: ierr
!arrays
 integer,intent(in) :: indx(:)
 type(coeff2_type),intent(in) :: xval_in(:)
 type(coeff2_type),intent(out) :: xval_out(:)

!Local variables-------------------------------
!scalars
 integer :: ii,ival,n1,n2,nb
#if defined HAVE_MPI
 integer :: buf_int_size,buf_int_size_all,buf_dp_size,buf_dp_size_all
 integer :: i2,indx_int,indx_dp,nb_out,nproc
#endif
!arrays
#if defined HAVE_MPI
 integer, allocatable :: buf_int(:),buf_int_all(:)
 integer, allocatable :: dimxval(:,:)
 real(dp),allocatable :: buf_dp(:),buf_dp_all(:)
#endif

! *************************************************************************

 ierr=0 ; nb = size(xval_in,1)

#if defined HAVE_MPI
 if (comm == MPI_COMM_NULL) return
 nproc=xmpi_comm_size(comm)
 if (comm==MPI_COMM_SELF.or.nproc==1) then
#endif
   do ii=1,nb
     n1=size(xval_in(ii)%value,1)
     n2=size(xval_in(ii)%value,2)
     ival=indx(ii)
     if (allocated(xval_out(ival)%value)) then
       ABI_FREE(xval_out(ival)%value)
     end if
     ABI_STAT_MALLOC(xval_out(ival)%value,(n1,n2), ierr)
     if (ierr/= 0) call xmpi_abort(msg='error allocating xval_out%value in xmpi_allgatherv')
     ! xval_in(ii) goes into its global slot xval_out(indx(ii))
     xval_out(ival)%value=xval_in(ii)%value
   end do
   return

#if defined HAVE_MPI
 end if

 buf_dp_size=0
 ABI_STAT_MALLOC(dimxval,(nb,2), ierr)
 if (ierr/= 0) call xmpi_abort(msg='error allocating dimxval in xmpi_allgatherv')
 do ii=1,nb
   dimxval(ii,1)=size(xval_in(ii)%value,dim=1)
   dimxval(ii,2)=size(xval_in(ii)%value,dim=2)
   buf_dp_size=buf_dp_size+dimxval(ii,1)*dimxval(ii,2)
 end do

 buf_int_size=3*nb
 ABI_STAT_MALLOC(buf_int,(buf_int_size), ierr)
 if (ierr/= 0) call xmpi_abort(msg='error allocating buf_int in xmpi_allgatherv')
 indx_int=1
 do ii=1,nb
   buf_int(indx_int  )=dimxval(ii,1)
   buf_int(indx_int+1)=dimxval(ii,2)
   buf_int(indx_int+2)=indx(ii)
   indx_int=indx_int+3
 end do

 ABI_STAT_MALLOC(buf_dp,(buf_dp_size), ierr)
 if (ierr/= 0) call xmpi_abort(msg='error allocating buf_dp in xmpi_allgatherv')
 indx_dp=1
 do ii=1,nb
   n1=dimxval(ii,1); n2=dimxval(ii,2)
   do i2=1,n2
     buf_dp(indx_dp:indx_dp+n1-1)=xval_in(ii)%value(1:n1,i2)
     indx_dp=indx_dp+n1
   end do
 end do

 call xmpi_allgatherv(buf_int,buf_int_size,buf_dp,buf_dp_size,buf_int_all, &
&  buf_int_size_all,buf_dp_all,buf_dp_size_all,comm,ierr)

 nb_out=buf_int_size_all/3
 indx_int=1;indx_dp=1
 do ii=1,nb_out
   n1=buf_int_all(indx_int)
   n2=buf_int_all(indx_int+1)
   ival=buf_int_all(indx_int+2)
   indx_int=indx_int+3
   if (allocated(xval_out(ival)%value)) then
     ABI_FREE(xval_out(ival)%value)
   end if
   ABI_STAT_MALLOC(xval_out(ival)%value,(n1,n2), ierr)
   if (ierr/= 0) call xmpi_abort(msg='error allocating xval_out%value in xmpi_allgatherv')
   do i2=1,n2
     xval_out(ival)%value(1:n1,i2)=buf_dp_all(indx_dp:indx_dp+n1-1)
     indx_dp=indx_dp+n1
   end do
 end do

 ABI_FREE(buf_dp_all)
 ABI_FREE(buf_int_all)
 ABI_FREE(buf_int)
 ABI_FREE(buf_dp)
 ABI_FREE(dimxval)
#endif

end subroutine xmpi_allgatherv_coeff2d_indx
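The indx array lets the gathered structures land directly in their global slots, e.g. when each rank treats a subset of atoms. An illustrative fragment (hypothetical names: paw_local, paw_all and my_atoms are not from this listing):

  ! my_atoms(ii) is the global index of the ii-th structure treated by this rank
  call xmpi_allgatherv(paw_local, paw_all, comm, my_atoms, ierr)
  ! paw_all(iat)%value now holds the matrix computed by whichever rank treated atom iat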
ABINIT/xmpi_allgatherv_dp [ Functions ]
NAME
xmpi_allgatherv_dp
FUNCTION
Gathers data from all tasks and delivers it to all. Target: one-dimensional double precision arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_dp(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer,intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables--------------
 integer :: cc,dd

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&    MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
   recvbuf(dd+1:dd+cc)=xval(1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_dp
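The recvcounts and displs arguments follow the usual MPI_ALLGATHERV convention: entry i gives the number of elements contributed by rank i and the offset at which they are placed in recvbuf. A minimal usage sketch (not from the ABINIT sources; the demo_* subroutine and variable names are illustrative, and defs_basis/m_xmpi are assumed to provide the dp kind and the xmpi_* wrappers):

  subroutine demo_gatherv_dp(comm)
   use defs_basis, only : dp   ! assumed: ABINIT kind definitions
   use m_xmpi                  ! assumed: provides the xmpi_* wrappers
   implicit none
   integer,intent(in) :: comm
   integer :: nelem, nproc, me, ier, ip
   integer :: sendcount(1)
   integer,allocatable :: recvcounts(:), displs(:)
   real(dp),allocatable :: my_vals(:), all_vals(:)

   nproc = xmpi_comm_size(comm); me = xmpi_comm_rank(comm)
   nelem = me + 1                              ! rank-dependent contribution
   allocate(my_vals(nelem)); my_vals = real(me,dp)

   allocate(recvcounts(nproc), displs(nproc))
   sendcount(1) = nelem
   call xmpi_allgather(sendcount, 1, recvcounts, comm, ier)   ! share the counts
   displs(1) = 0
   do ip = 2, nproc
     displs(ip) = displs(ip-1) + recvcounts(ip-1)
   end do
   allocate(all_vals(sum(recvcounts)))

   call xmpi_allgatherv(my_vals, nelem, all_vals, recvcounts, displs, comm, ier)
   ! all_vals now holds rank 0's values, then rank 1's, and so on
  end subroutine demo_gatherv_dp

The same calling convention applies to the one-dimensional integer variant xmpi_allgatherv_int further below.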
ABINIT/xmpi_allgatherv_dp2d [ Functions ]
NAME
xmpi_allgatherv_dp2d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: double precision two-dimensional arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_dp2d(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables--------------
 integer :: cc,dd,sz1

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&    MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   sz1=size(xval,1)
   dd=0;if (size(displs)>0) dd=displs(1)/sz1
   cc=size(xval,2);if (size(recvcounts)>0) cc=recvcounts(1)/sz1
   recvbuf(:,dd+1:dd+cc)=xval(:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_dp2d
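For the multi-dimensional variants, counts and displacements are expressed in total elements (leading dimensions included), and the arrays are effectively concatenated along the last dimension; all ranks are therefore expected to share the same leading dimensions. A sketch for the 2D case, assuming the last dimension is distributed (demo_* names and the defs_basis/m_xmpi modules are assumptions, as above):

  subroutine demo_gatherv_dp2d(comm, my_block)
   use defs_basis, only : dp   ! assumed
   use m_xmpi                  ! assumed
   implicit none
   integer,intent(in) :: comm
   real(dp),intent(in) :: my_block(:,:)       ! local columns, leading dim common to all ranks
   integer :: n1, nproc, ip, ier
   integer :: sendcount(1)
   integer,allocatable :: recvcounts(:), displs(:)
   real(dp),allocatable :: all_blocks(:,:)

   nproc = xmpi_comm_size(comm)
   n1 = size(my_block,1)
   allocate(recvcounts(nproc), displs(nproc))
   sendcount(1) = n1*size(my_block,2)          ! counts are in total elements
   call xmpi_allgather(sendcount, 1, recvcounts, comm, ier)
   displs(1) = 0
   do ip = 2, nproc
     displs(ip) = displs(ip-1) + recvcounts(ip-1)
   end do
   allocate(all_blocks(n1, sum(recvcounts)/n1))
   call xmpi_allgatherv(my_block, sendcount(1), all_blocks, recvcounts, displs, comm, ier)
  end subroutine demo_gatherv_dp2d

The 3D to 6D double-precision variants and the two-dimensional integer variant below follow the same pattern; only the number of fixed leading dimensions changes.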
ABINIT/xmpi_allgatherv_dp3d [ Functions ]
NAME
xmpi_allgatherv_dp3d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: double precision three-dimensional arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_dp3d(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables--------------
 integer :: cc,dd,sz12

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&    MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   sz12=size(xval,1)*size(xval,2)
   dd=0;if (size(displs)>0) dd=displs(1)/sz12
   cc=size(xval,3);if (size(recvcounts)>0) cc=recvcounts(1)/sz12
   recvbuf(:,:,dd+1:dd+cc)=xval(:,:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_dp3d
ABINIT/xmpi_allgatherv_dp4d [ Functions ]
NAME
xmpi_allgatherv_dp4d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: double precision four-dimensional arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_dp4d(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:,:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd,sz123

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&    MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   sz123=size(xval,1)*size(xval,2)*size(xval,3)
   dd=0;if (size(displs)>0) dd=displs(1)/sz123
   cc=size(xval,4);if (size(recvcounts)>0) cc=recvcounts(1)/sz123
   recvbuf(:,:,:,dd+1:dd+cc)=xval(:,:,:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_dp4d
ABINIT/xmpi_allgatherv_dp5d [ Functions ]
NAME
xmpi_allgatherv_dp5d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: double precision five-dimensional arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_dp5d(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:,:,:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd,sz1234

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&    MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   sz1234=size(xval,1)*size(xval,2)*size(xval,3)*size(xval,4)
   dd=0;if (size(displs)>0) dd=displs(1)/sz1234
   cc=size(xval,5);if (size(recvcounts)>0) cc=recvcounts(1)/sz1234
   recvbuf(:,:,:,:,dd+1:dd+cc)=xval(:,:,:,:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_dp5d
ABINIT/xmpi_allgatherv_dp6d [ Functions ]
NAME
xmpi_allgatherv_dp6d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: double precision six-dimensional arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_dp6d(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:,:,:)
 real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:,:,:,:,:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd,sz12345

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&    MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   sz12345=size(xval,1)*size(xval,2)*size(xval,3)*size(xval,4)*size(xval,5)
   dd=0;if (size(displs)>0) dd=displs(1)/sz12345
   cc=size(xval,6);if (size(recvcounts)>0) cc=recvcounts(1)/sz12345
   recvbuf(:,:,:,:,:,dd+1:dd+cc)=xval(:,:,:,:,:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_dp6d
ABINIT/xmpi_allgatherv_int [ Functions ]
NAME
xmpi_allgatherv_int
FUNCTION
Gathers data from all tasks and delivers it to all. Target: one-dimensional integer arrays.
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_int(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 integer, DEV_CONTARRD intent(in) :: xval(:)
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_INTEGER,recvbuf,recvcounts,displs,&
&    MPI_INTEGER,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
   recvbuf(dd+1:dd+cc)=xval(1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_int
ABINIT/xmpi_allgatherv_int1_dp1 [ Functions ]
NAME
xmpi_allgatherv_int1_dp1
FUNCTION
Gathers data from all tasks and delivers it to all. Target: one-dimensional integer array and one-dimensional dp array.
INPUTS
buf_int= buffer integer array that is going to be gathered
buf_int_size= size of buf_int array
buf_dp= buffer dp array that is going to be gathered
buf_dp_size= size of buf_dp array
comm= MPI communicator
OUTPUT
buf_int_all= gathered buffer integer array
buf_int_size_all= size of the gathered buffer integer array
buf_dp_all= gathered buffer dp array
buf_dp_size_all= size of the gathered buffer dp array
ier= exit status, a non-zero value meaning there is an error
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_int1_dp1(buf_int,buf_int_size,buf_dp,buf_dp_size,&
&                                   buf_int_all,buf_int_size_all,buf_dp_all,buf_dp_size_all,comm,ier)

!Arguments-------------------------
!scalars
 integer,intent(in) :: buf_dp_size,buf_int_size,comm
 integer,intent(out) :: buf_dp_size_all,buf_int_size_all,ier
!arrays
 integer, intent(in) :: buf_int(:)
 integer,allocatable,target,intent(out) :: buf_int_all(:)
 real(dp),intent(in) :: buf_dp(:)
 real(dp),allocatable,target,intent(out) :: buf_dp_all(:)

!Local variables--------------
!scalars
 integer :: buf_pack_size,ierr,ii,iproc,istart_dp,istart_int,lg,lg1,lg2,lg_dp,lg_int
 integer :: nproc,position,totalbufcount
 logical,parameter :: use_pack=.false.
!arrays
 integer :: buf_size(2),pos(3)
 integer ,allocatable :: buf_int_size1(:),buf_dp_size1(:)
 integer,allocatable :: count_dp(:),count_int(:),count_size(:),counts(:)
 integer,allocatable :: disp_dp(:),disp_int(:),displ(:),displ_dp(:),displ_int(:)
 integer,allocatable :: pos_all(:)
 integer,pointer :: outbuf_int(:)
 real(dp),pointer:: outbuf_dp(:)
 character,allocatable :: buf_pack(:),buf_pack_tot(:)

! *************************************************************************

 ier=0

#if defined HAVE_MPI
 if (comm/=MPI_COMM_SELF.and.comm/=MPI_COMM_NULL) then

   nproc=xmpi_comm_size(comm)

!  First version: using 2 allgather (one for ints, another for reals)
!  ------------------------------------------------------------------
   if (.not.use_pack) then

!    Prepare communications
     ABI_MALLOC(count_int,(nproc))
     ABI_MALLOC(disp_int,(nproc))
     ABI_MALLOC(count_dp,(nproc))
     ABI_MALLOC(disp_dp,(nproc))
     ABI_MALLOC(count_size,(2*nproc))
     buf_size(1)=buf_int_size; buf_size(2)=buf_dp_size
     call xmpi_allgather(buf_size,2,count_size,comm,ier)
     do iproc=1,nproc
       count_int(iproc)=count_size(2*iproc-1)
       count_dp(iproc)=count_size(2*iproc)
     end do
     disp_int(1)=0;disp_dp(1)=0
     do ii=2,nproc
       disp_int(ii)=disp_int(ii-1)+count_int(ii-1)
       disp_dp (ii)=disp_dp (ii-1)+count_dp (ii-1)
     end do
     buf_int_size_all=sum(count_int)
     buf_dp_size_all =sum(count_dp)

     ABI_STAT_MALLOC(buf_int_all,(buf_int_size_all), ier)
     if (ier/=0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_allgatherv')
     ABI_STAT_MALLOC(buf_dp_all,(buf_dp_size_all), ier)
     if (ier/=0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_allgatherv')

!    Communicate (one call for integers, one call for reals)
     call xmpi_allgatherv(buf_int,buf_int_size,buf_int_all,count_int,disp_int,comm,ierr)
     call xmpi_allgatherv(buf_dp,buf_dp_size,buf_dp_all,count_dp,disp_dp,comm,ierr)

!    Release the memory
     ABI_FREE(count_int)
     ABI_FREE(disp_int)
     ABI_FREE(count_dp)
     ABI_FREE(disp_dp)
     ABI_FREE(count_size)

!  2nd version: using 1 allgather (with MPI_PACK)
!  -----------------------------------------------------------------
   else

!    Compute size of message
     call MPI_PACK_SIZE(buf_int_size,MPI_INTEGER,comm,lg1,ier)
     call MPI_PACK_SIZE(buf_dp_size,MPI_DOUBLE_PRECISION,comm,lg2,ier)
     lg=lg1+lg2

!    Pack data to be sent
     position=0 ; buf_pack_size=lg1+lg2
     ABI_MALLOC(buf_pack,(buf_pack_size))
     call MPI_PACK(buf_int,buf_int_size,MPI_INTEGER,buf_pack,buf_pack_size,position,comm,ier)
     call MPI_PACK(buf_dp,buf_dp_size,MPI_DOUBLE_PRECISION,buf_pack,buf_pack_size,position,comm,ier)

!    Gather size of all packed messages
     ABI_MALLOC(pos_all,(nproc*3))
     ABI_MALLOC(counts,(nproc))
     ABI_MALLOC(buf_int_size1,(nproc))
     ABI_MALLOC(buf_dp_size1,(nproc))
     ABI_MALLOC(displ,(nproc))
     ABI_MALLOC(displ_int,(nproc))
     ABI_MALLOC(displ_dp,(nproc))
     pos(1)=position;pos(2)=buf_int_size;pos(3)=buf_dp_size
     call MPI_ALLGATHER(pos,3,MPI_INTEGER,pos_all,3,MPI_INTEGER,comm,ier)
     ii=1
     do iproc=1,nproc
       counts(iproc)=pos_all(ii);ii=ii+1
       buf_int_size1(iproc)=pos_all(ii);ii=ii+1
       buf_dp_size1(iproc)=pos_all(ii);ii=ii+1
     end do

     displ(1)=0 ; displ_int(1)=0 ; displ_dp(1)=0
     do iproc=2,nproc
       displ(iproc)=displ(iproc-1)+counts(iproc-1)
       displ_int(iproc)=displ_int(iproc-1)+buf_int_size1(iproc-1)
       displ_dp(iproc)=displ_dp(iproc-1)+buf_dp_size1(iproc-1)
     end do

     totalbufcount=displ(nproc)+counts(nproc)
     ABI_STAT_MALLOC(buf_pack_tot,(totalbufcount), ier)
     if (ier/= 0) call xmpi_abort(msg='error allocating buf_pack_tot in xmpi_allgatherv')
     buf_int_size_all=sum(buf_int_size1)
     buf_dp_size_all=sum(buf_dp_size1)
     ABI_STAT_MALLOC(buf_int_all,(buf_int_size_all), ier)
     if (ier/=0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_allgatherv')
     ABI_STAT_MALLOC(buf_dp_all,(buf_dp_size_all), ier)
     if (ier/=0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_allgatherv')

!    Gather all packed messages
     call MPI_ALLGATHERV(buf_pack,position,MPI_PACKED,buf_pack_tot,counts,displ,MPI_PACKED,comm,ier)
     position=0
     do iproc=1,nproc
       lg_int=buf_int_size1(iproc); lg_dp=buf_dp_size1(iproc)
       istart_int=displ_int(iproc); istart_dp=displ_dp(iproc)
       outbuf_int=>buf_int_all(istart_int+1:istart_int+lg_int)
       call MPI_UNPACK(buf_pack_tot,totalbufcount,position,outbuf_int,&
&        lg_int,MPI_INTEGER,comm,ier)
       outbuf_dp=>buf_dp_all(istart_dp+1:istart_dp+lg_dp)
       call MPI_UNPACK(buf_pack_tot,totalbufcount,position,outbuf_dp,&
&        lg_dp,MPI_DOUBLE_PRECISION,comm,ier)
     end do

!    Release the memory
     ABI_FREE(pos_all)
     ABI_FREE(counts)
     ABI_FREE(buf_int_size1)
     ABI_FREE(buf_dp_size1)
     ABI_FREE(displ)
     ABI_FREE(displ_int)
     ABI_FREE(displ_dp)
     ABI_FREE(buf_pack_tot)
     ABI_FREE(buf_pack)

   end if
 else if (comm==MPI_COMM_SELF) then
#endif

!  Sequential version
   ABI_STAT_MALLOC(buf_int_all,(buf_int_size), ier)
   if (ier/=0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_allgatherv')
   ABI_STAT_MALLOC(buf_dp_all,(buf_dp_size), ier)
   if (ier/=0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_allgatherv')

   buf_int_all(:)=buf_int(:)
   buf_dp_all(:)=buf_dp(:)
   buf_int_size_all=buf_int_size
   buf_dp_size_all=buf_dp_size

#if defined HAVE_MPI
 end if
#endif

end subroutine xmpi_allgatherv_int1_dp1
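This variant gathers one integer buffer and one dp buffer in a single logical operation; the gathered buffers are allocated inside the routine and their global sizes are returned to the caller. A minimal usage sketch (not from the ABINIT sources; the demo_* subroutine and variable names are illustrative, and defs_basis/m_xmpi are assumed):

  subroutine demo_gatherv_int1_dp1(comm)
   use defs_basis, only : dp   ! assumed
   use m_xmpi                  ! assumed
   implicit none
   integer,intent(in) :: comm
   integer :: nint_all, ndp_all, ier
   integer :: loc_int(3)
   real(dp) :: loc_dp(2)
   integer,allocatable :: glob_int(:)
   real(dp),allocatable :: glob_dp(:)

   loc_int = (/ 1, 2, 3 /); loc_dp = (/ 0.5_dp, 1.5_dp /)
   ! Both buffers travel together; glob_int/glob_dp are allocated by the routine
   ! and their global sizes are returned in nint_all / ndp_all.
   call xmpi_allgatherv(loc_int, size(loc_int), loc_dp, size(loc_dp), &
&                       glob_int, nint_all, glob_dp, ndp_all, comm, ier)
  end subroutine demo_gatherv_int1_dp1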
ABINIT/xmpi_allgatherv_int2d [ Functions ]
NAME
xmpi_allgatherv_int2d
FUNCTION
Gathers data from all tasks and delivers it to all. Target: two-dimensional integer arrays. This block also carries the file-level description: the module contains the functions behind the generic interface xmpi_allgatherv; they call the corresponding MPI routines when the code is compiled with the MPI CPP flags.
COPYRIGHT
Copyright (C) 2001-2024 ABINIT group (AR,XG)
This file is distributed under the terms of the GNU General Public License, see ~ABINIT/COPYING or http://www.gnu.org/copyleft/gpl.txt .
PARENTS
CHILDREN
xmpi_allgatherv
SOURCE
subroutine xmpi_allgatherv_int2d(xval,nelem,recvbuf,recvcounts,displs,comm,ier)

!Arguments-------------------------
 integer, DEV_CONTARRD intent(in) :: xval(:,:)
 integer, DEV_CONTARRD intent(inout) :: recvbuf(:,:)
 integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
 integer,intent(in) :: nelem,comm
 integer,intent(out) :: ier

!Local variables--------------
 integer :: cc,dd,sz1

! *************************************************************************

 ier=0
#if defined HAVE_MPI
 if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
   call MPI_ALLGATHERV(xval,nelem,MPI_INTEGER,recvbuf,recvcounts,displs,&
&    MPI_INTEGER,comm,ier)
 else if (comm == MPI_COMM_SELF) then
#endif
   sz1=size(xval,1)
   dd=0;if (size(displs)>0) dd=displs(1)/sz1
   cc=size(xval,2);if (size(recvcounts)>0) cc=recvcounts(1)/sz1
   recvbuf(:,dd+1:dd+cc)=xval(:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xmpi_allgatherv_int2d