Commit ea6d01cf authored by Uliana Alekseeva

Possibility to compile with one-node ELPA added
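Once compiled in, the one-node ELPA solver can be requested at run time with the command-line argument -diag elpa_1node (see the changes to eigen_diag below).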

parent e3a17789
@@ -15,6 +15,7 @@ include("cmake/tests/test_Wannier5.cmake")
include("cmake/tests/test_MAGMA.cmake")
include("cmake/tests/test_GPU.cmake")
include("cmake/tests/test_LibXC.cmake")
include("cmake/tests/test_ELPA_onenode.cmake")
if (FLEUR_USE_MPI)
include("cmake/tests/test_SCALAPACK.cmake")
include("cmake/tests/test_ELPA.cmake")
......
@@ -15,29 +15,30 @@ message("${Green}Compiler ID: ${CReset} ${CMAKE_Fortran_COMPILER_ID}")
message("${Green}Flags : ${CReset} ${CMAKE_Fortran_FLAGS}")
message("${Green}Libraries : ${CReset} ${FLEUR_LIBRARIES}")
message("\n${Red}These Libraries are required:${CReset}")
message("${Green} XML Library found : ${CReset} ${FLEUR_USE_XML}")
message("${Green} LAPACK Library found : ${CReset} ${FLEUR_USE_LAPACK}")
message("${Green} XML Library found : ${CReset} ${FLEUR_USE_XML}")
message("${Green} LAPACK Library found : ${CReset} ${FLEUR_USE_LAPACK}")
message("${Red}These Libraries are optional:${CReset}")
message("${Green} FFT from MKL found : ${CReset} ${FLEUR_USE_FFTMKL}")
message("${Green} LibXC Library found : ${CReset} ${FLEUR_USE_LIBXC}")
message("${Green} HDF5 Library found : ${CReset} ${FLEUR_USE_HDF5}")
message("${Green} Wannier90 1.2 Library found : ${CReset} ${FLEUR_USE_WANN}")
message("${Green} Wannier90-4 Library found : ${CReset} ${FLEUR_USE_WANN4}")
message("${Green} Wannier90-5 Library found : ${CReset} ${FLEUR_USE_WANN5}")
message("${Green} MAGMA Library found : ${CReset} ${FLEUR_USE_MAGMA}")
message("${Green} MPI Library found : ${CReset} ${FLEUR_USE_MPI}")
message("${Green} ELPA (one node) Library found : ${CReset} ${FLEUR_USE_ELPA_ONENODE}")
message("${Green} FFT from MKL found : ${CReset} ${FLEUR_USE_FFTMKL}")
message("${Green} LibXC Library found : ${CReset} ${FLEUR_USE_LIBXC}")
message("${Green} HDF5 Library found : ${CReset} ${FLEUR_USE_HDF5}")
message("${Green} Wannier90 1.2 Library found : ${CReset} ${FLEUR_USE_WANN}")
message("${Green} Wannier90-4 Library found : ${CReset} ${FLEUR_USE_WANN4}")
message("${Green} Wannier90-5 Library found : ${CReset} ${FLEUR_USE_WANN5}")
message("${Green} MAGMA Library found : ${CReset} ${FLEUR_USE_MAGMA}")
message("${Green} MPI Library found : ${CReset} ${FLEUR_USE_MPI}")
if (FLEUR_USE_MPI)
message("${Green} SCALAPACK Library found : ${CReset} ${FLEUR_USE_SCALAPACK}")
message("${Green} ELPA Library found : ${CReset} ${FLEUR_USE_ELPA}")
message("${Green} ChASE Library found : ${CReset} ${FLEUR_USE_CHASE}")
message("${Green} SCALAPACK Library found : ${CReset} ${FLEUR_USE_SCALAPACK}")
message("${Green} ELPA Library found : ${CReset} ${FLEUR_USE_ELPA}")
message("${Green} ChASE Library found : ${CReset} ${FLEUR_USE_CHASE}")
else()
message("${Green} SCALAPACK Library found : ${CReset} ---")
message("${Green} ELPA Library found : ${CReset} ---")
message("${Green} ChASE Library found : ${CReset} ---")
message("${Green} SCALAPACK Library found : ${CReset} ---")
message("${Green} ELPA Library found : ${CReset} ---")
message("${Green} ChASE Library found : ${CReset} ---")
endif()
message("${Green} Compile GPU version : ${CReset} ${FLEUR_USE_GPU}")
message("${Green} Compile GPU version : ${CReset} ${FLEUR_USE_GPU}")
if (FLEUR_USE_GPU)
message("${Green} CuSolver Library found : ${CReset} ${FLEUR_USE_CUSOLVER}")
message("${Green} CuSolver Library found : ${CReset} ${FLEUR_USE_CUSOLVER}")
endif()
message("\n")
message("${Green}Compile serial version : ${CReset} ${FLEUR_USE_SERIAL}")
......
#First check if we can compile with the one-node version of ELPA
try_compile(FLEUR_USE_ELPA_ONENODE ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/tests/test_ELPA.f90
LINK_LIBRARIES ${FLEUR_LIBRARIES})
if (NOT FLEUR_USE_ELPA_ONENODE)
if (DEFINED CLI_ELPA_OPENMP)
set(TEST_LIBRARIES "-lelpa_onenode_openmp;${FLEUR_LIBRARIES}")
#else()
# set(TEST_LIBRARIES "-lelpa;${FLEUR_LIBRARIES}")
endif()
try_compile(FLEUR_USE_ELPA_ONENODE ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/tests/test_ELPA.f90
LINK_LIBRARIES ${TEST_LIBRARIES})
if (FLEUR_USE_ELPA_ONENODE)
set(FLEUR_LIBRARIES "${TEST_LIBRARIES}")
endif()
endif()
message("ELPA (one node) Library found:${FLEUR_USE_ELPA_ONENODE}")
#Now check the version of ELPA
if (FLEUR_USE_ELPA_ONENODE)
set(FLEUR_USE_ELPA_ONENODE false)
try_compile(FLEUR_USE_ELPA_ONENODE_20180525 ${CMAKE_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/tests/test_ELPA_20180525.f90
LINK_LIBRARIES ${FLEUR_LIBRARIES})
message("Version check for ELPA:")
message("20180525 ELPA: ${FLEUR_USE_ELPA_ONENODE_20180525}")
if (FLEUR_USE_ELPA_ONENODE_20180525)
set(FLEUR_USE_ELPA_ONENODE TRUE)
# set(FLEUR_DEFINITIONS ${FLEUR_DEFINITIONS} "CPP_ELPA" "CPP_ELPA2")
endif()
endif()
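A note on the fallback logic above (as read from the code; the exact configure mechanism may differ): the -lelpa_onenode_openmp link test is only attempted when the CMake variable CLI_ELPA_OPENMP is defined, e.g. by passing -DCLI_ELPA_OPENMP=1 on the cmake command line. If it is not defined, TEST_LIBRARIES stays empty, the second try_compile simply repeats the first one, and FLEUR_USE_ELPA_ONENODE remains false unless an ELPA library is already contained in FLEUR_LIBRARIES.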
@@ -9,7 +9,11 @@ diagonalization/chase_diag.F90
diagonalization/symmetrize_matrix.f90
diagonalization/cusolver_diag.F90
diagonalization/elemental.F90)
+if (FLEUR_USE_ELPA_ONENODE_20180525)
+set(fleur_F90 ${fleur_F90}
+diagonalization/elpa_20180525_onenode.F90
+)
+endif()
if (FLEUR_USE_ELPA_20180525)
set(fleur_F90 ${fleur_F90}
diagonalization/elpa_20180525.F90
......
@@ -41,6 +41,8 @@ MODULE m_eigen_diag
#endif
INTEGER,PARAMETER:: diag_lapack=4
+INTEGER,PARAMETER:: diag_elpa_1node=14
INTEGER,PARAMETER:: diag_debugout=99
PUBLIC eigen_diag,parallel_solver_available
CONTAINS
@@ -53,6 +55,7 @@ CONTAINS
USE m_lapack_diag
USE m_magma
USE m_elpa
+USE m_elpa_onenode
USE m_scalapack
USE m_elemental
USE m_chase_diag
@@ -105,6 +108,8 @@ CONTAINS
SELECT CASE (priv_select_solver(parallel))
CASE (diag_elpa)
CALL elpa_diag(hmat,smat,ne,eig,ev)
+CASE (diag_elpa_1node)
+CALL elpa_diag_onenode(hmat,smat,ne,eig,ev)
CASE (diag_elemental)
!CALL ELEMENTAL(hmat,smat,ne,eig,ev)
CASE (diag_scalapack)
@@ -229,20 +234,22 @@ CONTAINS
ENDIF
!check if a special solver was requested
-IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa") diag_solver=diag_elpa
-IF (trim(juDFT_string_for_argument("-diag"))=="scalapack") diag_solver=diag_scalapack
-IF (trim(juDFT_string_for_argument("-diag"))=="elemental") diag_solver=diag_elemental
-IF (trim(juDFT_string_for_argument("-diag"))=="lapack") diag_solver=diag_lapack
-IF (trim(juDFT_string_for_argument("-diag"))=="magma") diag_solver=diag_magma
-IF (trim(juDFT_string_for_argument("-diag"))=="chase") diag_solver=diag_chase
-IF (trim(juDFT_string_for_argument("-diag"))=="cusolver") diag_solver=diag_cusolver
-IF (trim(juDFT_string_for_argument("-diag"))=="debugout") diag_solver=diag_debugout
+IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa") diag_solver=diag_elpa
+IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa_1node") diag_solver=diag_elpa_1node
+IF (trim(juDFT_string_for_argument("-diag"))=="scalapack") diag_solver=diag_scalapack
+IF (trim(juDFT_string_for_argument("-diag"))=="elemental") diag_solver=diag_elemental
+IF (trim(juDFT_string_for_argument("-diag"))=="lapack") diag_solver=diag_lapack
+IF (trim(juDFT_string_for_argument("-diag"))=="magma") diag_solver=diag_magma
+IF (trim(juDFT_string_for_argument("-diag"))=="chase") diag_solver=diag_chase
+IF (trim(juDFT_string_for_argument("-diag"))=="cusolver") diag_solver=diag_cusolver
+IF (trim(juDFT_string_for_argument("-diag"))=="debugout") diag_solver=diag_debugout
!Check if solver is possible
IF (diag_solver<0) CALL juDFT_error("You selected a solver for the eigenvalue problem that is not available",hint="You most probably did not provide the appropriate libraries for compilation/linking")
IF (ANY((/diag_lapack,diag_magma,diag_cusolver/)==diag_solver).AND.parallel) CALL judft_error("You selected an eigensolver that does not support distributed-memory parallelism",hint="Try scalapack, elpa or another supported solver for parallel matrices")
IF (ANY((/diag_elpa,diag_elemental,diag_scalapack/)==diag_solver).AND..NOT.parallel) CALL judft_error("You selected an eigensolver that requires memory-distributed matrices",hint="Try lapack, cusolver or another supported solver for non-distributed matrices")
END FUNCTION priv_select_solver
......
!-------------------------------------------------------------------------------
! Copyright (c) 2016 Peter Grünberg Institut, Forschungszentrum Jülich, Germany
! This file is part of FLEUR and available as free software under the conditions
! of the MIT license as expressed in the LICENSE file in more detail.
!--------------------------------------------------------------------------------
MODULE m_elpa_onenode
CONTAINS
SUBROUTINE elpa_diag_onenode(hmat,smat,ne,eig,ev)
!
!----------------------------------------------------
!- One-node (shared-memory) eigensystem solver - driver routine based on chani; dw'12
!  Uses ELPA for the actual diagonalization
!
!
! hmat ..... Hamiltonian matrix
! smat ..... overlap matrix
! ne ....... number of ev's searched (and found) on this node
! On input, overall number of ev's searched,
! On output, local number of ev's found
! eig ...... eigenvalues, output
! ev ....... eigenvectors, output
!
!----------------------------------------------------
USE m_juDFT
!USE m_types_mpimat
USE m_types_mat
USE m_types
USE elpa
IMPLICIT NONE
CLASS(t_mat),INTENT(INOUT) :: hmat,smat
CLASS(t_mat),ALLOCATABLE,INTENT(OUT)::ev
REAL,INTENT(out) :: eig(:)
INTEGER,INTENT(INOUT) :: ne
!... Local variables
!
INTEGER :: num!, np,myid
INTEGER :: err
INTEGER :: i
REAL,ALLOCATABLE :: eig2(:)
!TYPE(t_mpimat) :: ev_dist
TYPE(t_mat) :: ev_dist
INTEGER :: kernel
CLASS(elpa_t),pointer :: elpa_obj
print*,"ELPA onenode"
!SELECT TYPE(hmat)
!TYPE IS (t_mpimat)
!TYPE IS (t_mat)
!SELECT TYPE(smat)
!TYPE IS (t_mpimat)
!TYPE IS (t_mat)
!CALL MPI_BARRIER(hmat%blacsdata%mpi_com,err)
!CALL MPI_COMM_SIZE(hmat%blacsdata%mpi_com,np,err)
!CALL MPI_COMM_RANK(hmat%blacsdata%mpi_com,myid,err)
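! elpa_init selects the ELPA API version to use; 20180525 corresponds to the
! API probed by the test_ELPA_20180525.f90 check in the CMake test above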
err = elpa_init(20180525)
elpa_obj => elpa_allocate()
!ALLOCATE ( eig2(hmat%global_size1), stat=err ) ! The eigenvalue array
ALLOCATE ( eig2(hmat%matsize1), stat=err ) ! The eigenvalue array
IF (err.NE.0) CALL juDFT_error('Failed to allocate "eig2"', calledby ='elpa')
CALL ev_dist%init(hmat)! Eigenvectors
IF (err.NE.0) CALL juDFT_error('Failed to allocate "ev_dist"',calledby ='elpa')
! Blocking factor
!IF (hmat%blacsdata%blacs_desc(5).NE.hmat%blacsdata%blacs_desc(6)) CALL judft_error("Different block sizes for rows/columns not supported")
!CALL elpa_obj%set("na", hmat%global_size1, err)
CALL elpa_obj%set("na", hmat%matsize1, err)
CALL elpa_obj%set("nev", ne, err)
!CALL elpa_obj%set("local_nrows", hmat%matsize1, err)
!CALL elpa_obj%set("local_ncols", hmat%matsize2, err)
!CALL elpa_obj%set("nblk",hmat%blacsdata%blacs_desc(5), err)
!CALL elpa_obj%set("mpi_comm_parent", hmat%blacsdata%mpi_com, err)
!CALL elpa_obj%set("process_row", hmat%blacsdata%myrow, err)
!CALL elpa_obj%set("process_col", hmat%blacsdata%mycol, err)
!CALL elpa_obj%set("blacs_context", hmat%blacsdata%blacs_desc(2), err)
err = elpa_obj%setup()
!CALL hmat%generate_full_matrix()
!CALL smat%generate_full_matrix()
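! (assumption) hmat/smat hold only one triangle of the matrices here;
! add_transpose fills in the missing triangle so that ELPA's generalized
! solver gets full matrices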
CALL hmat%add_transpose(hmat)
CALL smat%add_transpose(smat)
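! The last logical argument is ELPA's is_already_decomposed flag:
! .FALSE. means the overlap matrix smat has not been Cholesky-factorized yet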
IF (hmat%l_real) THEN
CALL elpa_obj%generalized_eigenvectors(hmat%data_r,smat%data_r,eig2, ev_dist%data_r, .FALSE.,err)
ELSE
CALL elpa_obj%generalized_eigenvectors(hmat%data_c,smat%data_c,eig2, ev_dist%data_c, .FALSE., err)
ENDIF
CALL elpa_deallocate(elpa_obj)
CALL elpa_uninit()
! END of ELPA stuff
!
! In this one-node version all eigenvalues are available locally, so we
! simply keep the first ne of them (the per-process selection used by the
! MPI driver, shown below, is not needed).
!
! num=ne
! ne=0
! DO i=myid+1,num,np
! ne=ne+1
! eig(ne)=eig2(i)
! ENDDO
eig(1:ne) = eig2(1:ne)
DEALLOCATE(eig2)
!
! Copy the eigenvectors into the output matrix ev
!
ALLOCATE(t_mat::ev)
CALL ev%alloc(hmat%l_real,hmat%matsize1,ne)
CALL ev%copy(ev_dist,hmat%matsize1,ne)
! ALLOCATE(t_mpimat::ev)
! CALL ev%init(hmat%l_real,hmat%global_size1,hmat%global_size1,hmat%blacsdata%mpi_com,.FALSE.)
! CALL ev%copy(ev_dist,1,1)
! CLASS DEFAULT
! CALL judft_error("Wrong type (1) in scalapack")
! END SELECT
! CLASS DEFAULT
! CALL judft_error("Wrong type (2) in scalapack")
! END SELECT
END SUBROUTINE elpa_diag_onenode
END MODULE m_elpa_onenode