MODULE m_eigen_diag
  USE m_juDFT
! The parameters are set to negative values to indicate that a particular solver was not compiled in.
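! Each solver gets a unique positive ID when its preprocessor flag (e.g.
! CPP_ELPA) is set and the negated ID otherwise, so priv_select_solver can
! detect an unavailable solver by a simple sign check.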
#ifdef CPP_ELEMENTAL
    USE m_elemental
#endif
#ifdef CPP_SCALAPACK
    USE m_chani
#endif
#ifdef CPP_ELPA
    USE m_elpa
#endif
    IMPLICIT NONE
#ifdef CPP_ELPA
  INTEGER,PARAMETER:: diag_elpa=1
#else
  INTEGER,PARAMETER:: diag_elpa=-1
#endif
#ifdef CPP_ELEMENTAL
  INTEGER,PARAMETER:: diag_elemental=2
#else
  INTEGER,PARAMETER:: diag_elemental=-2
#endif
#ifdef CPP_SCALAPACK
  INTEGER,PARAMETER:: diag_scalapack=3
#else
  INTEGER,PARAMETER:: diag_scalapack=-3
#endif
#ifdef CPP_MAGMA
  INTEGER,PARAMETER:: diag_magma=6
#else
  INTEGER,PARAMETER:: diag_magma=-6
#endif
  INTEGER,PARAMETER:: diag_lapack=4
  INTEGER,PARAMETER:: diag_lapack2=5
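  ! The LAPACK-based solvers require no additional library and are always
  ! compiled in, but they are restricted to serial (n_size==1) runs.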
CONTAINS
  SUBROUTINE eigen_diag(jsp,eig_id,it,atoms,dimension,matsize,mpi, n_rank,n_size,ne,nk,lapw,input,nred,sub_comm,&
       sym,matind,kveclo, noco,cell,bkpt,el,jij,l_wu,oneD,td,ud, eig,a,b,z,ne_found)
    USE m_zsymsecloc
    USE m_aline
    USE m_alinemuff
    USE m_types
    USE m_franza
#ifdef CPP_MAGMA
    USE m_magma
#endif
    IMPLICIT NONE
#ifdef CPP_MPI    
    include 'mpif.h'
#endif
    TYPE(t_mpi),INTENT(IN)       :: mpi
    TYPE(t_dimension),INTENT(IN) :: dimension
    TYPE(t_oneD),INTENT(IN)      :: oneD
    TYPE(t_input),INTENT(IN)     :: input
    TYPE(t_noco),INTENT(IN)      :: noco
    TYPE(t_jij),INTENT(IN)       :: jij
    TYPE(t_sym),INTENT(IN)       :: sym
    TYPE(t_cell),INTENT(IN)      :: cell
    TYPE(t_atoms),INTENT(IN)     :: atoms
    TYPE(t_lapw),INTENT(INOUT)   :: lapw !might be modified in aline
    INTEGER, INTENT(IN) :: jsp,eig_id,it,matsize 
    INTEGER, INTENT(IN) :: n_rank,n_size  ,nk   ,nred,sub_comm
    INTEGER, INTENT(IN) :: matind(dimension%nbasfcn,2),kveclo(atoms%nlotot)
    INTEGER,INTENT(IN)  :: ne
    INTEGER,INTENT(OUT) :: ne_found
    REAL,INTENT(IN)     :: el(:,:,:)
    LOGICAL, INTENT(IN) :: l_wu   
    REAL,INTENT(INOUT)  :: bkpt(3)
    TYPE(t_tlmplm),INTENT(IN) :: td
    TYPE(t_usdus),INTENT(IN)  :: ud

    REAL,INTENT(OUT) :: eig(:)
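
    !With inversion symmetry (CPP_INVERSION) the Hamiltonian (a) and overlap
    !(b) matrices can be chosen real symmetric; otherwise they are complex
    !Hermitian. Both are stored as packed triangular 1-D arrays.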

#ifndef CPP_INVERSION
    COMPLEX, ALLOCATABLE,INTENT(INOUT) :: a(:)
    COMPLEX, ALLOCATABLE,INTENT(INOUT) :: b(:)
    COMPLEX, ALLOCATABLE,INTENT(OUT) :: z(:,:)
#else
    REAL, ALLOCATABLE,INTENT(INOUT) :: a(:)
    REAL, ALLOCATABLE,INTENT(INOUT) :: b(:)
    REAL, ALLOCATABLE,INTENT(OUT) :: z(:,:)
#endif

    !Locals
    REAL :: time1
    REAL :: time2
    INTEGER :: ndim,err,n,nn,i
    LOGICAL :: parallel
    CHARACTER(len=20)::f

#if 1==2
    !This is only needed for debugging
    print *,n_rank,lapw%nmat
    write(f,'(a,i0)') "a.",n_rank
    open(99,file=f)
    write(f,'(a,i0)') "b.",n_rank
    open(98,file=f)
    i=0
    DO n=n_rank+1,lapw%nmat,n_size
       DO nn=1,n
          i=i+1
          write(99,'(2(i10,1x),4(f15.4,1x))') n,nn,a(i)
          write(98,'(2(i10,1x),4(f15.4,1x))') n,nn,b(i)
       ENDDO
    ENDDO
    CALL MPI_BARRIER(MPI_COMM_WORLD,err)
    close(99)
    close(98)
    STOP 'DEBUG'
#endif
    
    !
    !  The array z will contain the eigenvectors and is allocated now
    !
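    !  In the MPI-parallel case (n_size>1) the eigenvectors are distributed
    !  over the ranks of the sub-communicator, so each rank allocates only
    !  CEILING(neigd/n_size) local columns.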
    IF (n_size.NE.1) THEN
       ndim = CEILING(real(dimension%neigd)/n_size)
       ALLOCATE ( z(lapw%nmat,ndim), STAT = err )
    ELSE
       ndim = dimension%neigd
       ALLOCATE ( z(dimension%nbasfcn,ndim), STAT = err )
    ENDIF
    IF (err.NE.0) THEN
       WRITE (*,*) 'eigen: error during allocation of the'
       WRITE (*,*) 'eigenvecs',err,'  size: ',dimension%nbasfcn*ndim
       CALL juDFT_error("eigen: Error during allocation of the eigenvecs",calledby ="eigen")
    ENDIF

    !l_wu selects either a full diagonalization or a direct call of aline, which performs a subspace diagonalization only
    IF (.NOT.l_wu) THEN
       CALL timestart("Diagonalization")
       !Select the solver
       parallel=(n_size>1)
       ne_found=ne
       SELECT CASE (priv_select_solver(parallel))
#ifdef CPP_ELPA
       CASE (diag_elpa)
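          !Solve the generalized eigenproblem with the ELPA library; the
          !second argument is presumably the number of local matrix columns
          !(nbasfcn/n_size), analogous to the other distributed solvers.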
          CALL elpa(lapw%nmat,dimension%nbasfcn/n_size,SUB_COMM,a,b,z,eig,ne_found)
#endif
#ifdef CPP_ELEMENTAL
       CASE (diag_elemental)
          IF (it==1) THEN !switch between direct solver and iterative solver
             CALL elemental(lapw%nmat,dimension%nbasfcn/n_size,SUB_COMM,a,b,z,eig,ne_found,1)
          ELSE
             CALL elemental(lapw%nmat,dimension%nbasfcn/n_size,SUB_COMM,a,b,z,eig,ne_found,0)
          ENDIF

#endif
#ifdef CPP_SCALAPACK
       CASE (diag_scalapack)
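          !chani: generalized eigensolver based on ScaLAPACK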
          CALL chani(lapw%nmat,dimension%nbasfcn/n_size,ndim, n_rank,n_size,SUB_COMM,mpi%mpi_comm, a,b,z,eig,ne_found)
#endif
#ifdef CPP_MAGMA
       CASE (diag_magma)
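          !magma_diag: GPU-accelerated eigensolver from the MAGMA library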
          CALL magma_diag(lapw%nmat,a,b,z,eig,ne_found)
#endif
       CASE (diag_lapack2)
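          !zsymsecloc: LAPACK-based solver exploiting z-reflection symmetry;
          !untested with spin spirals or gw>1, hence the guards below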
          if (noco%l_ss) call juDFT_error("zsymsecloc not tested with noco%l_ss")
          if (input%gw>1) call juDFT_error("zsymsecloc not tested with input%gw>1")
          CALL zsymsecloc(jsp,input,lapw,bkpt,atoms,kveclo, sym,cell, dimension,matsize,ndim,&
                jij,matind,nred, a,b, z,eig,ne_found)
       CASE (diag_lapack)
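          !franza: serial LAPACK-based eigensolver; z-reflection symmetry is
          !exploited only if no local orbitals are present (nlotot==0)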
          CALL franza(dimension%nbasfcn,ndim, lapw%nmat,&
               (sym%l_zref.AND.(atoms%nlotot.EQ.0)), jij%l_j,matind,nred, a,b,input%gw, z,eig,ne_found)
       CASE DEFAULT
          !This should only happen if a solver was selected by hand that was not compiled in
          print*, "You selected a diagonalization scheme that was not compiled in"
          CALL priv_solver_error(priv_select_solver(parallel),parallel)
       END SELECT
       CALL timestop("Diagonalization")
       !
    ELSE
       call timestart("aline")
       CALL aline(eig_id,nk,atoms,dimension,sym,cell,input,jsp,el,&
            ud,a,b,lapw,td,noco,oneD,bkpt,z,eig,ne_found)
       call timestop("aline")
    ENDIF
    !--->         SECOND VARIATION STEP
    IF (input%secvar) THEN
       !--->           compute and diagonalize the second variation
       !--->           hamiltonian
       call timestart("second variation diagonalization")

       CALL aline_muff(atoms,dimension,sym, cell, jsp,ne_found, ud,td, bkpt,lapw, z,eig)
       call timestop("second variation diagonalization")
    END IF
  END SUBROUTINE eigen_diag
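
  !The default solver is ELPA (if compiled in) or SCALAPACK for parallel
  !runs and MAGMA (if compiled in) or LAPACK for serial runs; command-line
  !flags such as "-elpa" or "-lapack" override this choice.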

  FUNCTION priv_select_solver(parallel) result(diag_solver)
    LOGICAL,INTENT(IN):: parallel
    INTEGER           :: diag_solver
    diag_solver=-99

    !Determine the default solver
    IF (parallel) THEN
#ifdef CPP_ELPA
       diag_solver=diag_elpa
#else
       diag_solver=diag_scalapack
#endif
    ELSE
#ifdef CPP_MAGMA
       diag_solver=diag_magma
#else
       diag_solver=diag_lapack
#endif
    ENDIF

    !check if a special solver was requested
    IF (juDFT_was_argument("-elpa")) diag_solver=diag_elpa
    IF (juDFT_was_argument("-scalapack")) diag_solver=diag_scalapack
    IF (juDFT_was_argument("-elemental")) diag_solver=diag_elemental
    IF (juDFT_was_argument("-lapack")) diag_solver=diag_lapack
    IF (juDFT_was_argument("-lapack2")) diag_solver=diag_lapack2
    IF (juDFT_was_argument("-magma")) diag_solver=diag_magma
    !Check if solver is possible
    if (diag_solver<0) call priv_solver_error(diag_solver,parallel)
    if (any((/diag_lapack,diag_lapack2/)==diag_solver).and.parallel) call priv_solver_error(diag_solver,parallel)
    if (any((/diag_elpa,diag_elemental,diag_scalapack/)==diag_solver).and..not.parallel) call priv_solver_error(diag_solver,parallel)

  END FUNCTION priv_select_solver

  SUBROUTINE priv_solver_error(diag_solver,parallel)
    IMPLICIT NONE
    INTEGER,INTENT(IN):: diag_solver
    LOGICAL,INTENT(IN)::parallel
    print *,diag_solver,parallel
    SELECT CASE(diag_solver)
    CASE (diag_elpa)
       IF (parallel) THEN
          CALL juDFT_error("You did not compile with the ELPA solver and hence can not use it")
       ELSE
          CALL juDFT_error("The ELPA solver can not be used in serial")
       ENDIF
    CASE (diag_elemental)
       IF (parallel) THEN
          CALL juDFT_error("You did not compile with the ELEMENTAL solver and hence can not use it")
       ELSE
          CALL juDFT_error("The ELEMENTAL solver can not be used in serial")
       ENDIF
    CASE (diag_scalapack)
       IF (parallel) THEN
          CALL juDFT_error("You did not compile with the SCALAPACK solver and hence can not use it")
       ELSE
          CALL juDFT_error("The SCALAPACK solver can not be used in serial")
       ENDIF
    CASE (diag_lapack)
       IF (parallel) THEN
          CALL juDFT_error("The LAPACK solver can not be used in parallel")
       ENDIF
    CASE (diag_lapack2)
       IF (parallel) THEN
          CALL juDFT_error("The LAPACK2 solver can not be used in parallel")
       ENDIF  
    CASE (diag_magma)
       CALL juDFT_error("You have not compiled with MAGMA support")
    CASE DEFAULT
       CALL judft_error("You have selected an unkown eigensolver")
    END SELECT
  END SUBROUTINE priv_solver_error


END MODULE m_eigen_diag