!-------------------------------------------------------------------------------
! Copyright (c) 2016 Peter Grünberg Institut, Forschungszentrum Jülich, Germany
! This file is part of FLEUR and available as free software under the conditions
! of the MIT license as expressed in the LICENSE file in more detail.
!--------------------------------------------------------------------------------

MODULE m_elpa_onenode
CONTAINS
  SUBROUTINE elpa_diag_onenode(hmat,smat,ne,eig,ev)
    !
    !----------------------------------------------------
    !- Single-node eigensystem solver - driver routine based on chani; dw'12
    !  Uses ELPA for the actual diagonalization
    !
    ! hmat ..... Hamiltonian matrix
    ! smat ..... overlap matrix
    ! ne ....... number of eigenvalues searched (and found);
    !            on input, the number of eigenvalues requested,
    !            on output, the number found (unchanged in this single-node version)
    ! eig ...... eigenvalues, output
    ! ev ....... eigenvectors, output
    !
    !----------------------------------------------------
    USE m_juDFT
    !USE m_types_mpimat
    USE m_types_mat
    USE m_types
#ifdef CPP_ELPA_ONENODE
    USE elpa
#endif
    IMPLICIT NONE

    CLASS(t_mat),INTENT(INOUT)           :: hmat,smat
    CLASS(t_mat),ALLOCATABLE,INTENT(OUT) :: ev
    REAL,INTENT(OUT)                     :: eig(:)
    INTEGER,INTENT(INOUT)                :: ne
    
#ifdef CPP_ELPA_ONENODE
    !...  Local variables
    !
    INTEGER           :: num!, np,myid
    INTEGER           :: err
    INTEGER           :: i
    REAL,ALLOCATABLE      :: eig2(:)
    !TYPE(t_mpimat)        :: ev_dist
    TYPE(t_mat)        :: ev_dist
    INTEGER               :: kernel
    CLASS(elpa_t),pointer :: elpa_obj

    PRINT *,"ELPA onenode"
    !SELECT TYPE(hmat)
    !TYPE IS (t_mpimat)
    !TYPE IS (t_mat)
    !SELECT TYPE(smat)
    !TYPE IS (t_mpimat)
    !TYPE IS (t_mat)
       !CALL MPI_BARRIER(hmat%blacsdata%mpi_com,err)    
       !CALL MPI_COMM_SIZE(hmat%blacsdata%mpi_com,np,err)
       !CALL MPI_COMM_RANK(hmat%blacsdata%mpi_com,myid,err)
       err = elpa_init(20180525)
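       ! Added check (a sketch; assumes ELPA_OK as provided by the elpa module):
       IF (err.NE.ELPA_OK) CALL juDFT_error('ELPA: API version 20180525 not supported',calledby='elpa')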
       elpa_obj => elpa_allocate()
       
       !ALLOCATE ( eig2(hmat%global_size1), stat=err ) ! The eigenvalue array
       ALLOCATE ( eig2(hmat%matsize1), stat=err ) ! The eigenvalue array
       IF (err.NE.0) CALL juDFT_error('Failed to allocate "eig2"', calledby ='elpa')

       CALL ev_dist%init(hmat) ! Workspace for the eigenvectors, same shape as hmat
       
       ! Blocking factor
       !IF (hmat%blacsdata%blacs_desc(5).NE.hmat%blacsdata%blacs_desc(6)) CALL judft_error("Different block sizes for rows/columns not supported")
       !CALL elpa_obj%set("na", hmat%global_size1, err)
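       ! "na" is the matrix order, "nev" the number of eigenpairs to compute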
       CALL elpa_obj%set("na", hmat%matsize1, err)
       CALL elpa_obj%set("nev", ne, err)
       !CALL elpa_obj%set("local_nrows", hmat%matsize1, err)
       !CALL elpa_obj%set("local_ncols", hmat%matsize2, err)
       !CALL elpa_obj%set("nblk",hmat%blacsdata%blacs_desc(5), err)
       !CALL elpa_obj%set("mpi_comm_parent", hmat%blacsdata%mpi_com, err)
       !CALL elpa_obj%set("process_row", hmat%blacsdata%myrow, err)
       !CALL elpa_obj%set("process_col", hmat%blacsdata%mycol, err)
       !CALL elpa_obj%set("blacs_context", hmat%blacsdata%blacs_desc(2), err)
       err = elpa_obj%setup()
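       ! Added check (a sketch): fail early if the ELPA setup did not succeed
       IF (err.NE.ELPA_OK) CALL juDFT_error('ELPA setup failed',calledby='elpa')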

       !CALL hmat%generate_full_matrix()
       !CALL smat%generate_full_matrix()
       ! Only one triangle of each matrix is stored; complete both to full
       ! (hermitian) matrices before handing them to ELPA
       CALL hmat%add_transpose(hmat)
       CALL smat%add_transpose(smat)

       IF (hmat%l_real) THEN
          CALL elpa_obj%generalized_eigenvectors(hmat%data_r,smat%data_r,eig2, ev_dist%data_r, .FALSE.,err)
       ELSE
          CALL elpa_obj%generalized_eigenvectors(hmat%data_c,smat%data_c,eig2, ev_dist%data_c, .FALSE., err)
       ENDIF
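       ! Added check (a sketch; assumes the solver reports failure via err):
       IF (err.NE.ELPA_OK) CALL juDFT_error('ELPA diagonalization failed',calledby='elpa')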
       
       CALL elpa_deallocate(elpa_obj)
       CALL elpa_uninit()
       ! END of ELPA stuff
       !
       !     In the MPI version the eigenvalues expected by chani are put to
       !     eig, i.e. process i gets eigenvalues i+1, np+i+1, 2*np+i+1, ...
       !     On a single node we simply copy the first ne eigenvalues:
       !
       !     num=ne
       !     ne=0
       !     DO i=myid+1,num,np
       !        ne=ne+1
       !        eig(ne)=eig2(i)
       !     ENDDO

       eig(1:ne) = eig2(1:ne)
       DEALLOCATE(eig2)
       !
       !     Return the eigenvectors to the caller; in the MPI version this
       !     redistributes them from the ScaLAPACK layout, here it is a copy
       !
    ALLOCATE(t_mat::ev)
    CALL ev%alloc(hmat%l_real,hmat%matsize1,ne)
    CALL ev%copy(ev_dist,hmat%matsize1,ne)
    
    !   ALLOCATE(t_mpimat::ev)
    !   CALL ev%init(hmat%l_real,hmat%global_size1,hmat%global_size1,hmat%blacsdata%mpi_com,.FALSE.)
    !   CALL ev%copy(ev_dist,1,1)
!    CLASS DEFAULT
!       CALL judft_error("Wrong type (1) in scalapack")
!    END SELECT
! CLASS DEFAULT
!    CALL judft_error("Wrong type (2) in scalapack")
! END SELECT

#endif
 
END SUBROUTINE elpa_diag_onenode
END MODULE m_elpa_onenode
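
! ---------------------------------------------------------------------
! Usage sketch (hypothetical, not part of FLEUR): a minimal driver that
! diagonalizes a 4x4 real test problem with the routine above. It is
! kept as a comment so this file still builds inside FLEUR; the t_mat
! calls follow the alloc/l_real/data_r interface already used in this
! file, and the routine overwrites hmat and smat.
! ---------------------------------------------------------------------
!PROGRAM elpa_onenode_demo
!   USE m_types_mat
!   USE m_elpa_onenode
!   IMPLICIT NONE
!   TYPE(t_mat)              :: hmat,smat
!   CLASS(t_mat),ALLOCATABLE :: zmat
!   REAL                     :: eig(4)
!   INTEGER                  :: ne,i
!   ne=4
!   CALL hmat%alloc(.TRUE.,4,4)   ! real 4x4 Hamiltonian
!   CALL smat%alloc(.TRUE.,4,4)   ! real 4x4 overlap
!   hmat%data_r=0.0 ; smat%data_r=0.0
!   DO i=1,4
!      hmat%data_r(i,i)=REAL(i)   ! diagonal test Hamiltonian
!      smat%data_r(i,i)=1.0       ! overlap = identity
!   ENDDO
!   CALL elpa_diag_onenode(hmat,smat,ne,eig,zmat)
!   PRINT *,'eigenvalues:',eig(1:ne)
!END PROGRAM elpa_onenode_demo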