Commit 56779a71 authored by Daniel Wortmann

Refactoring of diagonalization part to enable build of mini-app

parent a944f914
@@ -93,7 +93,8 @@ if(${FLEUR_USE_MPI})
add_executable(fleur_MPI ${juDFT_HDF} ${juDFT_SRC_F90} ${fleur_SRC} ${c_filesFleur} ${fleur_SRC_MPI})
target_compile_definitions(fleur_MPI PUBLIC ${FLEUR_MPI_DEFINITIONS})
target_link_libraries(fleur_MPI ${FLEUR_LIBRARIES})
set_target_properties(fleur_MPI PROPERTIES Fortran_MODULE_DIRECTORY fleur_MPI_modules COMPILE_OPTIONS -Ifleur_MPI_modules)
target_link_libraries(fleur_MPI juDFT)
set_target_properties(fleur_MPI PROPERTIES Fortran_MODULE_DIRECTORY fleur_MPI_modules COMPILE_OPTIONS "-Ijudft_modules -Ifleur_MPI_modules")
endif ()
@@ -9,6 +9,8 @@ diagonalization/chase_diag.F90
diagonalization/symmetrize_matrix.f90
diagonalization/cusolver_diag.F90
diagonalization/elemental.F90
diagonalization/available_solvers.F90
diagonalization/writeout.F90
diagonalization/elpa_20180525_onenode.F90)
if (FLEUR_USE_ELPA_20180525)
set(fleur_F90 ${fleur_F90}
@@ -20,3 +22,22 @@ else()
)
endif()
set(diag_test_files ${diag_test_files}
diagonalization/eigen_diag.F90
diagonalization/lapack_diag.F90
diagonalization/magma.F90
diagonalization/scalapack.F90
diagonalization/chase_diag.F90
diagonalization/symmetrize_matrix.f90
diagonalization/cusolver_diag.F90
diagonalization/elemental.F90
diagonalization/available_solvers.F90
diagonalization/writeout.F90
diagonalization/elpa_20180525_onenode.F90)
add_executable(diag_test ${diag_test_files})
target_compile_definitions(diag_test PUBLIC ${FLEUR_DEFINITIONS})
target_link_libraries(diag_test ${FLEUR_LIBRARIES})
set_target_properties(diag_test PROPERTIES Fortran_MODULE_DIRECTORY diag_test_modules COMPILE_OPTIONS -Idiag_test_modules)
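
The new diag_test target only collects the diagonalization sources; no driver program appears in this hunk. The sketch below is a guess at what such a mini-app driver could look like and is not part of this commit: the file name "hs_mat" comes from diag_writeout (see the new m_writeout module further down), while the matrix size, the real/complex flag and the number of requested eigenpairs are pure assumptions.

! Hypothetical mini-app driver -- a sketch only, NOT part of this commit.
! It assumes the interfaces introduced below: open_matrix/read_matrix/close_matrix
! from m_io_matrix and the refactored eigen_diag(solver,hmat,smat,ne,eig,ev,...).
PROGRAM diag_test_driver
   USE m_types
   USE m_io_matrix
   USE m_eigen_diag
   IMPLICIT NONE
   TYPE(t_mat)              :: hmat,smat
   CLASS(t_mat),ALLOCATABLE :: zmat
   REAL,ALLOCATABLE         :: eig(:)
   INTEGER                  :: fid,ne,solver
   INTEGER,PARAMETER        :: matsize=100   ! assumed; must match the dumped matrices
   LOGICAL,PARAMETER        :: l_real=.TRUE. ! assumed; must match the dumped matrices

   CALL hmat%init(l_real,matsize,matsize)
   CALL smat%init(l_real,matsize,matsize)
   fid=open_matrix(l_real,matsize,2,2,"hs_mat") ! mode 2 = HDF5, as used by diag_writeout with CPP_HDF
   CALL read_matrix(hmat,1,fid)                 ! record 1: Hamiltonian
   CALL read_matrix(smat,2,fid)                 ! record 2: overlap
   CALL close_matrix(fid)

   ne=matsize/10                                ! number of eigenpairs requested (assumed)
   ALLOCATE(eig(ne))
   solver=0                                     ! 0 = let select_solver pick a default
   CALL eigen_diag(solver,hmat,smat,ne,eig,zmat)
   WRITE(*,*) "lowest eigenvalue:",eig(1)
END PROGRAM diag_test_driver
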
!--------------------------------------------------------------------------------
! Copyright (c) 2016 Peter Grünberg Institut, Forschungszentrum Jülich, Germany
! This file is part of FLEUR and available as free software under the conditions
! of the MIT license as expressed in the LICENSE file in more detail.
!--------------------------------------------------------------------------------
MODULE m_available_solvers
IMPLICIT NONE
! list of constants indicating the different solvers
! the parameters are set to negative values to indicate that a particular solver is not compiled
! -solvers with numbers below 100 expect a non-distributed matrix
! -solvers with numbers 100-199 expect a distributed (scalapack-type) matrix
! -solvers with numbers higher than 200 can handle both
#ifdef CPP_ELPA
INTEGER,PARAMETER:: diag_elpa=101
#else
INTEGER,PARAMETER:: diag_elpa=-101
#endif
#ifdef CPP_ELEMENTAL
INTEGER,PARAMETER:: diag_elemental=102
#else
INTEGER,PARAMETER:: diag_elemental=-102
#endif
#ifdef CPP_SCALAPACK
INTEGER,PARAMETER:: diag_scalapack=103
#else
INTEGER,PARAMETER:: diag_scalapack=-103
#endif
#ifdef CPP_MAGMA
INTEGER,PARAMETER:: diag_magma=6
#else
INTEGER,PARAMETER:: diag_magma=-6
#endif
#ifdef CPP_CHASE
INTEGER,PARAMETER:: diag_chase=7
#else
INTEGER,PARAMETER:: diag_chase=-7
#endif
#ifdef CPP_CUSOLVER
INTEGER,PARAMETER:: diag_cusolver=8
#else
INTEGER,PARAMETER:: diag_cusolver=-8
#endif
INTEGER,PARAMETER:: diag_lapack=4
#ifdef CPP_ELPA_ONENODE
INTEGER,PARAMETER:: diag_elpa_1node=14
#else
INTEGER,PARAMETER:: diag_elpa_1node=-14
#endif
INTEGER,PARAMETER:: diag_debugout=201
INTEGER,PARAMETER:: diag_all_solver(9)=(/diag_elpa,diag_elemental,diag_scalapack,diag_magma,&
     diag_chase,diag_cusolver,diag_lapack,diag_elpa_1node,diag_debugout/)
CONTAINS
LOGICAL FUNCTION parallel_solver_available()
parallel_solver_available=ANY(diag_all_solver>100)
END FUNCTION parallel_solver_available
FUNCTION select_solver(suggested_solver,parallel) RESULT(diag_solver)
USE m_juDFT
LOGICAL,INTENT(IN):: parallel
INTEGER,INTENT(in):: suggested_solver
INTEGER :: diag_solver
diag_solver=-99
diag_solver=suggested_solver
IF (suggested_solver==0) THEN
!Determine the default solver
IF (parallel) THEN
diag_solver=MINVAL(diag_all_solver,mask=diag_all_solver>100)
ELSE
diag_solver=MINVAL(diag_all_solver,mask=diag_all_solver>0)
ENDIF
!check if a special solver was requested
IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa") diag_solver=diag_elpa
IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa_1node") diag_solver=diag_elpa_1node
IF (TRIM(juDFT_string_for_argument("-diag"))=="scalapack") diag_solver=diag_scalapack
IF (TRIM(juDFT_string_for_argument("-diag"))=="elemental") diag_solver=diag_elemental
IF (TRIM(juDFT_string_for_argument("-diag"))=="lapack") diag_solver=diag_lapack
IF (TRIM(juDFT_string_for_argument("-diag"))=="magma") diag_solver=diag_magma
IF (TRIM(juDFT_string_for_argument("-diag"))=="chase") diag_solver=diag_chase
IF (TRIM(juDFT_string_for_argument("-diag"))=="cusolver") diag_solver=diag_cusolver
IF (TRIM(juDFT_string_for_argument("-diag"))=="debugout") diag_solver=diag_debugout
!Check if solver is possible
IF (diag_solver<0) CALL juDFT_error(&
"You selected a solver for the eigenvalue problem that is not available",&
hint="You most probably did not provide the appropriate libraries for compilation/linking")
IF (diag_solver<100.AND.parallel) CALL judft_error(&
"You selected an eigensolver that does not support distributed memory parallism",&
hint="Try scalapack,elpa or another supported solver for parallel matrices")
IF (diag_solver>100.AND.diag_solver<200.AND..NOT.parallel) CALL judft_error(&
"You selected an eigensolver for matrices that are memory distributed",&
hint="Try lapack, cusolver or another supported solver for non-distributed matrices")
END IF
END FUNCTION select_solver
END MODULE m_available_solvers
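
A minimal usage sketch of the new module (hypothetical code, not part of this commit), illustrating the selection rules encoded above: with suggested_solver==0 the smallest available positive id wins, restricted to ids above 100 for distributed matrices, and a -diag command-line argument overrides the default.

! Hypothetical usage sketch; it relies only on the public interface shown above.
PROGRAM show_solver_choice
   USE m_available_solvers
   IMPLICIT NONE
   INTEGER :: solver
   WRITE(*,*) "solver for distributed matrices in list: ",parallel_solver_available()
   solver=select_solver(0,.FALSE.)          ! 0 = pick a default for a non-distributed matrix
   WRITE(*,*) "selected solver id: ",solver ! e.g. diag_lapack (=4) unless -diag overrides it
END PROGRAM show_solver_choice
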
@@ -6,55 +6,14 @@
MODULE m_eigen_diag
USE m_juDFT
! the parameters are set to negative values to indicate that a particular solver is not compiled
IMPLICIT NONE
PRIVATE
#ifdef CPP_ELPA
INTEGER,PARAMETER:: diag_elpa=1
#else
INTEGER,PARAMETER:: diag_elpa=-1
#endif
#ifdef CPP_ELEMENTAL
INTEGER,PARAMETER:: diag_elemental=2
#else
INTEGER,PARAMETER:: diag_elemental=-2
#endif
#ifdef CPP_SCALAPACK
INTEGER,PARAMETER:: diag_scalapack=3
#else
INTEGER,PARAMETER:: diag_scalapack=-3
#endif
#ifdef CPP_MAGMA
INTEGER,PARAMETER:: diag_magma=6
#else
INTEGER,PARAMETER:: diag_magma=-6
#endif
#ifdef CPP_CHASE
INTEGER,PARAMETER:: diag_chase=7
#else
INTEGER,PARAMETER:: diag_chase=-7
#endif
#ifdef CPP_CUSOLVER
INTEGER,PARAMETER:: diag_cusolver=8
#else
INTEGER,PARAMETER:: diag_cusolver=-8
#endif
INTEGER,PARAMETER:: diag_lapack=4
#ifdef CPP_ELPA_ONENODE
INTEGER,PARAMETER:: diag_elpa_1node=14
#else
INTEGER,PARAMETER:: diag_elpa_1node=-14
#endif
INTEGER,PARAMETER:: diag_debugout=99
PUBLIC eigen_diag,parallel_solver_available
CONTAINS
USE m_available_solvers
IMPLICIT NONE
PRIVATE
PUBLIC :: eigen_diag
LOGICAL FUNCTION parallel_solver_available()
parallel_solver_available=any((/diag_elpa,diag_elemental,diag_scalapack/)>0)
END FUNCTION parallel_solver_available
CONTAINS
SUBROUTINE eigen_diag(mpi,hmat,smat,ikpt,jsp,iter,ne,eig,ev)
SUBROUTINE eigen_diag(solver,hmat,smat,ne,eig,ev,ikpt,jsp,iter)
USE m_lapack_diag
USE m_magma
USE m_elpa
@@ -67,50 +26,35 @@ CONTAINS
USE m_matrix_copy
USE m_cusolver_diag
USE m_judft_usage
USE m_writeout
IMPLICIT NONE
#ifdef CPP_MPI
include 'mpif.h'
#endif
TYPE(t_mpi), INTENT(IN) :: mpi
INTEGER, INTENT(INOUT) :: solver
CLASS(t_mat), INTENT(INOUT) :: smat,hmat
CLASS(t_mat), ALLOCATABLE, INTENT(OUT) :: ev
INTEGER, INTENT(IN) :: ikpt
INTEGER, INTENT(IN) :: jsp
INTEGER, INTENT(IN) :: iter
INTEGER, INTENT(INOUT) :: ne
REAL, INTENT(OUT) :: eig(:)
!Only for chase
INTEGER,OPTIONAL, INTENT(IN) :: ikpt
INTEGER,OPTIONAL, INTENT(IN) :: jsp
INTEGER,OPTIONAL, INTENT(IN) :: iter
!Locals
LOGICAL :: parallel
!For check-mode
TYPE(t_mat) :: s_store,h_store
SELECT TYPE(smat)
CLASS IS (t_mpimat)
parallel=.TRUE.
CLASS default
parallel=.FALSE.
END SELECT
!Create a copy of the matrix if in test mode
IF (TRIM(judft_string_for_argument("-diag"))=="test") THEN
SELECT TYPE(hmat)
CLASS IS (t_mpimat)
CALL s_store%init(hmat%l_real,hmat%global_size1,hmat%global_size2)
CALL h_store%init(hmat%l_real,hmat%global_size1,hmat%global_size2)
CLASS default
CALL s_store%init(hmat%l_real,hmat%matsize1,hmat%matsize2)
CALL h_store%init(hmat%l_real,hmat%matsize1,hmat%matsize2)
END SELECT
CALL matrix_copy(smat,s_store)
CALL matrix_copy(hmat,h_store)
END IF
solver=select_solver(solver,parallel)
CALL timestart("Diagonalization")
!Select the solver
CALL add_usage_data("diag-solver", priv_select_solver(parallel))
SELECT CASE (priv_select_solver(parallel))
CALL add_usage_data("diag-solver", solver)
SELECT CASE (solver)
CASE (diag_elpa)
CALL elpa_diag(hmat,smat,ne,eig,ev)
CASE (diag_elpa_1node)
@@ -126,138 +70,15 @@ CONTAINS
CASE (diag_lapack)
CALL lapack_diag(hmat,smat,ne,eig,ev)
CASE (diag_chase)
IF (.NOT.(PRESENT(ikpt).AND.PRESENT(jsp).AND.PRESENT(iter))) CALL judft_error("Optional arguments must be present for chase in eigen_diag")
CALL chase_diag(hmat,smat,ikpt,jsp,iter,ne,eig,ev)
CASE (diag_debugout)
CALL priv_debug_out(smat,hmat)
CALL diag_writeout(smat,hmat)
CASE default
CALL judft_error("No solver available to diagonalize matrix")
END SELECT
!Test the solution
IF (TRIM(judft_string_for_argument("-diag"))=="test") THEN
CALL priv_test_solution(mpi,ne,s_store,h_store,eig,ev)
CALL judft_error("Diagonalization tested")
END IF
CALL timestop("Diagonalization")
!
END SUBROUTINE eigen_diag
SUBROUTINE priv_test_solution(mpi,ne,s_store,h_store,eig1,ev)
USE m_types
USE m_lapack_diag
USE m_matrix_copy
IMPLICIT NONE
TYPE(t_mpi),INTENT(in):: mpi
INTEGER,INTENT(INOUT) :: ne
TYPE(t_mat) :: s_store,h_store
REAL,INTENT(in) :: eig1(:)
CLASS(t_mat) :: ev
REAL,ALLOCATABLE::eig2(:)
TYPE(t_mat) ::ev1
CLASS(t_mat),ALLOCATABLE ::ev2
INTEGER :: i,irank
ALLOCATE(eig2(ne))
CALL ev1%init(s_store%l_real,s_store%matsize1,s_store%matsize2)
CALL matrix_copy(ev,ev1)
IF (mpi%irank==0) THEN
CALL lapack_diag(h_store,s_store,ne,eig2,ev2)
OPEN(99,file="diag.compare")
WRITE(99,*) "Eigenvalues"
DO i=1,ne
WRITE(99,*) i,eig1(i),eig2(i)
ENDDO
WRITE(99,*) "Eigenvectors"
DO i=1,ne
IF (ev1%l_real) THEN
WRITE(99,"(i0,20(1x,f10.5))") i,ev1%data_r(1:10,i)
WRITE(99,"(i0,20(1x,f10.5))") i,ev2%data_r(1:10,i)
ELSE
WRITE(99,"(i0,20(1x,f10.5))") i,ev1%data_c(1:10,i)
WRITE(99,"(i0,20(1x,f10.5))") i,ev2%data_c(1:10,i)
END IF
ENDDO
CLOSE(99)
END IF
END SUBROUTINE priv_test_solution
SUBROUTINE priv_debug_out(smat,hmat)
USE m_types
use m_judft
IMPLICIT NONE
CLASS(t_mat),INTENT(INOUT) :: hmat,smat
!small subroutine that only writes the matrices to a file
INTEGER:: i,ii,irank,ierr
CHARACTER(len=20)::filename
#ifdef CPP_MPI
INCLUDE 'mpif.h'
CALL MPI_COMM_RANK(MPI_COMM_WORLD,irank,ierr)
#else
irank=0
#endif
WRITE(filename,"(a,i0)") "hmat",irank
OPEN(999,file=TRIM(filename))
WRITE(filename,"(a,i0)") "smat",irank
OPEN(998,file=TRIM(filename))
DO i=1,hmat%matsize2
DO ii=1,hmat%matsize1
IF (hmat%l_real) THEN
WRITE(999,"(2i6,f15.6)") ii,i,hmat%data_r(ii,i)
WRITE(998,"(2i6,f15.6)") ii,i,smat%data_r(ii,i)
ELSE
WRITE(999,"(2i6,2f15.6)") ii,i,hmat%data_c(ii,i)
WRITE(998,"(2i6,2f15.6)") ii,i,smat%data_c(ii,i)
ENDIF
END DO
ENDDO
CLOSE(999)
CLOSE(998)
#ifdef CPP_MPI
CALL MPI_BARRIER(MPI_COMM_WORLD,ierr)
#endif
CALL judft_error("STOP in eigen_diag:debug_diag")
END SUBROUTINE priv_debug_out
FUNCTION priv_select_solver(parallel) RESULT(diag_solver)
LOGICAL,INTENT(IN):: parallel
INTEGER :: diag_solver
diag_solver=-99
!Determine the default solver
IF (parallel) THEN
#ifdef CPP_ELPA
diag_solver=diag_elpa
#else
diag_solver=diag_scalapack
#endif
ELSE
diag_solver=diag_lapack
ENDIF
!check if a special solver was requested
IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa") diag_solver=diag_elpa
IF (TRIM(juDFT_string_for_argument("-diag"))=="elpa_1node") diag_solver=diag_elpa_1node
IF (trim(juDFT_string_for_argument("-diag"))=="scalapack") diag_solver=diag_scalapack
IF (trim(juDFT_string_for_argument("-diag"))=="elemental") diag_solver=diag_elemental
IF (trim(juDFT_string_for_argument("-diag"))=="lapack") diag_solver=diag_lapack
IF (trim(juDFT_string_for_argument("-diag"))=="magma") diag_solver=diag_magma
IF (trim(juDFT_string_for_argument("-diag"))=="chase") diag_solver=diag_chase
IF (trim(juDFT_string_for_argument("-diag"))=="cusolver") diag_solver=diag_cusolver
IF (trim(juDFT_string_for_argument("-diag"))=="debugout") diag_solver=diag_debugout
!Check if solver is possible
IF (diag_solver<0) CALL juDFT_error("You selected a solver for the eigenvalue problem that is not available",hint="You most probably did not provide the appropriate libraries for compilation/linking")
IF (ANY((/diag_lapack,diag_magma,diag_cusolver/)==diag_solver).AND.parallel) CALL judft_error("You selected an eigensolver that does not support distributed memory parallelism",hint="Try scalapack, elpa or another supported solver for parallel matrices")
IF (ANY((/diag_elpa,diag_elemental,diag_scalapack/)==diag_solver).AND..NOT.parallel) CALL judft_error("You selected an eigensolver for matrices that are memory distributed",hint="Try lapack, cusolver or another supported solver for non-distributed matrices")
END FUNCTION priv_select_solver
END MODULE m_eigen_diag
MODULE m_writeout
CONTAINS
SUBROUTINE diag_writeout(smat,hmat)
USE m_types
USE m_judft
USE m_io_matrix
USE m_types_mpimat
IMPLICIT NONE
CLASS(t_mat),INTENT(INOUT) :: hmat,smat
!small subroutine that only writes the H and S matrices to files (binary and formatted) and then aborts
INTEGER:: i,ii,irank,ierr,matsize
CHARACTER(len=20)::filename
#ifdef CPP_MPI
INCLUDE 'mpif.h'
CALL MPI_COMM_RANK(MPI_COMM_WORLD,irank,ierr)
#else
irank=0
#endif
SELECT TYPE(hmat)
TYPE is (t_mpimat)
matsize=hmat%global_size1
CLASS default
matsize=hmat%matsize1
END SELECT
!First write binary file
#ifdef CPP_HDF
i=open_matrix(hmat%l_real,matsize,2,2,"hs_mat")
#else
i=open_matrix(hmat%l_real,hmat%matsize1,1,2,"hs_mat")
#endif
CALL write_matrix(hmat,1,i)
CALL write_matrix(smat,2,i)
CALL close_matrix(i)
!Now the formatted matrix
WRITE(filename,"(a,i0)") "hmat",irank
OPEN(999,file=TRIM(filename))
WRITE(filename,"(a,i0)") "smat",irank
OPEN(998,file=TRIM(filename))
DO i=1,hmat%matsize2
DO ii=1,hmat%matsize1
IF (hmat%l_real) THEN
WRITE(999,"(2i6,f15.6)") ii,i,hmat%data_r(ii,i)
WRITE(998,"(2i6,f15.6)") ii,i,smat%data_r(ii,i)
ELSE
WRITE(999,"(2i6,2f15.6)") ii,i,hmat%data_c(ii,i)
WRITE(998,"(2i6,2f15.6)") ii,i,smat%data_c(ii,i)
ENDIF
END DO
ENDDO
CLOSE(999)
CLOSE(998)
#ifdef CPP_MPI
CALL MPI_BARRIER(MPI_COMM_WORLD,ierr)
#endif
CALL judft_error("STOP in eigen_diag:debug_diag")
END SUBROUTINE diag_writeout
END MODULE m_writeout
@@ -80,7 +80,7 @@ CONTAINS
INTEGER ne,lh0
INTEGER isp,i,j,err
LOGICAL l_wu,l_file,l_real,l_zref
INTEGER :: solver=0
! Local Arrays
INTEGER :: ierr(3)
INTEGER :: neigBuffer(kpts%nkpt,input%jspins)
@@ -196,8 +196,8 @@ CONTAINS
end select
END IF
CALL eigen_diag(mpi,hmat,smat,nk,jsp,iter,ne_all,eig,zMat)
CALL eigen_diag(solver,hmat,smat,ne_all,eig,zMat,nk,jsp,iter)
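!eigen_diag returns the id actually used in "solver" (INTENT(INOUT)); a default is only
!determined while solver==0, so later k-points and spins reuse the same backend.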
CALL smat%free()
CALL hmat%free()
DEALLOCATE(hmat,smat, stat=dealloc_stat, errmsg=errmsg)
@@ -24,9 +24,9 @@ contains
opened=.true.
print *,"Open olap.mat"
id_olap=OPEN_MATRIX(l_real,dimension%nbasfcn,1,"olap.mat")
id_olap=OPEN_MATRIX(l_real,DIMENSION%nbasfcn,1,1,"olap.mat")
print *,"Open z.mat"
id_z=OPEN_MATRIX(l_real,dimension%nbasfcn,1,"z.mat")
id_z=OPEN_MATRIX(l_real,DIMENSION%nbasfcn,1,1,"z.mat")
END SUBROUTINE open_hybrid_io1
@@ -40,7 +40,7 @@ contains
opened=.true.
print *,"Open v_x.mat"
id_v_x=OPEN_MATRIX(l_real,dimension%nbasfcn,1,"v_x.mat")
id_v_x=OPEN_MATRIX(l_real,DIMENSION%nbasfcn,1,1,"v_x.mat")
END SUBROUTINE open_hybrid_io1b
@@ -7,47 +7,97 @@
MODULE m_io_matrix
USE m_types
USE m_judft
USE m_iomatrix_hdf
#ifdef CPP_HDF
USE hdf5
#endif
IMPLICIT NONE
private
INTEGER:: mode=1
public:: open_matrix,read_matrix,write_matrix
PRIVATE
TYPE t_iomatrix_handle
INTEGER:: mode=0 !can be 1 for DA or 2 for HDF
INTEGER:: id !file ID in direct-access mode
INTEGER(hid_t):: fid,did !file-handle in hdf mode
END TYPE t_iomatrix_handle
TYPE(t_iomatrix_handle)::fh(10)
PUBLIC:: t_iomatrix_handle,open_matrix,read_matrix,write_matrix,close_matrix
CONTAINS
INTEGER FUNCTION OPEN_matrix(l_real,matsize,no_rec,filename)
INTEGER FUNCTION OPEN_matrix(l_real,matsize,mode,no_rec,filename)
LOGICAL,INTENT(IN) :: l_real
INTEGER,INTENT(in) :: matsize,no_rec
INTEGER,INTENT(in) :: matsize,no_rec,mode
CHARACTER(len=*),INTENT(in) :: filename
!Find free handle
DO open_matrix=1,SIZE(fh)
IF (fh(open_matrix)%mode==0) EXIT
ENDDO
IF (open_matrix>SIZE(fh)) CALL judft_error("Too many filehandles for matrix IO")
SELECT CASE (mode)
CASE (1)
OPEN_matrix=open_DA(l_real,matsize,no_rec,filename)
fh(open_matrix)%mode=1
fh(OPEN_matrix)%id=open_DA(l_real,matsize,no_rec,filename)
CASE(2)
fh(open_matrix)%mode=2
CALL iomatrix_hdf_open(l_real,matsize,no_rec,filename,fh(open_matrix)%fid,fh(open_matrix)%did)
CASE default
CALL judft_error("BUG in io_matrix")
END SELECT
END FUNCTION OPEN_MATRIX
SUBROUTINE read_matrix(mat,rec,id)
TYPE(t_Mat),INTENT(INOUT):: mat
INTEGER,INTENT(IN) :: rec,id
USE m_types_mpimat
CLASS(t_Mat),INTENT(INOUT) :: mat
INTEGER,INTENT(IN) :: rec,id
CALL mat%alloc()
SELECT CASE (mode)
SELECT CASE (fh(id)%mode)
CASE (1)
CALL read_matrix_DA(mat,rec,id)
SELECT TYPE(mat)
TYPE is (t_mat)
CALL read_matrix_DA(mat,rec,fh(id)%id)
TYPE is (t_mpimat)
CALL judft_error("Matrix IO for parallel matrix only with HDF5")
END SELECT
CASE(2)
CALL iomatrix_hdf_read(mat,rec,fh(id)%did)
CASE default
CALL judft_error("BUG in io_matrix")
END SELECT
END SUBROUTINE read_matrix
SUBROUTINE write_matrix(mat,rec,id)
TYPE(t_Mat),INTENT(IN):: mat
INTEGER,INTENT(IN) :: rec,id
USE m_types_mpimat
CLASS(t_Mat),INTENT(IN) :: mat
INTEGER,INTENT(IN) :: rec,id
SELECT CASE (mode)
SELECT CASE (fh(id)%mode)
CASE (1)
CALL write_matrix_DA(mat,rec,id)
SELECT TYPE(mat)
TYPE is (t_mat)
CALL write_matrix_DA(mat,rec,fh(id)%id)
TYPE is (t_mpimat)
CALL judft_error("Matrix IO for parallel matrix only with HDF5")
END SELECT
CASE(2)
CALL iomatrix_hdf_write(mat,rec,fh(id)%did)
CASE default
CALL judft_error("BUG in io_matrix")
END SELECT
END SUBROUTINE write_matrix
SUBROUTINE close_matrix(id)
INTEGER,INTENT(IN):: id
SELECT CASE (fh(id)%mode)
CASE (1)
CALL close_matrix_DA(fh(id)%id)
CASE (2)
CALL iomatrix_hdf_close(fh(id)%fid,fh(id)%did)
CASE default
CALL judft_error("BUG in io_matrix")
END SELECT
fh(id)%mode=0
END SUBROUTINE CLOSE_MATRIX
!Now the implementation in terms of fortran DA-files
INTEGER FUNCTION open_DA(l_real,matsize,no_rec,filename)
@@ -75,30 +125,26 @@ CONTAINS
open_DA=open_DA+1
END DO
!openfile
OPEN(unit=open_DA,file=filename,access='direct',recl=datasize*(matsize*matsize+4))!Three to include matsize
OPEN(unit=open_DA,file=filename,access='direct',recl=datasize*(matsize*matsize+6))!extra space for l_real and the matrix sizes
END FUNCTION open_DA
SUBROUTINE read_matrix_DA(mat,rec,id)
TYPE(t_Mat),INTENT(INOUT):: mat
TYPE(t_Mat),INTENT(OUT):: mat
INTEGER,INTENT(IN) :: rec,id
LOGICAL :: l_real
INTEGER:: err,matsize1,matsize2
l_real=mat%l_real
READ(id,rec=rec,iostat=err) matsize1,matsize2
if (matsize1<1) call judft_error("Data not found in file")
IF (mat%matsize1.NE.matsize1.OR.mat%matsize2.NE.matsize2) THEN
WRITE(*,*) 'mat%matsize1 = ', mat%matsize1
WRITE(*,*) 'mat%matsize2 = ', mat%matsize2
WRITE(*,*) 'stored matsize1 = ', matsize1
WRITE(*,*) 'stored matsize2 = ', matsize2
CALL juDFT_error("matrix size wrong", calledby="read_matrix_DA")
END IF
READ(id,rec=rec,iostat=err) l_real,matsize1,matsize2
IF (err.NE.0) CALL judft_error("Data not found in file")
CALL mat%init(l_real,matsize1,matsize2)
IF (mat%l_real) THEN
READ(id,rec=rec,iostat=err) matsize1,matsize2,mat%data_r
READ(id,rec=rec,iostat=err) l_real,matsize1,matsize2,mat%data_r
ELSE
READ(id,rec=rec,iostat=err) matsize1,matsize2,mat%data_c
READ(id,rec=rec,iostat=err) l_real,matsize1,matsize2,mat%data_c
END IF
IF (err.NE.0) CALL judft_error("Failed in reading of matrix")
END SUBROUTINE read_matrix_DA
@@ -108,11 +154,18 @@ CONTAINS
INTEGER,INTENT(IN) :: rec,id
INTEGER:: err
IF (mat%l_real) THEN
WRITE(id,rec=rec,iostat=err) mat%matsize1,mat%matsize2,mat%data_r
WRITE(id,rec=rec,iostat=err) mat%l_real,mat%matsize1,mat%matsize2,mat%data_r
ELSE
WRITE(id,rec=rec,iostat=err) mat%matsize1,mat%matsize2,mat%data_c
WRITE(id,rec=rec,iostat=err) mat%l_real,mat%matsize1,mat%matsize2,mat%data_c
END IF
IF (err.NE.0) CALL judft_error("Failed in writing of matrix")
END SUBROUTINE write_matrix_DA
SUBROUTINE close_matrix_DA(id)
INTEGER,INTENT(IN) :: id
CLOSE(id)
END SUBROUTINE close_matrix_DA
END MODULE m_io_matrix
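
A possible round trip through the new handle-based interface, shown as a hedged sketch (the file name and dimensions are invented): open_matrix now returns an index into the fh(:) handle table rather than a raw unit number, and write_matrix/read_matrix/close_matrix dispatch on fh(id)%mode, so direct-access and HDF5 files are driven through the same calls.

! Hypothetical round trip (not part of this commit); mode 1 = Fortran direct access.
PROGRAM io_matrix_demo
   USE m_types
   USE m_io_matrix
   IMPLICIT NONE
   TYPE(t_mat) :: a,b
   INTEGER     :: id

   CALL a%init(.TRUE.,10,10)                  ! real 10x10 matrix
   a%data_r=1.0
   CALL b%init(.TRUE.,10,10)                  ! re-initialised from the stored header on read

   id=open_matrix(.TRUE.,10,1,1,"demo_mat")   ! l_real, matsize, mode=1 (DA), one record
   CALL write_matrix(a,1,id)                  ! record now also stores l_real and the sizes
   CALL read_matrix(b,1,id)
   CALL close_matrix(id)                      ! frees the slot in the fh(:) handle table
END PROGRAM io_matrix_demo
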
MODULE m_iomatrix_hdf
USE m_judft
USE hdf5
USE m_hdf_tools
IMPLICIT NONE
PRIVATE
PUBLIC iomatrix_hdf_close,iomatrix_hdf_open,iomatrix_hdf_write,iomatrix_hdf_read
CONTAINS
SUBROUTINE iomatrix_hdf_read(mat,nrec,did)
USE