Commit 0d664403 authored by Daniel Wortmann

Merge branch 'develop' of fleur-git:fleur into develop

parents 3272afd9 57254036
@@ -30,7 +30,7 @@ endif ()
 set(LAPACK_LIBS "-lxml2 -mkl -lmatmul -qopenmp -lpthread")
 set(CMAKE_Fortran_FLAGS "${HDF5_INCL} -mkl -warn nousage -assume byterecl -r8" )
-set(CMAKE_Fortran_FLAGS_RELEASE " -O4 -g -qopenmp -xSSE3 -axSSE4.2 -parallel" )
+set(CMAKE_Fortran_FLAGS_RELEASE " -O4 -g -qopenmp -xSSE3 -axSSE4.2 " )
 set(CMAKE_Fortran_FLAGS_DEBUG " -O0 -g -qopenmp" )
 if (DEFINED ENV{SCALAPACK_ROOT})
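Note: the Release flags keep OpenMP threading via -qopenmp and only drop Intel's -parallel auto-parallelizer, which can otherwise spawn its own threads alongside the OpenMP runtime. A minimal sketch (not part of FLEUR) for checking that an executable built with these flags still picks up OpenMP threads:

    PROGRAM check_openmp
       ! Compiled with -qopenmp this reports the OpenMP thread count
       ! (e.g. from OMP_NUM_THREADS); -parallel controls Intel's separate
       ! auto-parallelization and is no longer set in the Release flags.
       USE omp_lib
       IMPLICIT NONE
       WRITE (*,*) 'OpenMP max threads:', omp_get_max_threads()
    END PROGRAM check_openmp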
@@ -66,6 +66,9 @@ SUBROUTINE initParallelProcesses(atoms,vacuum,input,stars,sliceplot,banddos,&
 CALL MPI_BCAST(stars%k1d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
 CALL MPI_BCAST(stars%k2d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
 CALL MPI_BCAST(stars%k3d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
+CALL MPI_BCAST(stars%kq1d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
+CALL MPI_BCAST(stars%kq2d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
+CALL MPI_BCAST(stars%kq3d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
 CALL MPI_BCAST(sphhar%nlhd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
 CALL MPI_BCAST(sphhar%ntypsd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
 CALL MPI_BCAST(sphhar%memd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
@@ -74,12 +77,17 @@ SUBROUTINE initParallelProcesses(atoms,vacuum,input,stars,sliceplot,banddos,&
CALL MPI_BCAST(dimension%nn3d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(dimension%nn2d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(dimension%ncvd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(dimension%nvd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(dimension%neigd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(dimension%nv2d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(kpts%numSpecialPoints,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(kpts%nkpts,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(kpts%nkptd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(kpts%nkpt,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(input%jspins,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(vacuum%layerd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(vacuum%nmzxyd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(vacuum%nmzd,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(oneD%odd%k3,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(oneD%odd%M,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
CALL MPI_BCAST(oneD%odd%n2d,1,MPI_INTEGER,0,mpi%mpi_comm,ierr)
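Each of these broadcasts replicates one integer that initially only rank 0 holds (rank 0 parses the input files) to every process in the communicator, so that all ranks can later allocate identically dimensioned arrays without re-reading the input. The argument pattern is: buffer, count (1), datatype (MPI_INTEGER), root (0), communicator, error flag. A self-contained sketch of the same pattern, with illustrative names (nval is not a FLEUR variable):

    PROGRAM bcast_scalar
       USE mpi
       IMPLICIT NONE
       INTEGER :: ierr, irank, nval
       CALL MPI_INIT(ierr)
       CALL MPI_COMM_RANK(MPI_COMM_WORLD,irank,ierr)
       nval = -1                       ! undefined on non-root ranks
       IF (irank == 0) nval = 42       ! root owns the value, e.g. from input
       ! Same call on every rank: one MPI_INTEGER, root rank 0
       CALL MPI_BCAST(nval,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
       WRITE (*,*) 'rank', irank, 'now has nval =', nval
       CALL MPI_FINALIZE(ierr)
    END PROGRAM bcast_scalar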