Apply the patch and copy the patched *.F files into src/
JK 2022
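In brief, the patch does three things: main.F passes the stress tensor TSIF through to CHAIN_FORCE, matching the CHAIN_FORCE interface provided by VTST's chain.F; main_mpi.F and mpi.F add an MPMD mode, switched on by a new logical INCAR tag LMPMD, in which each VASP instance gets its own MPI group, works in a private vaspNNN/ directory, and receives its POSCAR from an EON client over MPI_COMM_WORLD; src/.objects adds the VTST objects to the build.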
diff -ru vasp.5.4.4-orig/src/main.F vasp.5.4.4/src/main.F
--- vasp.5.4.4-orig/src/main.F 2022-06-16 08:12:15.774947000 +0200
+++ vasp.5.4.4/src/main.F 2022-06-16 08:14:25.016620153 +0200
@@ -3144,7 +3144,7 @@
ENDIF

CALL CHAIN_FORCE(T_INFO%NIONS,DYN%POSION,TOTEN,TIFOR, &
- LATT_CUR%A,LATT_CUR%B,IO%IU6)
+ TSIF,LATT_CUR%A,LATT_CUR%B,IO%IU6)

CALL PARALLEL_TEMPERING(NSTEP,T_INFO%NIONS,DYN%POSION,DYN%VEL,TOTEN,TIFOR,DYN%TEBEG,DYN%TEEND, &
LATT_CUR%A,LATT_CUR%B,IO%IU6)
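Note: the hunk above only widens the call site; VTST's chain.F declares CHAIN_FORCE with the stress tensor TSIF between the force array and the lattice vectors. A minimal standalone sketch of the resulting call shape (REAL(8) stands in for VASP's REAL(q); the stub body is a placeholder, not VTST code):

      PROGRAM chain_force_call_demo
      ! Sketch only: mirrors the patched argument order of CHAIN_FORCE.
      IMPLICIT NONE
      INTEGER, PARAMETER :: NIONS = 2
      REAL(8) :: POSION(3,NIONS), TOTEN, TIFOR(3,NIONS)
      REAL(8) :: TSIF(3,3), A(3,3), B(3,3)
      POSION = 0.0D0; TIFOR = 0.0D0; TSIF = 0.0D0
      A = 0.0D0; B = 0.0D0; TOTEN = 0.0D0
      CALL CHAIN_FORCE(NIONS, POSION, TOTEN, TIFOR, &
                       TSIF, A, B, 6)
      CONTAINS
        SUBROUTINE CHAIN_FORCE(NIONS, POSION, TOTEN, TIFOR, TSIF, A, B, IU6)
        ! Placeholder body; the real routine lives in VTST's chain.F.
        INTEGER :: NIONS, IU6
        REAL(8) :: POSION(3,NIONS), TOTEN, TIFOR(3,NIONS)
        REAL(8) :: TSIF(3,3), A(3,3), B(3,3)
        IF (IU6 >= 0) WRITE(IU6,'(A)') 'CHAIN_FORCE stub: TSIF received'
        END SUBROUTINE CHAIN_FORCE
      END PROGRAM chain_force_call_demo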
diff -ru vasp.5.4.4-orig/src/main_mpi.F vasp.5.4.4/src/main_mpi.F
--- vasp.5.4.4-orig/src/main_mpi.F 2022-06-16 08:12:15.717067000 +0200
+++ vasp.5.4.4/src/main_mpi.F 2022-06-16 08:27:23.823905833 +0200
@@ -67,7 +67,31 @@
!! INTEGER :: I,resultlen,ierror
!!! test_

+#ifdef MPMD
+ LOGICAL LMPMD
+ INTEGER mpmd_group_number
+ LMPMD =.FALSE.
+ CALL RDATAB(.TRUE.,'INCAR',IO%IU5,'LMPMD','=','#',';','L', &
+ & IDUM,RDUM,CDUM,LMPMD,CHARAC,N,1,IERR)
+ IF (((IERR/=0).AND.(IERR/=3)).OR.((IERR==0).AND.(N<1))) THEN
+ IF (IO%IU0>=0) WRITE(IO%IU0,*)'Error reading item ''LMPMD'' from file INCAR.'
+ STOP
+ ENDIF
+
+ IF (LMPMD) THEN
+ mpmd_group_number = 0
+ ELSE
+ mpmd_group_number = -1
+ ENDIF
+
+ CALL M_init(COMM_WORLD, mpmd_group_number)
+ IF (LMPMD) THEN
+ WRITE (DIR_APP, "('vasp',I3.3,'/')") mpmd_group_number
+ DIR_LEN=LEN_TRIM(DIR_APP)
+ ENDIF
+#else
CALL M_init(COMM_WORLD)
+#endif

VCAIMAGES=-1
CALL RDATAB(IO%LOPEN,INCAR,IO%IU5,'VCAIMAGES','=','#',';','F', &
@@ -315,9 +339,20 @@
STOP
ENDIF

+#if defined(MPMD)
+ IF (LMPMD) THEN
+ IF (IO%IU0>=0) WRITE (IO%IU0,'(A,A)') "MPMD: output directory ", DIR_APP(1:DIR_LEN)
+ ELSE
+ node=COMM_CHAIN%NODE_ME
+ CALL MAKE_DIR_APP(node)
+ ENDIF
+#endif
+
+#ifndef MPMD
node=COMM_CHAIN%NODE_ME

CALL MAKE_DIR_APP(node)
+#endif

! if all nodes should write (giving complete mess) do not use the
! following line
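The per-group output directory name used above comes from a simple formatted write; a tiny standalone check of the naming scheme (not VASP code):

      PROGRAM dir_app_demo
      ! Sketch only: shows the directory name produced for group 3.
      IMPLICIT NONE
      CHARACTER(LEN=10) :: DIR_APP
      INTEGER :: DIR_LEN, mpmd_group_number
      mpmd_group_number = 3
      WRITE (DIR_APP, "('vasp',I3.3,'/')") mpmd_group_number
      DIR_LEN = LEN_TRIM(DIR_APP)
      WRITE (*,'(A)') DIR_APP(1:DIR_LEN)   ! prints vasp003/
      END PROGRAM dir_app_demo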
diff -ru vasp.5.4.4-orig/src/mpi.F vasp.5.4.4/src/mpi.F
--- vasp.5.4.4-orig/src/mpi.F 2022-06-16 08:12:15.929065000 +0200
+++ vasp.5.4.4/src/mpi.F 2022-06-16 08:27:23.829045581 +0200
@@ -87,6 +87,9 @@
INTEGER NODE_ME ! node id starting from 1 ... NCPU
INTEGER IONODE ! node which has to do IO (set to 0 for no IO)
INTEGER NCPU ! total number of proc in this communicator
+#ifdef MPMD
+ INTEGER mpmd_client_rank
+#endif
END TYPE

! Standard MPI include file.
@@ -116,6 +119,161 @@
#endif

CONTAINS
+
+#ifdef MPMD
+ SUBROUTINE mpmd_mpi_init(mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank)
+ IMPLICIT NONE
+ INTEGER, INTENT(OUT) :: mpmd_mpi_comm, mpmd_client_rank
+ INTEGER :: i, j, ierr, process_type, irank, isize
+ INTEGER :: npotentials, nclients, mpmd_group_number
+ INTEGER :: potential_group_size
+ INTEGER :: orig_group, new_group, new_comm, my_comm
+ INTEGER, ALLOCATABLE, DIMENSION(:) :: process_types
+ INTEGER, ALLOCATABLE, DIMENSION(:) :: potential_ranks
+ CHARACTER (LEN=16) nclients_str
+ CHARACTER (LEN=10) :: DIR_APP
+ INTEGER :: DIR_LEN=0
+#if defined(CLIENT_MPI)
+ INTEGER, ALLOCATABLE, DIMENSION(:) :: client_ranks
+#endif
+
+ CALLMPI(MPI_Barrier(MPI_COMM_WORLD, ierr))
+
+ CALLMPI(MPI_Comm_rank(MPI_COMM_WORLD, irank, ierr))
+ CALLMPI(MPI_Comm_size(MPI_COMM_WORLD, isize, ierr))
+
+ ALLOCATE(process_types(isize))
+ ALLOCATE(potential_ranks(0:isize-1))
+#if defined(CLIENT_MPI)
+ ALLOCATE(client_ranks(0:isize-1))
+ WRITE(*,'(a,2i4)') 'mpi started: iproc_world,nproc_world ',irank,isize
+#endif
+ process_type = 2
+ CALLMPI(MPI_Allgather(process_type, 1, MPI_INTEGER, process_types, 1 , MPI_INTEGER, MPI_COMM_WORLD, ierr))
+#ifndef CLIENT_MPI
+ CALL GETENV('EON_NUMBER_OF_CLIENTS', nclients_str)
+ IF (LEN_TRIM(nclients_str) .GT. 0) THEN
+ READ(nclients_str,*) nclients
+ ELSE
+ nclients = 1
+ ENDIF
+
+ !WRITE(*,'(a,i4)') 'number of EON clients: ',nclients
+#endif
+#if defined(CLIENT_MPI)
+ nclients = 0
+ j = 0
+ DO i=1,isize
+ IF (process_types(i) .EQ. 1) THEN
+ client_ranks(j) = i-1
+ j = j + 1
+ nclients = nclients + 1
+ ENDIF
+ ENDDO
+ !WRITE(*,'(a,i4)') 'number of EON clients: ',nclients
+ CALLMPI(MPI_Comm_group(MPI_COMM_WORLD, orig_group, ierr))
+ CALLMPI(MPI_Group_incl(orig_group, nclients, client_ranks, new_group, ierr))
+ CALLMPI(MPI_Comm_create(MPI_COMM_WORLD, new_group, new_comm, ierr))
+#endif
+
+ npotentials = 0
+ j = 0
+ DO i=1,isize
+ IF (process_types(i) .EQ. 2) THEN
+ potential_ranks(j) = i-1
+ j = j + 1
+ npotentials = npotentials + 1
+ ENDIF
+ ENDDO
+
+ potential_group_size = npotentials/nclients
+
+ DO i=1,nclients
+ CALLMPI(MPI_Comm_group(MPI_COMM_WORLD, orig_group, ierr))
+ CALLMPI(MPI_Group_incl(orig_group, potential_group_size, potential_ranks((i-1)*potential_group_size:), new_group, ierr))
+ CALLMPI(MPI_Comm_create(MPI_COMM_WORLD, new_group, new_comm, ierr))
+ IF (new_comm /= MPI_COMM_NULL) THEN
+ my_comm = new_comm
+ mpmd_group_number = i-1
+ ENDIF
+ ENDDO
+
+ mpmd_mpi_comm = my_comm
+
+ CALLMPI(MPI_Comm_rank(my_comm, irank, ierr))
+ IF (irank==0 ) THEN
+ !GH write vasp directories
+ WRITE (DIR_APP, "('vasp',I3.3,'/')") mpmd_group_number
+ DIR_LEN=LEN_TRIM(DIR_APP)
+ CALL SYSTEM('mkdir -p ' // DIR_APP)
+ !GH end write vasp directories
+ CALL mpmd_recv_poscar(mpmd_group_number, mpmd_client_rank)
+ WRITE(*,*) "finished mpmd_mpi_init"
+ ENDIF
+ CALLMPI(MPI_Barrier(my_comm, ierr))
+
+ ENDSUBROUTINE mpmd_mpi_init
+
+ SUBROUTINE mpmd_recv_poscar(mpmd_group_number, mpmd_client_rank)
+ INTEGER :: N
+ INTEGER :: pbc, i, j, ierr, mpmd_group_number, prev_type
+ INTEGER :: mpmd_client_rank
+ INTEGER,ALLOCATABLE,DIMENSION(:) :: atomicNrs, ntype
+ INTEGER,DIMENSION(1024) :: icwd
+ REAL(q),ALLOCATABLE,DIMENSION(:) :: R
+ REAL(q),DIMENSION(9) :: box
+ CHARACTER(LEN=20) :: poscar_file
+
+ INTEGER stat(MPI_STATUS_SIZE)
+
+ CALLMPI(MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, stat, ierr))
+ mpmd_client_rank = stat(MPI_SOURCE)
+
+ CALLMPI(MPI_Recv(N, 1, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+
+ ALLOCATE(atomicNrs(N))
+ ALLOCATE(R(3*N))
+ ALLOCATE(ntype(N))
+
+ CALLMPI(MPI_Recv(atomicNrs, N, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+ CALLMPI(MPI_Recv(R, 3*N, MPI_DOUBLE_PRECISION, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+ CALLMPI(MPI_Recv(box, 9, MPI_DOUBLE_PRECISION, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+ CALLMPI(MPI_Recv(pbc, 1, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+ CALLMPI(MPI_Recv(icwd, 1024, MPI_INTEGER, mpmd_client_rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr))
+
+ WRITE(poscar_file, "('vasp',I3.3,'/POSCAR')") mpmd_group_number
+ OPEN(UNIT=30,FILE=poscar_file, ACCESS='SEQUENTIAL', STATUS='REPLACE')
+
+ WRITE(30,*) ' '
+ WRITE(30,'(F)') 1.0
+ WRITE(30,'(F,F,F)') box(1), box(2), box(3)
+ WRITE(30,'(F,F,F)') box(4), box(5), box(6)
+ WRITE(30,'(F,F,F)') box(7), box(8), box(9)
+
+ ntype=0
+ prev_type=atomicNrs(1)
+ j=1
+ DO i=1,N
+ IF (atomicNrs(i) /= prev_type) THEN
+ j = j+1
+ ENDIF
+ ntype(j) = ntype(j) + 1
+ prev_type = atomicNrs(i)
+ ENDDO
+
+ WRITE(30, *) ntype(1:j)
+ WRITE(30, "('Selective dynamics')")
+ WRITE(30, "('Cartesian')")
+
+ DO i=1,N
+ WRITE(30,"(F,F,F' T T T')") R(3*(i-1)+1), R(3*(i-1)+2), R(3*(i-1)+3)
+ ENDDO
+
+ CLOSE(30)
+
+ END SUBROUTINE
+#endif
+
!----------------------------------------------------------------------
!
! M_init: initialise the basic communications
@@ -123,12 +281,19 @@
!
!----------------------------------------------------------------------
!
- SUBROUTINE M_init( COMM)
+#ifdef MPMD
+ SUBROUTINE M_init(COMM, mpmd_group_number)
+#else
+ SUBROUTINE M_init(COMM)
+#endif
IMPLICIT NONE
INCLUDE "pm.inc"

TYPE(communic) COMM
INTEGER i, ierror
+#ifdef MPMD
+ INTEGER mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank
+#endif

call MPI_init( ierror )
IF ( ierror /= MPI_success ) THEN
@@ -140,7 +305,17 @@
! set only NCPU, NODE_ME and IONODE
! no internal setup done at this point
!
+#if defined(MPMD)
+ if (mpmd_group_number .EQ. -1) THEN
+ COMM%MPI_COMM = MPI_comm_world
+ ELSE
+ CALL mpmd_mpi_init(mpmd_mpi_comm, mpmd_group_number, mpmd_client_rank)
+ COMM%MPI_COMM = mpmd_mpi_comm
+ COMM%mpmd_client_rank = mpmd_client_rank
+ ENDIF
+#else
COMM%MPI_COMM= MPI_comm_world
+#endif

call MPI_comm_rank( COMM%MPI_COMM, COMM%NODE_ME, ierror )
IF ( ierror /= MPI_success ) &
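The core of mpmd_mpi_init above is a plain group-based communicator split: every rank executes every MPI_Comm_create call, but only the ranks included in the current MPI_Group_incl get a non-null communicator back. A self-contained sketch of just that pattern (fixed nclients, mpif.h interface as in the patched source; not VASP code):

      PROGRAM group_split_demo
      ! Standalone sketch of the split used in mpmd_mpi_init. Run with
      ! a process count divisible by nclients, e.g. mpirun -np 4.
      IMPLICIT NONE
      INCLUDE 'mpif.h'
      INTEGER, PARAMETER :: nclients = 2
      INTEGER :: ierr, irank, isize, i, j, gsize, my_group
      INTEGER :: orig_group, new_group, new_comm, my_comm
      INTEGER, ALLOCATABLE :: ranks(:)

      CALL MPI_Init(ierr)
      CALL MPI_Comm_rank(MPI_COMM_WORLD, irank, ierr)
      CALL MPI_Comm_size(MPI_COMM_WORLD, isize, ierr)

      gsize = isize / nclients
      ALLOCATE(ranks(gsize))
      my_comm = MPI_COMM_NULL
      my_group = -1
      DO i = 1, nclients
        DO j = 1, gsize
          ranks(j) = (i-1)*gsize + j - 1       ! world ranks of group i
        END DO
        CALL MPI_Comm_group(MPI_COMM_WORLD, orig_group, ierr)
        CALL MPI_Group_incl(orig_group, gsize, ranks, new_group, ierr)
        CALL MPI_Comm_create(MPI_COMM_WORLD, new_group, new_comm, ierr)
        IF (new_comm /= MPI_COMM_NULL) THEN    ! this rank is in group i
          my_comm = new_comm
          my_group = i - 1
        END IF
      END DO

      IF (my_comm /= MPI_COMM_NULL) THEN
        CALL MPI_Comm_rank(my_comm, irank, ierr)
        WRITE(*,'(A,I0,A,I0)') 'group ', my_group, ', local rank ', irank
      END IF
      CALL MPI_Finalize(ierr)
      END PROGRAM group_split_demo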
diff -ru vasp.5.4.4-orig/src/.objects vasp.5.4.4/src/.objects
--- vasp.5.4.4-orig/src/.objects 2022-06-16 08:12:16.079679000 +0200
+++ vasp.5.4.4/src/.objects 2022-06-16 08:31:37.082468000 +0200
@@ -69,6 +69,19 @@
tet.o \
tetweight.o \
hamil_rot.o \
+ dynmat.o \
+ neb.o \
+ dimer.o \
+ bbm.o \
+ lanczos.o \
+ instanton.o \
+ sd.o \
+ cg.o \
+ qm.o \
+ lbfgs.o \
+ bfgs.o \
+ fire.o \
+ opt.o \
chain.o \
dyna.o \
k-proj.o \
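Note on the .objects hunk: the list fixes the compile order, so the VTST objects are inserted before chain.o, whose chain.F uses the modules they provide.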