source: main_p.f

Last change on this file was 38d77eb, checked in by baerbaer <baerbaer@…>, 14 years ago

Redirected standard out to logString.

SMMP produced a lot of log messages. This became an issue when running in massively
parallel environments. I replaced all writes to standard out with writes to logString.
The next step is to pass this string to a function that writes the messages to a log
file according to their importance and the chosen log level.

git-svn-id: svn+ssh://svn.berlios.de/svnroot/repos/smmp/trunk@34 26dc1dd8-5c4e-0410-9ffe-d298b4865968
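
The log-level routine described in the commit message does not exist yet; the following is a minimal sketch of what it might look like, written in the same fixed-form Fortran style as the file below. The subroutine name log_message, the common block /logctl/, and the variables logLevel and logUnit are all hypothetical, not taken from the repository:

      subroutine log_message(importance, message)
! Sketch only: write 'message' to the log file if it is at least as
! important as the chosen log level.
      implicit none
      integer importance
      character*(*) message
! Assumed globals: chosen log level and an already opened file unit.
      integer logLevel, logUnit
      common /logctl/ logLevel, logUnit

      if (importance .ge. logLevel) then
         write (logUnit, '(A)') message
      endif

      return
      end

A call site could then look like call log_message(2, logString), where the importance value 2 is likewise illustrative.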

File size: 8.3 KB
! **************************************************************
!
! This file contains the main program (PARALLEL TEMPERING JOBS
! ONLY; FOR SINGLE-PROCESSOR JOBS USE main)
!
! This file also contains the subroutine: p_init_molecule
!
! Copyright 2003-2005 Frank Eisenmenger, U.H.E. Hansmann,
!                     Shura Hayryan, Chin-Ku
! Copyright 2007 Frank Eisenmenger, U.H.E. Hansmann,
!                Jan H. Meinke, Sandipan Mohanty
!
! CALLS: init_energy, p_init_molecule, partem_p
!
! **************************************************************
      program pmain

      include 'INCL.H'
      include 'INCP.H'
      include 'incl_lund.h'
      include 'mpif.h'

      double precision startwtime, group_world, error, endwtime

      integer ierr, num_proc, iabin, nequi, nswp, nmes, nsave, ifrm, j
      integer i, nml, nresi, my_pt_rank, ncalls, nacalls

      character*80 libdir
      character*80 in_fil, ou_fil, filebase, varfile
      character*80 fileNameMP

      character grpn*4, grpc*4
      logical newsta

! Number of replicas
      integer num_replica
! Number of processors per replica
      integer num_ppr
! Range of processors for creating communicators
      integer proc_range(3)
! Array of MPI groups
      integer group(MAX_REPLICA), group_partem
! Array of MPI communicators
      integer comm(MAX_REPLICA), partem_comm
! Array of nodes acting as masters for the energy calculation
      integer ranks(MAX_REPLICA)
! Configuration switch
      integer switch
      integer rep_id
! set number of replicas
      double precision eols(MAX_REPLICA)

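! Statistics for the five Monte Carlo update types: number of calls
! and number of accepted calls (printed at the end of the run)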
      common /updstats/ ncalls(5), nacalls(5)


! MPI setup and random number generator initialisation

      call mpi_init(ierr)
      call mpi_comm_rank(mpi_comm_world, myrank, ierr)
      call mpi_comm_size(mpi_comm_world, num_proc, ierr)

!     call VTSetup()
      enysolct = 0
      seed = 8368
      call sgrnd(seed)    ! Initialize the random number generator

! =================================================== Energy setup
      libdir = 'SMMP/'    ! Directory for SMMP libraries

! The switch in the following line is no longer used.
      flex = .false.      ! .true. for Flex / .false. for ECEPP

! Choose the energy type with the following switch instead ...
      ientyp = 0
! 0 => ECEPP2 or ECEPP3 depending on the value of sh2
! 1 => FLEX
! 2 => Lund force field
! 3 => ECEPP with Abagyan corrections

      sh2 = .false.       ! .true. for ECEPP/2; .false. for ECEPP3
      epsd = .false.      ! .true. for distance-dependent epsilon

      itysol = 1      ! 0: vacuum
                      ! >0: numerical solvent energy
                      ! <0: analytical solvent energy & gradients
      isolscl = .false.
      tesgrd = .false.    ! .true. to check analytical gradients

      call init_energy(libdir)

! Record the wall-clock start time using MPI_Wtime()
      startwtime = MPI_Wtime()


! ================================================= Structure setup
      grpn = 'nh2'        ! N-terminal group
      grpc = 'cooh'       ! C-terminal group

      iabin = 1           ! =0: read from PDB file
                          ! =1: ab initio from sequence (& variables)

      in_fil = 'EXAMPLES/1bdd.seq'   ! Sequence file
      varfile = ' '

      newsta = .true.
      boxsize = 1000.0d0   ! Only relevant for multi-molecule systems
      num_replica = 1      ! Number of independent replicas. The file
                           ! 'temperatures' must have at least as many
                           ! entries.
      nequi = 10           ! Number of MC sweeps before measurements
                           ! and replica exchanges are started
      nswp = 500000        ! Number of sweeps
      nmes = 10            ! Interval for measurements and replica
                           ! exchange
      nsave = 1000         ! Not used at the moment

      switch = -1          ! How should the configuration be
                           ! initialized?
                           ! -1 stretched chain
                           !  0 don't do anything
                           !  1 initialize each angle to a random value

      ifrm = 0
      ntlml = 0

! Decide if and when to use BGS, and initialize Lund data structures
      bgsprob = 0.6     ! Prob for BGS, given that it is possible
! upchswitch = 0 => no BGS, 1 => BGS with probability bgsprob,
!              2 => temperature-dependent choice
      upchswitch = 1
      rndord = .true.

! =================================================================
! Distribute nodes to parallel tempering tasks.
! I assume that the number of nodes available is an integer
! multiple, n, of the number of replicas. Each replica then gets
! n processors to do its energy calculation.
      num_ppr = num_proc / num_replica
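! Example: with 16 MPI tasks and num_replica = 4, each replica gets
! num_ppr = 4 processors for its energy calculation.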

      call mpi_comm_group(mpi_comm_world, group_world, error)

! The current version doesn't require a separate variable j. I
! could just use (i - 1) * num_ppr, but this way it's more flexible.
      j = 0
      do i = 1, num_replica
         ranks(i) = j
         proc_range(1) = j
         proc_range(2) = j + num_ppr - 1
         proc_range(3) = 1
         call mpi_group_range_incl(group_world, 1, proc_range, group(i)
     &        , error)
         write (logString, *) "Assigning rank ", j, proc_range,
     &        "to group", group(i)
         call flush(6)
         j = j + num_ppr
      enddo

      do i = 1, num_replica
         call mpi_comm_create(mpi_comm_world, group(i), comm(i), error)
         if (comm(i).ne.MPI_COMM_NULL) then
            my_mpi_comm = comm(i)
            rep_id = i - 1
            write (logString, *) rep_id, "has comm", my_mpi_comm
            call flush(6)
         endif
      enddo

! Set up the communicator used for parallel tempering
      write (logString, *) "PTGroup=", ranks(:num_replica)
      call flush(6)
      call mpi_group_incl(group_world, num_replica, ranks, group_partem,
     &     error)
      call mpi_comm_create(mpi_comm_world, group_partem, partem_comm,
     &     error)

      if (partem_comm.ne.MPI_COMM_NULL) then
         write (logString, *) partem_comm, myrank, "is master for ",
     &        rep_id, "."
      endif

      call mpi_comm_rank(my_mpi_comm, myrank, ierr)
      call mpi_comm_size(my_mpi_comm, no, ierr)

      write (logString, *) "My new rank is ", myrank, "of", no
      call flush(6)
! = Done setting up communicators =====================================

      if (newsta) then
         varfile = 'EXAMPLES/1bdd.var'
         call init_molecule(iabin, grpn, grpc, in_fil, varfile)
      else
         filebase = "conf_0000.var"
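! Presumably fileNameMP substitutes rep_id + 1 into characters 6-9 of
! filebase (giving e.g. conf_0001.var), so that each replica reads its
! own start configuration; this description is an assumption.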
         call init_molecule(iabin, grpn, grpc, in_fil,
     &        fileNameMP(filebase, 6, 9, rep_id + 1))
      endif
      call init_lund
! Must call init_lundff *after* the molecule has been loaded.
      if (ientyp.eq.2) call init_lundff
      if (ientyp.eq.3) call init_abgn

      nml = 1

! RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
      call rmsinit(nml, 'EXAMPLES/1bdd.pdb')
! RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD

! Read the reference contact map (one row of nresi entries per line)
      open(12, file = 'EXAMPLES/1bdd.ref', status = "old")
      nresi = irsml2(nml) - irsml1(nml) + 1
      do i = 1, nresi
         read(12,*) (iref(i,j), j = 1, nresi)
      end do
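! Count the native contacts in the reference map; only residue pairs
! separated by at least three positions along the chain (j >= i+3)
! contribute to nci.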
      nci = 0
      do i = 1, nresi
         do j = nresi, i + 3, -1
            if (iref(i,j).eq.1) nci = nci + 1
         end do
      end do

! ======================================== start of parallel tempering run
      write (logString, *) "There are ", no,
     &     " processors available for ", rep_id
      call flush(6)
      nml = 1
      call distributeWorkLoad(no, nml)

      call partem_p(num_replica, nequi, nswp, nmes, nsave, newsta,
     &     switch, rep_id, partem_comm)
! ======================================== end of parallel tempering run
! Measure the elapsed wall-clock time using MPI_Wtime()
      endwtime = MPI_Wtime()


      if (my_pt_rank.eq.0) then
         write (logString, *) "time for simulation using ", num_proc,
     &        " processors =", endwtime - startwtime, " seconds"
         call flush(6)
      endif

      print *, 'update type, num calls, accepted calls'
      do i = 1, 5
         print *, i, ncalls(i), nacalls(i)
      enddo

! ======================================== End of main
      call mpi_finalize(ierr)

      end
