source: main_p.f@ 32289cd

Last change on this file since 32289cd was 32289cd, checked in by baerbaer <baerbaer@…>, 14 years ago

Explicitly declare variables.

git-svn-id: svn+ssh://svn.berlios.de/svnroot/repos/smmp/trunk@33 26dc1dd8-5c4e-0410-9ffe-d298b4865968

  • Property mode set to 100644
File size: 8.3 KB
! **************************************************************
!
! This file contains the main program (PARALLEL TEMPERING JOBS
! ONLY; FOR SINGLE-PROCESSOR JOBS USE main)
!
! This file also contains the subroutine: p_init_molecule
!
! Copyright 2003-2005 Frank Eisenmenger, U.H.E. Hansmann,
!                     Shura Hayryan, Chin-Ku
! Copyright 2007 Frank Eisenmenger, U.H.E. Hansmann,
!                Jan H. Meinke, Sandipan Mohanty
!
! CALLS init_energy,p_init_molecule,partem_p
!
! **************************************************************
      program pmain

      include 'INCL.H'
      include 'INCP.H'
      include 'incl_lund.h'
      include 'mpif.h'

      double precision startwtime, endwtime

      integer ierr, num_proc, iabin, nequi, nswp, nmes, nsave, ifrm, j
      integer i, nml, nresi, my_pt_rank, ncalls, nacalls
! MPI group handles and MPI error codes are integers
      integer group_world, error

      character*80 libdir
      character*80 in_fil,ou_fil,filebase, varfile
      character*80 fileNameMP

      character grpn*4,grpc*4
      logical newsta

!c    Number of replicas
      integer num_replica
!c    Number of processors per replica
      integer num_ppr
!c    Range of processors for creating communicators
      integer proc_range(3)
!c    Array of MPI groups
      integer group(MAX_REPLICA), group_partem
!c    Array of MPI communicators
      integer comm(MAX_REPLICA), partem_comm
!c    Array of nodes acting as masters for the energy calculation.
      integer ranks(MAX_REPLICA)
!c    Configuration switch
      integer switch
      integer rep_id
! set number of replicas
      double precision eols(MAX_REPLICA)


      common/updstats/ncalls(5),nacalls(5)


! MPI stuff, and random number generator initialisation

      call mpi_init(ierr)
      call mpi_comm_rank(mpi_comm_world,myrank,ierr)
      call mpi_comm_size(mpi_comm_world,num_proc,ierr)
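! myrank and num_proc now hold this task's rank and the total number
! of MPI tasks in mpi_comm_world.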

!     call VTSetup()
      enysolct = 0
      seed = 8368
      call sgrnd(seed)   ! Initialize the random number generator
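! All tasks use the same fixed seed here, so at this point they start
! from identical random number streams.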

! =================================================== Energy setup
      libdir='SMMP/'   ! Directory for the SMMP library files

! The switch in the following line is no longer used.
      flex=.false.     ! .true. for Flex / .false. for ECEPP

! Choose the energy type with the following switch instead ...
      ientyp = 0
!     0 => ECEPP2 or ECEPP3 depending on the value of sh2
!     1 => FLEX
!     2 => Lund force field
!     3 => ECEPP with Abagyan corrections
!

      sh2=.false.      ! .true. for ECEPP/2; .false. for ECEPP3
      epsd=.false.     ! .true. for distance-dependent epsilon

      itysol= 1        !  0: vacuum
                       ! >0: numerical solvent energy
                       ! <0: analytical solvent energy & gradients
      isolscl=.false.
      tesgrd=.false.   ! .true. to check analytical gradients

      call init_energy(libdir)
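! init_energy sets up the selected force field; it is expected to read
! its parameter files from libdir.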

! Record the wall-clock start time using MPI_Wtime()
      startwtime = MPI_Wtime()


! ================================================= Structure setup
      grpn = 'nh2'     ! N-terminal group
      grpc = 'cooh'    ! C-terminal group

      iabin = 1        ! =0: read from PDB file
                       ! =1: ab initio from sequence (& variables)

      in_fil='EXAMPLES/1bdd.seq'   ! Sequence file
      varfile = ' '

      newsta=.true.
      boxsize = 1000.0d0  ! Only relevant for multi-molecule systems
      num_replica = 1     ! Number of independent replicas. The file
                          ! 'temperatures' must contain at least as
                          ! many entries.
      nequi=10     ! Number of MC sweeps before measurements
                   ! and replica exchanges are started
      nswp=500000  ! Number of sweeps
      nmes=10      ! Interval for measurements and replica exchange
      nsave=1000   ! Not used at the moment

      switch = -1  ! How should the configuration be initialized?
                   ! -1 stretched chain
                   !  0 don't do anything
                   !  1 initialize each angle to a random value

      ifrm=0
      ntlml = 0

! Decide if and when to use BGS, and initialize Lund data structures
      bgsprob=0.6    ! Probability for BGS, given that it is possible
! upchswitch = 0 => no BGS, 1 => BGS with probability bgsprob,
!              2 => temperature-dependent choice
      upchswitch=1
      rndord=.true.
! =================================================================
! Distribute nodes to parallel tempering tasks.
! I assume that the number of nodes available is an integer
! multiple n of the number of replicas. Each replica then gets n
! processors to do its energy calculation.
      num_ppr = num_proc / num_replica
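! Integer division: any ranks beyond num_ppr*num_replica are left out
! of every replica group below and end up with MPI_COMM_NULL.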

      call mpi_comm_group(mpi_comm_world, group_world, error)

! The current version doesn't require a separate variable j. I
! could just use (i - 1) * num_ppr, but this way it's more flexible.
      j = 0
      do i = 1, num_replica
         ranks(i) = j
         proc_range(1) = j
         proc_range(2) = j + num_ppr - 1
         proc_range(3) = 1
         call mpi_group_range_incl(group_world, 1, proc_range, group(i)
     &        ,error)
         write (*,*) "Assigning rank ", j, proc_range,
     &        "to group", group(i)
         call flush(6)
         j = j + num_ppr
      enddo
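! After this loop group(i) holds the num_ppr consecutive world ranks
! (i-1)*num_ppr .. i*num_ppr-1, with ranks(i) pointing to the first.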

      do i = 1, num_replica
         call mpi_comm_create(mpi_comm_world, group(i), comm(i),error)
         if (comm(i).ne.MPI_COMM_NULL) then
            my_mpi_comm = comm(i)
            rep_id = i - 1
            write (*,*) rep_id, "has comm", my_mpi_comm
            call flush(6)
         endif
      enddo
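! mpi_comm_create is collective over mpi_comm_world: only members of
! group(i) get a valid communicator, everyone else gets MPI_COMM_NULL,
! so each task ends up with exactly one my_mpi_comm.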

! Set up the communicator used for parallel tempering
      write (*,*) "PTGroup=", ranks(:num_replica)
      call flush(6)
      call mpi_group_incl(group_world, num_replica, ranks,
     &     group_partem, error)
      call mpi_comm_create(mpi_comm_world, group_partem, partem_comm,
     &     error)

      if (partem_comm.ne.MPI_COMM_NULL) then
         write (*,*) partem_comm,myrank, "is master for ", rep_id, "."
      endif
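! group_partem collects the first rank of every replica group, so
! partem_comm connects one master per replica; these masters presumably
! handle the replica-exchange communication inside partem_p.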

      call mpi_comm_rank(my_mpi_comm,myrank,ierr)
      call mpi_comm_size(my_mpi_comm,no,ierr)

      write (*,*) "My new rank is ", myrank, "of", no
      call flush(6)
! = Done setting up communicators ====================================

      if (newsta) then
         varfile = 'EXAMPLES/1bdd.var'
         call init_molecule(iabin, grpn, grpc, in_fil, varfile)
      else
         filebase = "conf_0000.var"
         call init_molecule(iabin, grpn, grpc, in_fil,
     &        fileNameMP(filebase, 6, 9, rep_id + 1))
      endif
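! For restarts, fileNameMP is expected to splice the replica number
! into positions 6-9 of filebase, so each replica reads its own
! restart file.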
      call init_lund
! init_lundff must be called *after* the molecule has been loaded.
      if (ientyp.eq.2) call init_lundff
      if (ientyp.eq.3) call init_abgn

      nml = 1


! RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
      call rmsinit(nml,'EXAMPLES/1bdd.pdb')
! RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
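! rmsinit prepares the RMSD measurement of molecule nml against the
! reference structure in EXAMPLES/1bdd.pdb.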

! READ REFERENCE CONTACT MAP
      open(12, file = 'EXAMPLES/1bdd.ref', status = "old")
      nresi = irsml2(nml) - irsml1(nml) + 1
      do i = 1, nresi
         read(12,*) (iref(i,j), j = 1, nresi)
      end do
      nci = 0
      do i = 1, nresi
         do j = nresi, i + 3, -1
            if (iref(i,j).eq.1) nci = nci + 1
         end do
      end do
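! nci now holds the number of contacts in the reference map, counting
! only residue pairs separated by at least three positions.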

! ======================================== start of parallel tempering run
      write (*,*) "There are ", no,
     &     " processors available for ", rep_id
      call flush(6)
      nml = 1
      call distributeWorkLoad(no, nml)
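! distributeWorkLoad is expected to split the energy evaluation of
! molecule nml over the no ranks of this replica's communicator.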

      call partem_p(num_replica, nequi, nswp, nmes, nsave, newsta,
     &     switch, rep_id, partem_comm)
! ======================================== end of parallel tempering run
! Record the wall-clock end time using MPI_Wtime()
      endwtime = MPI_Wtime()


      if (my_pt_rank.eq.0) then
         write(*,*) "time for simulation using ", num_proc,
     &        " processors =", endwtime - startwtime, " seconds"
         call flush(6)
      endif

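! ncalls and nacalls (common /updstats/) accumulate, per update type,
! the number of attempted and accepted Monte Carlo moves.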
      print *,'update type, num calls, accepted calls '
      do i = 1, 5
         print *, i, ncalls(i), nacalls(i)
      enddo

! ======================================== End of main
      call mpi_finalize(ierr)

      end