Changeset bd2278d for main_bgl_p.f


Ignore:
Timestamp:
09/05/08 11:49:42 (16 years ago)
Author:
baerbaer <baerbaer@…>
Branches:
master
Children:
fafe4d6
Parents:
2ebb8b6
Message:

Reformatting comments and continuation marks.

Fortran 90 and higher use ! to mark comments no matter where they are in the
code. The only valid continuation marker is &.
I also added the SMMP.kdevelop.filelist to the repository to make it easier
to use kdevelop.

git-svn-id: svn+ssh://svn.berlios.de/svnroot/repos/smmp/trunk@12 26dc1dd8-5c4e-0410-9ffe-d298b4865968

File:
1 edited

Legend:

Unmodified
Added
Removed
  • main_bgl_p.f

    r2ebb8b6 rbd2278d  
    1 c     **************************************************************
    2 c     
    3 c     This file contains the   main (PARALLEL TEMPERING  JOBS ONLY,
    4 C     FOR SINGULAR PROCESSOR JOBS USE main)
    5 C     
    6 C     This file contains also the subroutine: p_init_molecule
    7 c     
    8 c     Copyright 2003-2005  Frank Eisenmenger, U.H.E. Hansmann,
    9 c     Shura Hayryan, Chin-Ku
    10 c Copyright 2007       Frank Eisenmenger, U.H.E. Hansmann,
    11 c                      Jan H. Meinke, Sandipan Mohanty
    12 c     
    13 C     CALLS init_energy,p_init_molecule,partem_p
    14 C     
    15 c     **************************************************************
     1!     **************************************************************
     2!     
     3!     This file contains the   main (PARALLEL TEMPERING  JOBS ONLY,
     4!     FOR SINGULAR PROCESSOR JOBS USE main)
     5!     
     6!     This file contains also the subroutine: p_init_molecule
     7!     
     8!     Copyright 2003-2005  Frank Eisenmenger, U.H.E. Hansmann,
     9!     Shura Hayryan, Chin-Ku
     10! Copyright 2007       Frank Eisenmenger, U.H.E. Hansmann,
     11!                      Jan H. Meinke, Sandipan Mohanty
     12!     
     13!     CALLS init_energy,p_init_molecule,partem_p
     14!     
     15!     **************************************************************
    1616      program pmain
    1717
     
    2828      logical newsta
    2929
    30 cc    Number of replicas
     30!c    Number of replicas
    3131      integer num_replica
    32 cc    Number of processors per replica
     32!c    Number of processors per replica
    3333      integer num_ppr
    34 cc    Range of processors for creating communicators
     34!c    Range of processors for creating communicators
    3535      integer proc_range(3)
    36 cc    Array of MPI groups
     36!c    Array of MPI groups
    3737      integer group(MAX_REPLICA), group_partem
    38 cc    Array of MPI communicators
     38!c    Array of MPI communicators
    3939      integer comm(MAX_REPLICA), partem_comm
    40 cc    Array of nodes acting as masters for the energy calculation.
     40!c    Array of nodes acting as masters for the energy calculation.
    4141      integer ranks(MAX_REPLICA)
    42 cc    Configuration switch
     42!c    Configuration switch
    4343      integer switch
    4444      integer rep_id
    45 c     set number of replicas
     45!     set number of replicas
    4646      double precision eols(MAX_REPLICA)
    4747      integer ndims, nldims, log2ppr, color
     
    5353
    5454
    55 c     MPI stuff, and random number generator initialisation
     55!     MPI stuff, and random number generator initialisation
    5656
    5757      call mpi_init(ierr)
     
    8888      call sgrnd(seed)          ! Initialize the random number generator
    8989
    90 c     =================================================== Energy setup
     90!     =================================================== Energy setup
    9191      libdir='SMMP/'     
    92 c     Directory for SMMP libraries
    93 
    94 c     The switch in the following line is now not used.
     92!     Directory for SMMP libraries
     93
     94!     The switch in the following line is now not used.
    9595      flex=.false.              ! .true. for Flex  / .false. for ECEPP
    9696
    97 c     Choose energy type with the following switch instead ...
     97!     Choose energy type with the following switch instead ...
    9898      ientyp = 0
    99 c     0  => ECEPP2 or ECEPP3 depending on the value of sh2
    100 c     1  => FLEX
    101 c     2  => Lund force field
    102 c     3  => ECEPP with Abagyan corrections
    103 c     
     99!     0  => ECEPP2 or ECEPP3 depending on the value of sh2
     100!     1  => FLEX
     101!     2  => Lund force field
     102!     3  => ECEPP with Abagyan corrections
     103!     
    104104
    105105      sh2=.false.               ! .true. for ECEPP/2; .false. for ECEPP3
     
    114114      call init_energy(libdir)
    115115
    116 c     calculate CPU time using MPI_Wtime()
     116!     calculate CPU time using MPI_Wtime()
    117117      startwtime = MPI_Wtime()
    118118
    119119
    120 c     ================================================= Structure setup
     120!     ================================================= Structure setup
    121121      grpn = 'nh2'              ! N-terminal group
    122122      grpc = 'cooh'             ! C-terminal group
     
    153153      ntlml = 0
    154154
    155 c Decide if and when to use BGS, and initialize Lund data structures
     155! Decide if and when to use BGS, and initialize Lund data structures
    156156      bgsprob=0.6    ! Prob for BGS, given that it is possible
    157 c upchswitch= 0 => No BGS 1 => BGS with probability bgsprob
    158 c 2 => temperature dependent choice
     157! upchswitch= 0 => No BGS 1 => BGS with probability bgsprob
     158! 2 => temperature dependent choice
    159159      upchswitch=1
    160160      rndord=.true.
    161161      if (ientyp.eq.2) call init_lundff
    162 c     =================================================================
    163 c     Distribute nodes to parallel tempering tasks
    164 c     I assume that the number of nodes available is an integer
    165 c     multiple n of the number of replicas. Each replica then gets n
    166 c     processors to do its energy calculation.
     162!     =================================================================
     163!     Distribute nodes to parallel tempering tasks
     164!     I assume that the number of nodes available is an integer
     165!     multiple n of the number of replicas. Each replica then gets n
     166!     processors to do its energy calculation.
    167167      num_ppr = num_proc / num_replica
    168168
     
    206206!      call mpi_comm_group(mpi_comm_world,  group_world, error)
    207207
    208 c     The current version doesn't require a separate variable j. I
    209 c     could just use i * num_ppr but this way it's more flexible.
     208!     The current version doesn't require a separate variable j. I
     209!     could just use i * num_ppr but this way it's more flexible.
    210210!       j = 0
    211211!       do i = 1, num_replica
     
    277277      nml = 1
    278278
    279 c     RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
     279!     RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
    280280      call rmsinit(nml,ref_pdb)
    281 c     RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
     281!     RRRRRRRRRRMMMMMMMMMMMMSSSSSSSSSSDDDDDDDDDDDDD
    282282
    283283!     READ  REFERENCE CONTACT MAP
     
    294294      end do
    295295
    296 c     ========================================  start of parallel tempering run
     296!     ========================================  start of parallel tempering run
    297297      write (*,*) "There are ", no,
    298298     &            " processors available for ",rep_id
     
    303303      call partem_p(num_replica, nequi, nswp, nmes, nsave, newsta,
    304304     &              switch, rep_id, partem_comm)
    305 c     ========================================  end of parallel tempering run
    306 c     calculate CPU time using MPI_Wtime()
     305!     ========================================  end of parallel tempering run
     306!     calculate CPU time using MPI_Wtime()
    307307      endwtime = MPI_Wtime()
    308308
     
    319319      enddo
    320320
    321 c     ========================================  End of main
     321!     ========================================  End of main
    322322      CALL mpi_finalize(ierr)
    323323
Note: See TracChangeset for help on using the changeset viewer.