program xbeach

   use params
   use spaceparams
   use xmpi_module
   use initialize
   use boundaryconditions
   use flow_timestep_module
   use morphevolution
   use outputmod
   use readtide_module
   use wave_stationary_module
   use wave_timestep_module
   use timestep_module
   use readkey_module

   IMPLICIT NONE

   type(parameters)         :: par
   type(spacepars), pointer :: s
   type(spacepars), target  :: sglobal
   type(spacepars), target  :: slocal

   character(len=80)        :: dummystring
   integer                  :: it
   real*8                   :: tbegin,tend
#ifdef USEMPI
   real*8                   :: t0,t01,t1
#endif

#ifdef USEMPI
   s => slocal
   call xmpi_initialize
   t0 = MPI_Wtime()
#endif

   call cpu_time(tbegin)

   if (xmaster) then
      write(*,*) 'Welcome to XBeach'
      write(*,*) 'General Input Module'
#ifdef USEMPI
      write(*,*) 'MPI version, running on ',xmpi_size,' processes'
#endif
   endif

   ! General input per module
   !
   ! The basic input routines used by the following three subroutines
   ! are MPI-aware; no need to do anything special here.
   !
   par%t = 0.d0
   it    = 0

   call wave_input(par)
   call flow_input(par)
   call sed_input(par)
#ifdef USEMPI
   call distribute_par(par)
#endif

   if (xmaster) then
      write(*,*) 'Building Grid and Bathymetry and....'
      write(*,*) 'Distributing wave energy across the directional space ....'
   endif

   ! Grid and bathymetry
   !
   ! grid_bathy will allocate x,y,xz,yz,xu,yv,xw,yw,zb,zb0 only
   ! on the master process
   !
   call space_alloc_scalars(sglobal)
   s => sglobal
   call grid_bathy(s,par)
   ! s%nx and s%ny are available now

#ifdef USEMPI
   call xmpi_determine_processor_grid(s%nx,s%ny)
   if (xmaster) then
      write(*,*) 'processor grid: ',xmpi_m,' X ',xmpi_n
   endif
#endif

   ! Jump into subroutine readtide
   call readtide(s,par)    ! Ap 15/10
                           ! runs only on master wwvv

   if (xmaster) then
      write(*,*) 'Initializing .....'
   endif

   ! Initialisations
   call wave_init(s,par)   ! wave_init only works on the master process
#ifdef USEMPI
   ! some of par has been changed, so:
   call distribute_par(par)
#endif
   call flow_init(s,par)   ! works only on the master process
   call sed_init(s,par)    ! works only on the master process
   call init_output(sglobal,slocal,par,it)
#ifdef USEMPI
   ! some of par has changed, so:
   call distribute_par(par)
#endif

#ifdef USEMPI
   s => slocal
   !
   ! Determine how to divide the submatrices over the processor grid
   ! and distribute all values in sglobal to slocal.
   ! nx and ny will be adjusted in slocal.
   ! The arrays is,js,lm,ln (describing the distribution) will
   ! be filled in slocal.
   ! Note: slocal is available on all nodes, including master.
   !
   call space_distribute_space(sglobal,slocal,par)
   !call space_consistency(slocal,'ALL')
#endif
   call printit(sglobal,slocal,par,it,'after space_distribute_space')

   if (xmaster) then
      call readkey('params.txt','checkparams',dummystring)
      write(*,*) 'Stepping into the time loop ....'
   endif

!#ifdef USEMPI
!   t01 = MPI_Wtime()
!#endif

   do while (par%t