!>       \file mhm_driver.f90

!>       \brief Distributed precipitation-runoff model mHM

!>       \details This is the main driver of mHM, which calls
!>       one instance of mHM for a multiple domains and a given period.

!>       \image html  mhm5-logo.png "Typical mHM cell"
!>       \image latex mhm5-logo.pdf "Typical mHM cell" width=10cm

!>       \authors Luis Samaniego & Rohini Kumar (UFZ)

!>       \date Jun 2018

!>       \version \htmlinclude version.txt \latexinclude version.txt

!>       \copyright (c) \f$2005 - \the\year{}\f$, Helmholtz-Zentrum fuer Umweltforschung GmbH - UFZ.
!!       All rights reserved.
!!
!!       This code is a property of:
!!
!!       ----------------------------------------------------------
!!
!!       Helmholtz-Zentrum fuer Umweltforschung GmbH - UFZ
!!       Registered Office: Leipzig
!!       Registration Office: Amtsgericht Leipzig
!!       Trade Register: Nr. B 4703
!!       Chairman of the Supervisory Board: MinDirig Wilfried Kraus
!!       Scientific Director: Prof. Dr. Georg Teutsch
!!       Administrative Director: Dr. Heike Grassmann
!!
!!       ----------------------------------------------------------
!!
!!       NEITHER UFZ NOR THE DEVELOPERS MAKES ANY WARRANTY,
!!       EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE
!!       OF THIS SOFTWARE. If software is modified to produce
!!       derivative works, such modified software should be
!!       clearly marked, so as not to confuse it with the version
!!       available from UFZ.  This code can be used for research
!!       purposes ONLY provided that the following sources are
!!       acknowledged:
!!
!!       Samaniego L., Kumar R., Attinger S. (2010): Multiscale
!!       parameter regionalization of a grid-based hydrologic
!!       model at the mesoscale.  Water Resour. Res., 46,
!!       W05523, doi:10.1029/2008WR007327.
!!
!!       Kumar, R., L. Samaniego, and S. Attinger (2013), Implications
!!       of distributed hydrologic model parameterization on water
!!       fluxes at multiple scales and locations, Water Resour. Res.,
!!       49, doi:10.1029/2012WR012195.
!!
!!       For commercial applications you have to consult the
!!       authorities of the UFZ.

! Modifications:
! Stephan Thober                Nov 2013 - added read in of latitude longitude fields
! Matthias Zink                 Mar 2013 - edited screen output for gauges added inflow gauges
! Matthias Cuntz & Juliane Mai  Mar 2014 - Likelihood Kavetski uses 2 more parameters for the
!                                          error model global_parameters -> local_parameters
! Rohini Kumar                  Apr 2014 - implementation of the mHM run on a single cell configuration
!                                          also for the routing mode.
!                                        - run mHM at the input data level i.e. L0 grid
! Rohini Kumar                  May 2014 - model run on a regular lat-lon grid or on a regular X-Y coordinate system
! Stephan Thober                May 2014 - moved read meteo forcings to mo_mhm_eval
! Matthias Cuntz & Juliane Mai  Nov 2014 - LAI input from daily, monthly or yearly files
! Matthias Zink                 Mar 2015 - added optional soil moisture read in for calibration
! Luis Samaniego                Jul 2015 - added temporal directories for optimization
! Stephan Thober                Aug 2015 - removed routing related variables
! Stephan Thober                Oct 2015 - reorganized optimization (now compatible with mRM)
! Oldrich Rakovec, Rohini Kumar Oct 2015 - added reading of domain averaged TWS and objective function 15
!                                          for simultaneous calibration based on runoff and TWS
! Rohini Kumar                  Mar 2016 - options to handle different soil databases modified MPR to include
!                                          soil horizon specific properties/parameters
! Stephan Thober                Nov 2016 - implemented adaptive timestep for routing
! Rohini Kumar                  Dec 2016 - options to read (monthly mean) LAI fields
! Robert Schweppe               Jun 2018 - refactoring and reformatting
! Maren Kaluza                  Oct 2019 - TWS to data structure
! M.C. Demirel, Simon Stisen    Jun 2020 - New Soil Moisture Process: Feddes and FC dependency on root fraction coefficient processCase(3) = 4
PROGRAM mhm_driver

  USE mo_file, ONLY : &
          version, version_date, file_main, &      ! main info
          file_namelist_mhm, unamelist_mhm, &      ! filename of namelist: main setup
          file_namelist_mhm_param, unamelist_mhm_param, &      ! filename of namelist: mhm model parameter
          file_defOutput                                               ! filename of namelist: output setup
  USE mo_finish, ONLY : finish                         ! Finish with style
  USE mo_global_variables, ONLY : &
          dirPrecipitation, &      ! directories
          dirTemperature, &      ! directories
          dirReferenceET, &      ! PET input path  if process 5 is 'PET is input' (case 0)
          dirMinTemperature, dirMaxTemperature, &      ! PET input paths if process 5 is Hargreaves-Samani  (case 1)
          dirNetRadiation, &      ! PET input paths if process 5 is Priestley-Taylor (case 2)
          dirabsVapPressure, dirwindspeed, &      ! PET input paths if process 5 is Penman-Monteith  (case 3)
          timestep_model_inputs, & !frequency of input read
          L1_twsaObs, &
          L1_etObs, &
          L1_neutronsObs, &
          L1_smObs
  USE mo_optimization_types, ONLY : &
          optidata ! type for opti data
  USE mo_common_mHM_mRM_variables, ONLY : &
          nTstepDay, &      ! number of timesteps per day (former: NAGG)
          simPer, &      ! simulation period
          optimize, opti_function, &                                   ! optimization on/off and optimization method
          mrm_coupling_mode
  USE mo_common_variables, ONLY : &
          write_restart, &      ! restart writing flags
          mhmFileRestartOut, &
          dirConfigOut, &
          dirMorpho, dirLCover, &                                         ! directories
          dirOut, &      ! directories
          domainMeta, &
#ifdef MPI
          comm, &
#endif
          processMatrix, &      ! domain information,  processMatrix
          global_parameters, global_parameters_name      ! mhm parameters (gamma) and their clear names
  USE mo_kind, ONLY : i4, dp                         ! number precision
  USE mo_message, ONLY : message, message_text          ! For print out
  USE mo_meteo_forcings, ONLY : prepare_meteo_forcings_data
  USE mo_mhm_eval, ONLY : mhm_eval
  USE mo_read_optional_data, ONLY : readOptidataObs ! read optional observed data
  USE mo_common_read_config, ONLY : common_read_config                    ! Read main configuration files
  USE mo_common_mHM_mRM_read_config, ONLY : &
          common_mHM_mRM_read_config, check_optimization_settings ! Read main configuration files
  USE mo_mpr_read_config, ONLY : mpr_read_config                    ! Read main configuration files
  USE mo_mhm_read_config, ONLY : mhm_read_config                    ! Read main configuration files
  USE mo_read_wrapper, ONLY : read_data                      ! Read all input data
  USE mo_restart, ONLY : write_restart_files
  USE mo_startup, ONLY : mhm_initialize
  USE mo_string_utils, ONLY : num2str, separator             ! String magic
  USE mo_timer, ONLY : &
          timers_init, timer_start, timer_stop, timer_get              ! Timing of processes
  USE mo_write_ascii, ONLY : &
          write_configfile, &      ! Writing Configuration file
          write_optifile, &      ! Writing optimized parameter set and objective
          write_optinamelist     ! Writing optimized parameter set to a namelist
  USE mo_objective_function, ONLY : &
#ifdef MPI
          objective_subprocess, &
          objective_master, &
#endif
          objective                 ! objective functions and likelihoods
  USE mo_optimization, ONLY : optimization
  USE mo_mrm_objective_function_runoff, ONLY : &
#ifdef MPI
          single_objective_runoff_master, &
          single_objective_runoff_subprocess, &
#endif
          single_objective_runoff
  USE mo_mrm_init, ONLY : mrm_init, mrm_configuration
  USE mo_mrm_write, only : mrm_write

  !$ USE omp_lib, ONLY : OMP_GET_NUM_THREADS           ! OpenMP routines
#ifdef MPI
  USE mpi_f08
#endif
#ifdef NAG
  USE f90_unix_dir, ONLY : CHDIR, GETCWD
#endif
  USE mo_check, ONLY: check_dir

  IMPLICIT NONE

  ! local
  integer(i4), dimension(8) :: datetime         ! Date and time
  !$ integer(i4)                        :: n_threads        ! OpenMP number of parallel threads
  integer(i4) :: domainID, iDomain               ! Counters
  integer(i4) :: itimer           ! Current timer number
  integer(i4) :: nTimeSteps
  real(dp) :: funcbest         ! best objective function achivied during optimization
  logical, dimension(:), allocatable :: maskpara ! true  = parameter will be optimized, = parameter(i,4) = 1
  !                                              ! false = parameter will not be optimized = parameter(i,4) = 0
  procedure(mhm_eval), pointer :: eval
  procedure(objective), pointer :: obj_func

  character(len=255)  :: cur_work_dir, new_work_dir

  logical :: ReadLatLon

  logical :: compiled_with_openmp = .false.

#ifdef MPI
  integer             :: ierror
  integer(i4)         :: nproc
  integer(i4)         :: rank, oldrank
  logical :: compiled_with_mpi = .true.

  ! Initialize MPI
  call MPI_Init(ierror)
  call MPI_Comm_dup(MPI_COMM_WORLD, comm, ierror)
  ! find number of processes nproc
  call MPI_Comm_size(comm, nproc, ierror)
  ! find the number the process is referred to, called rank
  call MPI_Comm_rank(comm, rank, ierror)
  oldrank = rank
  write(*,*) 'MPI!, comm', rank, nproc
#else
  logical :: compiled_with_mpi = .false.
#endif

  ! check for working dir (added argument to the executable)
  CALL get_command_argument(1, new_work_dir)
  IF (len_trim(new_work_dir) .ne. 0) call chdir(trim(new_work_dir))
  CALL getcwd(cur_work_dir)

  ! --------------------------------------------------------------------------
  ! START
  ! --------------------------------------------------------------------------
  call message(separator)
  call message('              mHM-UFZ')
  call message()
  call message('    MULTISCALE HYDROLOGIC MODEL')
  call message('           Version: ', trim(version))
  call message('           Date:    ', trim(version_date))
  call message()
  call message('Originally by L. Samaniego & R. Kumar')

  call message(separator)

  call message()
  !$ compiled_with_openmp = .true.
  if (compiled_with_openmp) then
    call message('OpenMP used.')
  else
    call message('Openmp not used.')
  end if
  if (compiled_with_mpi) then
    call message('MPI used.')
  else
    call message('MPI not used.')
  end if

  call date_and_time(values = datetime)
  message_text = trim(num2str(datetime(3), '(I2.2)')) // "." // trim(num2str(datetime(2), '(I2.2)')) &
          // "." // trim(num2str(datetime(1), '(I4.4)')) // " " // trim(num2str(datetime(5), '(I2.2)')) &
          // ":" // trim(num2str(datetime(6), '(I2.2)')) // ":" // trim(num2str(datetime(7), '(I2.2)'))
  call message('Start at ', trim(message_text), '.')
  call message('Working directory: ', trim(cur_work_dir))
  call message('Using main file ', trim(file_main), ' and namelists: ')
  call message('     ', trim(file_namelist_mhm))
  call message('     ', trim(file_namelist_mhm_param))
  call message('     ', trim(file_defOutput))
  call message()

  !$OMP PARALLEL
  !$ n_threads = OMP_GET_NUM_THREADS()
  !$OMP END PARALLEL
  !$ call message('Run with OpenMP with ', trim(num2str(n_threads)), ' threads.')

  call message()
  call message('Read namelist file: ', trim(file_namelist_mhm))
  call message('Read namelist file: ', trim(file_namelist_mhm_param))
  call message('Read namelist file: ', trim(file_defOutput))
  call common_read_config(file_namelist_mhm, unamelist_mhm)
#ifdef MPI
  call MPI_Comm_size(domainMeta%comMaster, nproc, ierror)
  ! find the number the process is referred to, called rank
  call MPI_Comm_rank(domainMeta%comMaster, rank, ierror)
#endif
  call mpr_read_config(file_namelist_mhm, unamelist_mhm, file_namelist_mhm_param, unamelist_mhm_param)
  call common_mHM_mRM_read_config(file_namelist_mhm, unamelist_mhm)
  call mhm_read_config(file_namelist_mhm, unamelist_mhm)
  call check_optimization_settings()
  mrm_coupling_mode = 2_i4
  call mrm_configuration(file_namelist_mhm, unamelist_mhm, &
          file_namelist_mhm_param, unamelist_mhm_param, ReadLatLon)
  call message()
  call message('# of domains:         ', trim(num2str(domainMeta%overallNumberOfDomains)))
  call message()
  call message('  Input data directories:')
  do iDomain = 1, domainMeta%nDomains
    domainID = domainMeta%indices(iDomain)
    call message('  --------------')
    call message('      DOMAIN                  ', num2str(domainID, '(I3)'))
    call message('  --------------')
    call check_dir(dirMorpho(iDomain), "Morphological directory:", .false., 4, 30)
    call check_dir(dirLCover(iDomain), "Land cover directory:", .false., 4, 30)
    call check_dir(dirPrecipitation(iDomain), "Precipitation directory:", .false., 4, 30)
    call check_dir(dirTemperature(iDomain), "Temperature directory:", .false., 4, 30)
    ! the required meteo inputs depend on the selected PET process (process 5)
    select case (processMatrix(5, 1))
      case(-1 : 0) ! PET is input
        call check_dir(dirReferenceET(iDomain), "PET directory:", .false., 4, 30)
      case(1) ! Hargreaves-Samani
        call check_dir(dirMinTemperature(iDomain), "Min. temperature directory:", .false., 4, 30)
        call check_dir(dirMaxTemperature(iDomain), "Max. temperature directory:", .false., 4, 30)
      case(2) ! Priestely-Taylor
        call check_dir(dirNetRadiation(iDomain), "Net radiation directory:", .false., 4, 30)
      case(3) ! Penman-Monteith
        call check_dir(dirNetRadiation(iDomain), "Net radiation directory:", .false., 4, 30)
        call check_dir(dirabsVapPressure(iDomain), "Abs. vap. press. directory:", .false., 4, 30)
        call check_dir(dirwindspeed(iDomain), "Windspeed directory:", .false., 4, 30)
    end select
    call check_dir(dirOut(iDomain), "Output directory:", .true., 4, 30)
    call message()
  end do

  ! Start timings
  call timers_init

  ! --------------------------------------------------------------------------
  ! READ AND INITIALIZE
  ! --------------------------------------------------------------------------
  itimer = 1
#ifdef MPI
  ! ComLocal is a communicator, i.e. a group of processes assigned to the same
  ! domain, with a master and subprocesses. Only the master processes of these
  ! groups need to read the data. The master process with rank 0 only
  ! coordinates the other processes and does not need to read the data.
  if (rank > 0 .and. domainMeta%isMasterInComLocal) then
#endif
  call message()
  call message('  Read data ...')
  call timer_start(itimer)
  ! for DEM, slope, ... define nGvar local
  ! read_data has a domain loop inside
  call read_data(simPer)
  call timer_stop(itimer)
  call message('    in ', trim(num2str(timer_get(itimer), '(F9.3)')), ' seconds.')

  ! read data for every domain
  itimer = itimer + 1
  call message('  Initialize domains ...')
  call timer_start(itimer)
  call mhm_initialize()
  call timer_stop(itimer)
  call message('  in ', trim(num2str(timer_get(itimer), '(F9.3)')), ' seconds.')

  itimer = itimer + 1
  call message('  Read forcing and optional data ...')
  call timer_start(itimer)

  do iDomain = 1, domainMeta%nDomains
    domainID = domainMeta%indices(iDomain)
    ! read meteorology now, if optimization is switched on
    ! meteorological forcings (reading, upscaling or downscaling)
    if (timestep_model_inputs(iDomain) .eq. 0_i4) then
      call prepare_meteo_forcings_data(iDomain, domainID, 1)
    end if

    ! read optional optional data if necessary
    if (optimize) then
      select case (opti_function)
        case(10 : 13, 28)
          ! read optional spatio-temporal soil mositure data
          call readOptidataObs(iDomain, domainID, L1_smObs(iDomain))
        case(17)
          ! read optional spatio-temporal neutrons data
          call readOptidataObs(iDomain, domainID, L1_neutronsObs(iDomain))
        case(27, 29, 30)
          ! read optional spatio-temporal evapotranspiration data
          call readOptidataObs(iDomain, domainID, L1_etObs(iDomain))
        case(15)
          ! read optional spatio-temporal tws data
          call readOptidataObs(iDomain, domainID, L1_twsaObs(iDomain))
        case(33)
          ! read optional spatio-temporal evapotranspiration data
          if (domainMeta%optidata(iDomain) == 0 .or. domainMeta%optidata(iDomain) == 5 .or. &
            domainMeta%optidata(iDomain) == 6 ) then
            call readOptidataObs(iDomain, domainID, L1_etObs(iDomain))
          end if
          ! read optional spatio-temporal tws data
          if (domainMeta%optidata(iDomain) == 0 .or. domainMeta%optidata(iDomain) == 3 .or. &
            domainMeta%optidata(iDomain) == 6 ) then
            call readOptidataObs(iDomain, domainID, L1_twsaObs(iDomain))
          end if
      end select
    end if

  end do
  call timer_stop(itimer)
  call message('    in ', trim(num2str(timer_get(itimer), '(F9.3)')), ' seconds.')

  ! --------------------------------------------------------------------------
  ! READ and INITIALISE mRM ROUTING
  ! --------------------------------------------------------------------------
  if (processMatrix(8, 1) > 0) call mrm_init(file_namelist_mhm, unamelist_mhm, &
          file_namelist_mhm_param, unamelist_mhm_param, ReadLatLon=ReadLatLon)

  !this call may be moved to another position as it writes the master config out file for all domains
  call write_configfile()

#ifdef MPI
  end if
#endif

  ! --------------------------------------------------------------------------
  ! RUN OR OPTIMIZE
  ! --------------------------------------------------------------------------
  itimer = itimer + 1
  call message()
  if (optimize) then
    eval => mhm_eval

    select case(opti_function)
     case(1 : 9, 14, 31 : 32)
      ! call optimization against only runoff (no other variables)
      obj_func => single_objective_runoff
#ifdef MPI
      if (rank == 0 .and. domainMeta%isMasterInComLocal) then
        obj_func => single_objective_runoff_master
        call optimization(eval, obj_func, dirConfigOut, funcBest, maskpara)
      else if (domainMeta%isMasterInComLocal) then
        ! In case of a master process from ComLocal, i.e. a master of a group of
        ! processes that are assigned to a single domain, this process calls the
        ! objective subroutine directly. The master over all processes collects
        ! the data and runs the dds/sce/other opti method.
        call single_objective_runoff_subprocess(eval)
      end if
#else
      call optimization(eval, obj_func, dirConfigOut, funcBest, maskpara)
#endif
     case(10 : 13, 15, 17, 27, 28, 29, 30, 33)
      ! call optimization for other variables
      obj_func => objective
#ifdef MPI
      if (rank == 0 .and. domainMeta%isMasterInComLocal) then
        obj_func => objective_master
        call optimization(eval, obj_func, dirConfigOut, funcBest, maskpara)
      else if (domainMeta%isMasterInComLocal) then
        ! In case of a master process from ComLocal, i.e. a master of a group of
        ! processes that are assigned to a single domain, this process calls the
        ! objective subroutine directly. The master over all processes collects
        ! the data and runs the dds/sce/other opti method.
        call objective_subprocess(eval)
      end if
#else
      call optimization(eval, obj_func, dirConfigOut, funcBest, maskpara)
#endif
    case default
      call message('***ERROR: mhm_driver: The given objective function number ', &
              trim(adjustl(num2str(opti_function))), ' in mhm.nml is not valid!')
      stop 1
    end select
#ifdef MPI
  if (rank == 0 .and. domainMeta%isMasterInComLocal) then
#endif
    ! write a file with final objective function and the best parameter set
    call write_optifile(funcbest, global_parameters(:, 3), global_parameters_name(:))
    ! write a file with final best parameter set in a namlist format
    call write_optinamelist(processMatrix, global_parameters, maskpara, global_parameters_name(:))
    deallocate(maskpara)
#ifdef MPI
  end if
#endif
  else

#ifdef MPI
    if (rank > 0 .and. domainMeta%isMasterInComLocal) then
#endif
      ! --------------------------------------------------------------------------
      ! call mHM
      ! get runoff timeseries if possible (i.e. when domainMeta%doRouting,
      ! processMatrix(8,1) > 0)
      ! get other model outputs  (i.e. gridded fields of model output)
      ! --------------------------------------------------------------------------
      call message('  Run mHM')
      call timer_start(itimer)
      call mhm_eval(global_parameters(:, 3))
      call timer_stop(itimer)
      call message('    in ', trim(num2str(timer_get(itimer), '(F12.3)')), ' seconds.')
#ifdef MPI
    endif
#endif

  end if

#ifdef MPI
  if (rank > 0 .and. domainMeta%isMasterInComLocal) then
#endif
  ! --------------------------------------------------------------------------
  ! WRITE RESTART files
  ! --------------------------------------------------------------------------
  if (write_restart  .AND. (.NOT. optimize)) then
    itimer = itimer + 1
    call message()
    call message('  Write restart file')
    call timer_start(itimer)
    call write_restart_files(mhmFileRestartOut)
    call timer_stop(itimer)
    call message('    in ', trim(num2str(timer_get(itimer), '(F9.3)')), ' seconds.')
  end if

  ! --------------------------------------------------------------------------
  ! WRITE RUNOFF (INCLUDING RESTART FILES, has to be called after mHM restart
  ! files are written)
  ! --------------------------------------------------------------------------
  if (processMatrix(8, 1) > 0) call mrm_write()

#ifdef MPI
  end if
#endif

  ! --------------------------------------------------------------------------
  ! FINISH UP
  ! --------------------------------------------------------------------------
  itimer = itimer + 1
  ! call message()
  ! call message('  Write ouput data')
  ! call timer_start(itimer)
  ! ! call write_data()
  ! call timer_stop(itimer)
  ! call message('    in ', trim(num2str(timer_get(itimer),'(F9.3)')), ' seconds.')

  nTimeSteps = maxval(simPer(1 : domainMeta%nDomains)%julEnd - simPer(1 : domainMeta%nDomains)%julStart + 1) * nTstepDay
  call date_and_time(values = datetime)
  call message()
  message_text = 'Done ' // trim(num2str(nTimeSteps, '(I10)')) // " time steps."
  call message(trim(message_text))
  message_text = trim(num2str(datetime(3), '(I2.2)')) // "." // trim(num2str(datetime(2), '(I2.2)')) &
          // "." // trim(num2str(datetime(1), '(I4.4)')) // " " // trim(num2str(datetime(5), '(I2.2)')) &
          // ":" // trim(num2str(datetime(6), '(I2.2)')) // ":" // trim(num2str(datetime(7), '(I2.2)'))
  call message('Finished at ', trim(message_text), '.')
  call message()
  call finish('mHM', 'Finished!')

#ifdef MPI
  ! find number of processes nproc
  call MPI_Comm_size(comm, nproc, ierror)
  call MPI_Comm_rank(comm, rank, ierror)
  write(*,*) 'MPI finished', rank, nproc
  call MPI_Finalize(ierror)
#endif

END PROGRAM mhm_driver