MODFLOW 6  version 6.6.0.dev0
USGS Modular Hydrologic Model
parallelsolutionmodule Module Reference

Data Types

type  parallelsolutiontype
 

Functions/Subroutines

logical(lgp) function par_has_converged (this, max_dvc)
 Check global convergence. The local maximum dependent variable change is max-reduced over MPI across all processes.
 
integer(i4b) function par_package_convergence (this, dpak, cpakout, iend)
 Check package convergence globally: the local convergence flag is min-reduced over all processes.
 
integer(i4b) function par_sync_newtonur_flag (this, inewtonur)
 Synchronize the Newton under-relaxation flag: the local flag is max-reduced over all processes.
 
logical(lgp) function par_nur_has_converged (this, dxold_max, hncg)
 Check global Newton under-relaxation convergence; every process must have converged locally.
 
subroutine par_calc_ptc (this, iptc, ptcf)
 Calculate the pseudo-transient continuation (PTC) factor.
 
subroutine par_underrelax (this, kiter, bigch, neq, active, x, xtemp)
 Apply under-relaxation in sync over all processes.
 
subroutine par_backtracking_xupdate (this, bt_flag)
 Synchronize the backtracking flag over all processes.
 

Function/Subroutine Documentation

◆ par_backtracking_xupdate()

subroutine parallelsolutionmodule::par_backtracking_xupdate ( class(parallelsolutiontype), intent(inout)  this,
integer(i4b), intent(inout)  bt_flag 
)
private
Parameters
    [in,out]  this     ParallelSolutionType instance
    [in,out]  bt_flag  global backtracking flag: (1) backtracking performed, (0) backtracking not performed

Definition at line 186 of file ParallelSolution.f90.

187  ! -- dummy variables
188  class(ParallelSolutionType), intent(inout) :: this !< ParallelSolutionType instance
189  integer(I4B), intent(inout) :: bt_flag !< global backtracking flag (1) backtracking performed (0) backtracking not performed
190  ! -- local variables
191  integer(I4B) :: btflag_local
192  type(MpiWorldType), pointer :: mpi_world
193  integer :: ierr
194 
195  mpi_world => get_mpi_world()
196 
197  ! get local bt flag
198  btflag_local = this%NumericalSolutionType%get_backtracking_flag()
199 
200  ! reduce into global decision (if any, then all)
201  call mpi_allreduce(btflag_local, bt_flag, 1, mpi_integer, &
202  mpi_max, mpi_world%comm, ierr)
203  call check_mpi(ierr)
204 
205  ! perform backtracking if ...
206  if (bt_flag > 0) then
207  call this%NumericalSolutionType%apply_backtracking()
208  end if
209 
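The reduction above is the "if any, then all" idiom: each rank contributes a 0/1 flag, and the MPI_MAX all-reduce makes every rank see 1 as soon as any single rank raised it, so all ranks enter backtracking together. A minimal standalone sketch of the idiom (an illustrative program, not MODFLOW code; all names are hypothetical):

    program any_then_all
      use mpi
      implicit none
      integer :: ierr, rank, flag_local, flag_global

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)

      ! pretend that only rank 0 requests backtracking locally
      flag_local = merge(1, 0, rank == 0)

      ! max-reduce: the global flag becomes 1 if any rank raised it
      call MPI_Allreduce(flag_local, flag_global, 1, MPI_INTEGER, &
                         MPI_MAX, MPI_COMM_WORLD, ierr)

      ! every rank takes the same branch, keeping them in lockstep
      if (flag_global > 0) then
        print '(a,i0,a)', 'rank ', rank, ': backtracking'
      end if

      call MPI_Finalize(ierr)
    end program any_then_all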

◆ par_calc_ptc()

subroutine parallelsolutionmodule::par_calc_ptc ( class(parallelsolutiontype)  this,
integer(i4b)  iptc,
real(dp)  ptcf 
)
private
Parameters
    this  parallel solution
    iptc  PTC (1) or not (0)
    ptcf  the (global) PTC factor calculated

Definition at line 121 of file ParallelSolution.f90.

122  class(ParallelSolutionType) :: this !< parallel solution
123  integer(I4B) :: iptc !< PTC (1) or not (0)
124  real(DP) :: ptcf !< the (global) PTC factor calculated
125  ! local
126  integer(I4B) :: iptc_loc
127  real(DP) :: ptcf_loc, ptcf_glo_max
128  integer :: ierr
129  type(MpiWorldType), pointer :: mpi_world
130 
131  mpi_world => get_mpi_world()
132  call this%NumericalSolutionType%sln_calc_ptc(iptc_loc, ptcf_loc)
133  if (iptc_loc == 0) ptcf_loc = dzero
134 
135  ! now reduce
136  call mpi_allreduce(ptcf_loc, ptcf_glo_max, 1, mpi_double_precision, &
137  mpi_max, mpi_world%comm, ierr)
138  call check_mpi(ierr)
139 
140  iptc = 0
141  ptcf = dzero
142  if (ptcf_glo_max > dzero) then
143  iptc = 1
144  ptcf = ptcf_glo_max
145  end if
146 
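Here a single MPI_MAX reduction carries both pieces of information: ranks that do not request PTC contribute the sentinel value zero, so a positive global maximum simultaneously means "some rank wants PTC" and supplies the factor. A standalone sketch of this sentinel pattern (illustrative, not MODFLOW code):

    program ptc_sentinel
      use mpi
      implicit none
      integer :: ierr, rank, iptc
      double precision :: ptcf_loc, ptcf_glo

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)

      ! odd ranks request PTC with some local factor, others send 0.0
      if (mod(rank, 2) == 1) then
        ptcf_loc = 1.0d0 / dble(rank + 1)
      else
        ptcf_loc = 0.0d0 ! sentinel: no PTC requested on this rank
      end if

      call MPI_Allreduce(ptcf_loc, ptcf_glo, 1, MPI_DOUBLE_PRECISION, &
                         MPI_MAX, MPI_COMM_WORLD, ierr)

      ! a positive maximum means at least one rank requested PTC
      iptc = merge(1, 0, ptcf_glo > 0.0d0)
      if (rank == 0) print '(a,i0,a,es9.2)', 'iptc = ', iptc, ', ptcf = ', ptcf_glo

      call MPI_Finalize(ierr)
    end program ptc_sentinel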

◆ par_has_converged()

logical(lgp) function parallelsolutionmodule::par_has_converged ( class(parallelsolutiontype)  this,
real(dp)  max_dvc 
)
private
Parameters
    this     parallel solution
    max_dvc  the LOCAL maximum dependent variable change
Returns
    True, when GLOBALLY converged

Definition at line 30 of file ParallelSolution.f90.

31  class(ParallelSolutionType) :: this !< parallel solution
32  real(DP) :: max_dvc !< the LOCAL maximum dependent variable change
33  logical(LGP) :: has_converged !< True, when GLOBALLY converged
34  ! local
35  real(DP) :: global_max_dvc
36  real(DP) :: abs_max_dvc
37  integer :: ierr
38  type(MpiWorldType), pointer :: mpi_world
39 
40  mpi_world => get_mpi_world()
41 
42  has_converged = .false.
43  abs_max_dvc = abs(max_dvc)
44  call mpi_allreduce(abs_max_dvc, global_max_dvc, 1, mpi_double_precision, &
45  mpi_max, mpi_world%comm, ierr)
46  call check_mpi(ierr)
47  if (global_max_dvc <= this%dvclose) then
48  has_converged = .true.
49  end if
50 
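The pattern above decides convergence with one collective: each rank submits the absolute value of its local worst change, MPI_MAX finds the global worst, and every rank compares that same number against dvclose, so all ranks reach the same verdict. A standalone sketch (illustrative, not MODFLOW code; the tolerance is made up):

    program global_convergence
      use mpi
      implicit none
      double precision, parameter :: dvclose = 1.0d-6
      double precision :: dvc_local, abs_dvc, dvc_global
      integer :: ierr, rank
      logical :: has_converged

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)

      ! pretend each rank measured a signed local change
      dvc_local = (-1.0d0)**rank * 1.0d-7 * dble(rank + 1)
      abs_dvc = abs(dvc_local)

      ! the largest absolute change anywhere decides for everyone
      call MPI_Allreduce(abs_dvc, dvc_global, 1, MPI_DOUBLE_PRECISION, &
                         MPI_MAX, MPI_COMM_WORLD, ierr)
      has_converged = dvc_global <= dvclose

      if (rank == 0) print *, 'globally converged: ', has_converged
      call MPI_Finalize(ierr)
    end program global_convergence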

◆ par_nur_has_converged()

logical(lgp) function parallelsolutionmodule::par_nur_has_converged ( class(parallelsolutiontype)  this,
real(dp), intent(in)  dxold_max,
real(dp), intent(in)  hncg 
)
private
Parameters
    this             parallel solution instance
    [in]  dxold_max  the maximum dependent variable change for cells not adjusted by NUR
    [in]  hncg       largest dep. var. change at end of Picard iter.
Returns
    True, when converged

Definition at line 91 of file ParallelSolution.f90.

93  class(ParallelSolutionType) :: this !< parallel solution instance
94  real(DP), intent(in) :: dxold_max !< the maximum dependent variable change for cells not adjusted by NUR
95  real(DP), intent(in) :: hncg !< largest dep. var. change at end of Picard iter.
96  logical(LGP) :: has_converged !< True, when converged
97  ! local
98  integer(I4B) :: icnvg_local
99  integer(I4B) :: icnvg_global
100  integer :: ierr
101  type(MpiWorldType), pointer :: mpi_world
102 
103  mpi_world => get_mpi_world()
104 
105  has_converged = .false.
106  icnvg_local = 0
107  if (this%NumericalSolutionType%sln_nur_has_converged( &
108  dxold_max, hncg)) then
109  icnvg_local = 1
110  end if
111 
112  call mpi_allreduce(icnvg_local, icnvg_global, 1, mpi_integer, &
113  mpi_min, mpi_world%comm, ierr)
114  call check_mpi(ierr)
115  if (icnvg_global == 1) has_converged = .true.
116 
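Note the contrast with the backtracking flag above: convergence must be unanimous, so the local 0/1 verdicts are combined with MPI_MIN rather than MPI_MAX, and the global result is 1 only if every rank reported 1. A standalone sketch (illustrative, not MODFLOW code):

    program unanimous_convergence
      use mpi
      implicit none
      integer :: ierr, rank, nranks, icnvg_local, icnvg_global

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
      call MPI_Comm_size(MPI_COMM_WORLD, nranks, ierr)

      ! pretend the last rank has not converged locally
      icnvg_local = merge(0, 1, rank == nranks - 1)

      ! min-reduce: a single 0 anywhere drags the global verdict to 0
      call MPI_Allreduce(icnvg_local, icnvg_global, 1, MPI_INTEGER, &
                         MPI_MIN, MPI_COMM_WORLD, ierr)

      if (rank == 0) print '(a,i0)', 'global convergence flag: ', icnvg_global
      call MPI_Finalize(ierr)
    end program unanimous_convergence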

◆ par_package_convergence()

integer(i4b) function parallelsolutionmodule::par_package_convergence ( class(parallelsolutiontype)  this,
real(dp), intent(in)  dpak,
character(len=lenpakloc), intent(in)  cpakout,
integer(i4b), intent(in)  iend 
)
private
Parameters
    this        parallel solution instance
    [in]  dpak  maximum dependent-variable change in the packages

Definition at line 53 of file ParallelSolution.f90.

55  class(ParallelSolutionType) :: this !< parallel solution instance
56  real(DP), intent(in) :: dpak !< maximum dependent-variable change in the packages
57  character(len=LENPAKLOC), intent(in) :: cpakout
58  integer(I4B), intent(in) :: iend
59  ! local
60  integer(I4B) :: icnvg_global
61  integer(I4B) :: icnvg_local
62  integer :: ierr
63  type(MpiWorldType), pointer :: mpi_world
64 
65  mpi_world => get_mpi_world()
66 
67  icnvg_local = &
68  this%NumericalSolutionType%sln_package_convergence(dpak, cpakout, iend)
69 
70  call mpi_allreduce(icnvg_local, icnvg_global, 1, mpi_integer, &
71  mpi_min, mpi_world%comm, ierr)
72  call check_mpi(ierr)
73 
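This is the same unanimous min-reduction idiom sketched under par_nur_has_converged() above: a single local zero drags the global convergence flag, icnvg_global, to zero on every rank.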

◆ par_sync_newtonur_flag()

integer(i4b) function parallelsolutionmodule::par_sync_newtonur_flag ( class(parallelsolutiontype)  this,
integer(i4b), intent(in)  inewtonur 
)
private
Parameters
    this             parallel solution instance
    [in]  inewtonur  local Newton Under-relaxation flag
Returns
    Maximum of all local values (1 = under-relaxation applied)

Definition at line 76 of file ParallelSolution.f90.

77  class(ParallelSolutionType) :: this !< parallel solution instance
78  integer(I4B), intent(in) :: inewtonur !< local Newton Under-relaxation flag
79  ! local
80  integer(I4B) :: ivalue !< Maximum of all local values (1 = under-relaxation applied)
81  integer :: ierr
82  type(MpiWorldType), pointer :: mpi_world
83 
84  mpi_world => get_mpi_world()
85  call mpi_allreduce(inewtonur, ivalue, 1, mpi_integer, &
86  mpi_max, mpi_world%comm, ierr)
87  call check_mpi(ierr)
88 
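This is the same "if any, then all" max-reduction idiom sketched under par_backtracking_xupdate() above: the result, ivalue, is 1 on every rank as soon as any rank applied Newton under-relaxation.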

◆ par_underrelax()

subroutine parallelsolutionmodule::par_underrelax ( class(parallelsolutiontype)  this,
integer(i4b), intent(in)  kiter,
real(dp), intent(in)  bigch,
integer(i4b), intent(in)  neq,
integer(i4b), dimension(neq), intent(in)  active,
real(dp), dimension(neq), intent(inout)  x,
real(dp), dimension(neq), intent(in)  xtemp 
)
private
Parameters
    this              parallel instance
    [in]      kiter   Picard iteration number
    [in]      bigch   maximum dependent-variable change
    [in]      neq     number of equations
    [in]      active  active cell flag (1)
    [in,out]  x       current dependent-variable
    [in]      xtemp   previous dependent-variable

Definition at line 151 of file ParallelSolution.f90.

152  class(ParallelSolutionType) :: this !< parallel instance
153  integer(I4B), intent(in) :: kiter !< Picard iteration number
154  real(DP), intent(in) :: bigch !< maximum dependent-variable change
155  integer(I4B), intent(in) :: neq !< number of equations
156  integer(I4B), dimension(neq), intent(in) :: active !< active cell flag (1)
157  real(DP), dimension(neq), intent(inout) :: x !< current dependent-variable
158  real(DP), dimension(neq), intent(in) :: xtemp !< previous dependent-variable
159  ! local
160  real(DP) :: dvc_global_max, dvc_global_min
161  integer :: ierr
162  type(MpiWorldType), pointer :: mpi_world
163 
164  mpi_world => get_mpi_world()
165 
166  ! first reduce largest change over all processes
167  call mpi_allreduce(bigch, dvc_global_max, 1, mpi_double_precision, &
168  mpi_max, mpi_world%comm, ierr)
169  call check_mpi(ierr)
170  call mpi_allreduce(bigch, dvc_global_min, 1, mpi_double_precision, &
171  mpi_min, mpi_world%comm, ierr)
172  call check_mpi(ierr)
173 
174  if (abs(dvc_global_min) > abs(dvc_global_max)) then
175  dvc_global_max = dvc_global_min
176  end if
177 
178  ! call local underrelax routine
179  call this%NumericalSolutionType%sln_underrelax(kiter, dvc_global_max, &
180  neq, active, x, xtemp)
181 
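The two reductions recover the signed value with the largest magnitude: the globally extreme change is necessarily either the global maximum or the global minimum, so comparing their absolute values picks the right one while preserving its sign for the under-relaxation step. A standalone sketch (illustrative, not MODFLOW code):

    program signed_extreme
      use mpi
      implicit none
      double precision :: bigch, glo_max, glo_min
      integer :: ierr, rank

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)

      ! pretend rank 0 holds the largest change and it is negative
      bigch = merge(-5.0d0, 1.0d0, rank == 0)

      ! the signed extreme is either the global max or the global min
      call MPI_Allreduce(bigch, glo_max, 1, MPI_DOUBLE_PRECISION, &
                         MPI_MAX, MPI_COMM_WORLD, ierr)
      call MPI_Allreduce(bigch, glo_min, 1, MPI_DOUBLE_PRECISION, &
                         MPI_MIN, MPI_COMM_WORLD, ierr)
      if (abs(glo_min) > abs(glo_max)) glo_max = glo_min

      if (rank == 0) print '(a,f6.1)', 'signed extreme change: ', glo_max
      call MPI_Finalize(ierr)
    end program signed_extreme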