MoDeNa 1.0
Software framework facilitating sequential multi-scale modelling
SolverSetup.F90
Subroutine SolverSetup

Use basic_variables, Only: ncomp

!PETSc modules
Use f90moduleinterfaces
Use global_x !(snes,ngrid,ngp,x,r,timer)

Implicit None

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscvec.h90>
#include <finclude/petscdmda.h>
#include <finclude/petscdm.h>
#include <finclude/petscdmda.h90>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscsnes.h>
#include <finclude/petscsnes.h90>
32 
33 
34 dm :: da
35 petscerrorcode :: ierr
36 type(userctx) :: user
37 mat :: j
38 petscreal :: erel
39 petscbool :: flg
40 INTEGER :: jac_id
41 petscint :: jaclocal
42 
43 character(80) :: filename=''
44 
45 !external subroutines associated with Solver
46 external forminitialguess
47 external formfunction
48 external jac_shell_ad
49 external jac_matrix_empty
50 external monitortimer


 call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)
 call MPI_Comm_size(PETSC_COMM_WORLD,user%num_procs,ierr)

! Create solver context
 call SNESCreate(PETSC_COMM_WORLD,snes,ierr)

! Create distributed array (DMDA) to manage the parallel grid and vectors
 call DMDACreate1d(PETSC_COMM_WORLD, & !MPI communicator
 & DMDA_BOUNDARY_GHOSTED, & !boundary type at the ends of the physical domain
 & ngrid, & !global dimension of the array (a negative value can be overridden by the user on the command line)
 & ncomp, & !number of degrees of freedom (unknowns) per grid point
 & ngp, & !number of ghost points accessible for local vectors
 & PETSC_NULL_INTEGER, & !optional array specifying the number of grid points per processor
 & da, & !the resulting distributed array object
 & ierr)
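! With DMDA_BOUNDARY_GHOSTED, ghost cells are also allocated outside the
! physical domain so that boundary values can be stored there; PETSc
! decides how the ngrid interior points are split across the processes.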

! Extract global arrays from the DMDA: x: unknowns, r: residual
 call DMCreateGlobalVector(da,x,ierr)
 call VecDuplicate(x,r,ierr)

! Get local grid boundaries (for the 1-dimensional DMDA)
 call DMDAGetCorners(da, & !the distributed array
 & user%xs, & !corner index in x direction
 & PETSC_NULL_INTEGER, & !corner index in y direction
 & PETSC_NULL_INTEGER, & !corner index in z direction
 & user%xm, & !width of locally owned part in x direction
 & PETSC_NULL_INTEGER, & !width of locally owned part in y direction
 & PETSC_NULL_INTEGER, & !width of locally owned part in z direction
 & ierr) !error check

 call DMDAGetGhostCorners(da, & !the distributed array
 & user%gxs, & !corner index in x direction (now counting includes ghost points)
 & PETSC_NULL_INTEGER, & !corner index in y direction (now counting includes ghost points)
 & PETSC_NULL_INTEGER, & !corner index in z direction (now counting includes ghost points)
 & user%gxm, & !width of locally owned part in x direction (now including ghost points)
 & PETSC_NULL_INTEGER, & !width of locally owned part in y direction (now including ghost points)
 & PETSC_NULL_INTEGER, & !width of locally owned part in z direction (now including ghost points)
 & ierr) !error check

! Here we shift the starting indices up by one so that we can easily
! use the Fortran convention of 1-based indices (rather than 0-based indices).
 user%xs = user%xs+1
 user%gxs = user%gxs+1
 user%xe = user%xs+user%xm-1
 user%gxe = user%gxs+user%gxm-1
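! Illustrative example (assuming ngrid = 400, ngp = 1 and 4 processes):
! after the shift, rank 1 owns xs..xe = 101..200 (xm = 100), while its
! ghosted range is gxs..gxe = 100..201 (gxm = 102).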

 call SNESSetFunction(snes,r,FormFunction,user,ierr)
 call SNESSetApplicationContext(snes,user,ierr)
 call SNESSetDM(snes,da,ierr)

! !Set up matrix-free Jacobian
! erel = 1e-08
! call MatCreateSNESMF(snes,J,ierr) !matrix-free Jacobian
! call MatMFFDSetFunctionError(J,erel,ierr)
! call SNESSetJacobian(snes,J,J,MatMFFDComputeJacobian,PETSC_NULL_OBJECT,ierr)


! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Set up the nonlinear solver depending on the option set in the makefile (with -jac)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-jac',jac_id,flg,ierr)
 If(.not. flg) Then
 jac_id = 0
 write(*,*)'Option -jac not set in makefile, using matrix-free with numerical approximations.'
 End If
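
! Overview of the -jac settings handled below:
!  -1: no Jacobian needed (e.g. Anderson mixing, Picard)
!   0: matrix-free, J(x)v approximated numerically (default)
!   1: matrix-free, J(x)v computed by automatic differentiation (AD)
!   2: full Jacobian via finite differences (now set with -snes_fd)
!   3: full Jacobian via AD (not working yet)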

 Select Case(jac_id)

 Case(-1) !for options that don't need a Jacobian: Anderson mixing, Picard

 Case (0) !matrix-free with numerical approximation of J(x)v
 !default value of erel
 erel = 1e-08
 !check whether a value for erel is specified in the makefile
 call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-erel',erel,flg,ierr)
 If(user%rank == 0) write(*,*)'Using matrix-free Jacobian with numerical approximations. erel:',erel
 call MatCreateSNESMF(snes,J,ierr) !matrix-free Jacobian
 call MatMFFDSetFunctionError(J,erel,ierr)
 call SNESSetJacobian(snes,J,J,MatMFFDComputeJacobian,PETSC_NULL_OBJECT,ierr)
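 !The MFFD Jacobian is never assembled; each product is approximated by
 !a forward difference, J(x)v ~ (F(x+h*v) - F(x))/h, with the step h
 !chosen by PETSc from the relative function error erel set above.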

 Case (1) !matrix-free with AD-calculated J(x)v
 If(user%rank == 0) write(*,*)'Using matrix-free AD Jacobian.'
 !determine the local size of the shell matrix
 JacLocal = int(ngrid / user%num_procs)
 If(mod(ngrid,user%num_procs) /= 0 ) Then
 write(*,*)'Surface Tension Code: dimensions and number of cores do not match! mod(ngrid,n_cores) must equal 0.'
 stop 5
 End If

 !Make sure the local parts of the Jacobian shell have the same size as the local DMDA arrays (JacLocal = user%xm)
 If(JacLocal /= user%xm) Then
 write(*,*)'Surface Tension Code: shell Jacobian matrix and DMDA have to be parallelized accordingly.'
 stop 6
 End If

 call MatCreateShell(PETSC_COMM_WORLD,ncomp*JacLocal,ncomp*JacLocal,ncomp*ngrid,ncomp*ngrid,PETSC_NULL_OBJECT,J,ierr)
 call MatShellSetOperation( J, MATOP_MULT, Jac_Shell_AD, ierr )
 call SNESSetJacobian( snes, J, J, Jac_Matrix_Empty, PETSC_NULL_OBJECT, ierr)
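 !A shell matrix stores no entries, only callbacks: MATOP_MULT routes
 !every request for J*v to Jac_Shell_AD, so the AD code delivers
 !Jacobian-vector products without ever assembling J; Jac_Matrix_Empty
 !serves as the (empty) Jacobian evaluation routine required by SNES.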

 Case (2) !build the complete Jacobian matrix via finite differences
 If(user%rank == 0) write(*,*)'Using finite-difference Jacobian.'
!Now set in the makefile via -snes_fd
! call MatCreate(PETSC_COMM_WORLD,J,ierr)
! call MatSetSizes(J,PETSC_DECIDE,PETSC_DECIDE,ngrid,ngrid,ierr)
! call MatSetFromOptions(J,ierr)
! call MatSetUp(J,ierr) !sets up the internal matrix data structure
! call SNESSetJacobian(snes,J,J,SNESComputeJacobianDefault,PETSC_NULL_OBJECT,ierr)


 Case (3) !build the complete Jacobian matrix via AD
 If(user%rank == 0) write(*,*)'Surface Tension Code: Using AD-generated Jacobian.'
 write(*,*)'Not working yet!'
 stop 6
 !Create a matrix with the appropriate sparsity pattern for the da.
 !This also means that inserting values at positions that do not fit
 !this structure raises an error, i.e. when calculating the Jacobian
 !it is no longer possible to ignore the sparsity pattern and simply
 !create a dense Jacobian!

 !call DMCreateMatrix(da,MATMPIAIJ,J,ierr) !CAUTION: in newer PETSc versions DMCreateMatrix no longer takes the MatType argument!
 !call SNESSetJacobian(snes,J,J,Jac_AD,PETSC_NULL_OBJECT,ierr)

 Case default
 write(*,*) 'No valid option set for -jac in makefile, using full Jacobian via finite differences.'
 !J must be created before it can be handed to SNES; the global size
 !follows the DMDA layout (ncomp unknowns per grid point)
 call MatCreate(PETSC_COMM_WORLD,J,ierr)
 call MatSetSizes(J,PETSC_DECIDE,PETSC_DECIDE,ncomp*ngrid,ncomp*ngrid,ierr)
 call MatSetFromOptions(J,ierr)
 call MatSetUp(J,ierr)
 call SNESSetJacobian(snes,J,J,SNESComputeJacobianDefault,PETSC_NULL_OBJECT,ierr)

 End Select

!Set a monitoring function that outputs the surface tension at every iteration
 call SNESMonitorSet(snes,MonitorTimer,PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr)
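 !SNES calls MonitorTimer (defined below) after every nonlinear
 !iteration, passing the iteration number and the current residual norm.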

!Enable modification from the makefile
 call SNESSetFromOptions(snes,ierr)
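 !SNESSetFromOptions picks up any PETSc runtime options, e.g. -snes_fd
 !(finite-difference Jacobian, see Case (2) above) or -snes_monitor.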

!Evaluate the initial guess
 call FormInitialGuess(snes,x,ierr)
 !write(filename,*)'0_initial_profile_global.xlo'
 !call PrintGlobalVec(x,filename)

 !start timer
 total_time = 0.0
 timer_old = MPI_Wtime()

!Solve the system
 call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
 !write(filename,*)'1_final_profile_global.xlo'
 !call PrintGlobalVec(x,filename)
 !write(filename,'(a,I3.3,a)') '2_final_profile_local_proc_',user%rank,'.xlo'
 !call PrintLocalVec(x,da,filename)

 !plot residual vs. time graph
 !call system('gnuplot gnuplot_script.srp')


End Subroutine SolverSetup


subroutine MonitorTimer(snes,its,norm,dummy,ierr)

 Use global_x, Only: timer,timer_old,total_time,r
 Use mod_dft, Only: free
 Use parameters, Only: kbol,pi
 Use basic_variables, Only: t,parame,ncomp,surfactant,wif_surfactant
 Use vle_var, Only: rhob,tc

! !PETSc modules
! use f90module
! !DFT modules
! use DFT_MODULE, ONLY: free
! use VLE_VAR, ONLY: tc,rhob
! use BASIC_VARIABLES, ONLY: t,parame,ncomp
! use PARAMETERS, ONLY: PI,KBOL

 implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>

! Input/output variables:
 SNES snes
 PetscInt dummy
 PetscInt its
 PetscReal norm
 PetscErrorCode ierr
! local
 PetscReal :: f2norm
 Vec :: current_solution
 DOUBLE PRECISION :: delta_time
 INTEGER :: rank
 REAL :: m_average,surftens,st_macro
 character(80) :: filename=''

 Real :: scale_factor !to scale the surface tension in case a surfactant is present


 !calculate the interfacial tension:
 !sum up the variable free from all procs on proc 0
 !(MPI forbids using the same buffer for send and receive,
 ! hence MPI_IN_PLACE on the root)
 call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
 If(rank == 0) Then
 call MPI_Reduce(MPI_IN_PLACE,free,1,MPI_DOUBLE_PRECISION,MPI_SUM,0,PETSC_COMM_WORLD,ierr)
 Else
 call MPI_Reduce(free,free,1,MPI_DOUBLE_PRECISION,MPI_SUM,0,PETSC_COMM_WORLD,ierr)
 End If

 !Only proc 0 calculates the surface tension
 IF(rank == 0) THEN

 !calculate surface tension
 surftens = kbol * t * 1.e20 * 1000.0 * free
 m_average = sum( rhob(1,1:ncomp)*parame(1:ncomp,1) ) / rhob(1,0)

 st_macro = surftens / ( 1.0 + 3.0/8.0/pi * t/tc &
 * (1.0/2.55)**2 / (0.0674*m_average+0.0045) )
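 !Presumably free holds the reduced excess free energy per area, so
 !kbol*t*free is in J/A^2; the factors 1.e20 (A^2 -> m^2) and 1000.0
 !(N/m -> mN/m) give surftens in mN/m. st_macro then applies an
 !empirical correction towards the macroscopic surface tension.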

 write(*,*)'ST',st_macro

 !calculate a scaling factor to implicitly account for the surfactant
 If(surfactant) Then
 scale_factor = 0.8 - 2.4828*wif_surfactant !adjusted to experimental results
 Else
 scale_factor = 1.
 End If

 If(surfactant) write(*,*)'ST including surfactant', st_macro*scale_factor

 !write the result to the output file
 filename='./out.txt'
 CALL file_open(filename,99)
 WRITE (99,*) st_macro*scale_factor
 close(99)

 End If


 !calculate elapsed time
 timer = MPI_Wtime()
 delta_time = timer - timer_old
 timer_old = timer
 total_time = total_time + delta_time

 !get the 2-norm of the residual array
 call VecNorm(r,NORM_2,f2norm,ierr)

 IF(rank == 0) THEN
 !append iteration number, elapsed time and residual norm to file
 filename = 'ItsTimeNorm.dat'
 open(unit = 44, file = filename, position = 'append')
 write(44,*) its,total_time,f2norm
 close(44)
 End If

end subroutine MonitorTimer