! ex7f.F90 (petsc-3.12.0)
! Block Jacobi preconditioner for solving a linear system in parallel with KSP.
! The code shows how to set the particular block sizes and how to use
! different linear solvers on the individual blocks.

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors).

! Recall: The block Jacobi method is equivalent to the ASM (additive Schwarz)
! preconditioner with zero overlap.

!/*T
!   Concepts: KSP^customizing the block Jacobi preconditioner
!   Processors: n
!T*/
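
! For example, one might run this example on two processes and inspect the
! nested solver configuration (illustrative invocation only; the executable
! name and option values here are not prescribed by the example itself):
!
!     mpiexec -n 2 ./ex7f -m 4 -ksp_view
!
! Since block Jacobi with zero overlap matches ASM, replacing the default with
!     -pc_type asm -pc_asm_overlap 0
! should give an equivalent preconditioner for the default settings.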

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp

      implicit none
      Vec             :: x,b,u        ! approx solution, RHS, exact solution
      Mat             :: A            ! linear system matrix
      KSP             :: ksp          ! KSP context
      PC              :: myPc         ! PC context
      PC              :: subpc        ! PC context for subdomain
      PetscReal       :: norm         ! norm of solution error
      PetscReal,parameter :: tol = 1.e-6
      PetscErrorCode  :: ierr
      PetscInt        :: i,j,Ii,JJ,n
      PetscInt        :: m = 4        ! m may be overridden by the -m runtime option below, so it cannot be a parameter
      PetscMPIInt     :: myRank,mySize
      PetscInt        :: its,nlocal,first,Istart,Iend
      PetscScalar     :: v
      PetscScalar, parameter :: &
        myNone = -1.0, &
        sone   = 1.0
      PetscBool       :: isbjacobi,flg
      KSP,allocatable,dimension(:)      :: subksp   ! array of local KSP contexts on this processor
      PetscInt,allocatable,dimension(:) :: blks
      character(len=80)                 :: outputString
      PetscInt,parameter :: one = 1, five = 5

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr /= 0) then
        write(6,*) 'Unable to initialize PETSc'
        stop
      endif

      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      CHKERRA(ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,myRank,ierr)
      CHKERRA(ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,mySize,ierr)
      CHKERRA(ierr)
      n = m+2

      !-------------------------------------------------------------------
      ! Compute the matrix and right-hand-side vector that define
      ! the linear system, Ax = b.
      !-------------------------------------------------------------------

      ! Create and assemble parallel matrix

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)
      call MatGetOwnershipRange(A,Istart,Iend,ierr)

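      ! The loop below fills in the locally owned rows of the standard 5-point
      ! finite-difference Laplacian on an m x n grid: each row Ii is coupled to
      ! its north/south/east/west neighbours with the value -1 and carries 4 on
      ! the diagonal.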
      do Ii=Istart,Iend-1
        v = -1.0; i = Ii/n; j = Ii - i*n

        if (i>0) then
          JJ = Ii - n
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr); CHKERRA(ierr)
        endif

        if (i<m-1) then
          JJ = Ii + n
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr); CHKERRA(ierr)
        endif

        if (j>0) then
          JJ = Ii - 1
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr); CHKERRA(ierr)
        endif

        if (j<n-1) then
          JJ = Ii + 1
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr); CHKERRA(ierr)
        endif

        v = 4.0
        call MatSetValues(A,one,Ii,one,Ii,v,ADD_VALUES,ierr); CHKERRA(ierr)

      enddo

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr); CHKERRA(ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr); CHKERRA(ierr)

      ! Create parallel vectors

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      CHKERRA(ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      CHKERRA(ierr)
      call VecSetFromOptions(u,ierr)
      CHKERRA(ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

      ! Set exact solution; then compute right-hand-side vector.

      call VecSet(u,sone,ierr)
      CHKERRA(ierr)
      call MatMult(A,u,b,ierr)
      CHKERRA(ierr)

      ! Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      CHKERRA(ierr)

      ! Set operators. Here the matrix that defines the linear system
      ! also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
      CHKERRA(ierr)

      ! Set default preconditioner for this program to be block Jacobi.
      ! This choice can be overridden at runtime with the option
      !     -pc_type <type>

      call KSPGetPC(ksp,myPc,ierr)
      CHKERRA(ierr)
      call PCSetType(myPc,PCBJACOBI,ierr)
      CHKERRA(ierr)

      !-------------------------------------------------------------------
      ! Define the problem decomposition
      !-------------------------------------------------------------------

      ! Call PCBJacobiSetTotalBlocks() to set individually the size of
      ! each block in the preconditioner. This could also be done with
      ! the runtime option
      !     -pc_bjacobi_blocks <blocks>
      ! Also, see the command PCBJacobiSetLocalBlocks() to set the
      ! local blocks.

      ! Note: The default decomposition is 1 block per processor.

      allocate(blks(m),source = n)

      call PCBJacobiSetTotalBlocks(myPc,m,blks,ierr)
      CHKERRA(ierr)
      deallocate(blks)
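
      ! As a sketch only (not executed here): instead of setting the total
      ! number of blocks, each process could describe its own blocks with
      ! PCBJacobiSetLocalBlocks(). The local block count and sizes below are
      ! illustrative choices, not part of this example:
      !
      !   nlocal = 2
      !   allocate(blks(nlocal),source = n)
      !   call PCBJacobiSetLocalBlocks(myPc,nlocal,blks,ierr)
      !   CHKERRA(ierr)
      !   deallocate(blks)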

      !-------------------------------------------------------------------
      ! Set the linear solvers for the subblocks
      !-------------------------------------------------------------------

      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! Basic method, should be sufficient for the needs of most users.
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! By default, the block Jacobi method uses the same solver on each
      ! block of the problem. To set the same solver options on all blocks,
      ! use the prefix -sub before the usual PC and KSP options, e.g.,
      !     -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
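      ! For instance, an illustrative invocation (the executable name and the
      ! particular option values are examples, not part of the tests below):
      !     mpiexec -n 2 ./ex7f -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4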

      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! Advanced method, setting different solvers for various blocks.
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      ! Note that each block's KSP context is completely independent of
      ! the others, and the full range of uniprocessor KSP options is
      ! available for each block. The following section of code is intended
      ! to be a simple illustration of setting different linear solvers for
      ! the individual blocks. These choices are obviously not recommended
      ! for solving this particular problem.

      call PetscObjectTypeCompare(myPc,PCBJACOBI,isbjacobi,ierr)

      if (isbjacobi) then

        ! Call KSPSetUp() to set the block Jacobi data structures (including
        ! creation of an internal KSP context for each block).
        ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

        call KSPSetUp(ksp,ierr)

        ! Extract the array of KSP contexts for the local blocks
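        ! The first call below only queries the number of local blocks (nlocal)
        ! and the global number of this process's first block (first);
        ! PETSC_NULL_KSP is passed because the KSP array itself is not needed
        ! yet. Once nlocal is known, subksp can be allocated and filled in by a
        ! second call.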
        call PCBJacobiGetSubKSP(myPc,nlocal,first,PETSC_NULL_KSP,ierr)
        allocate(subksp(nlocal))
        call PCBJacobiGetSubKSP(myPc,nlocal,first,subksp,ierr)

        ! Loop over the local blocks, setting various KSP options for each block

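        ! In this illustration the blocks on rank 0 use GMRES with Jacobi
        ! preconditioning and a relative tolerance of tol, while on the other
        ! ranks odd-numbered local blocks keep their default KSP and use ILU,
        ! and even-numbered local blocks use BiCGStab with no preconditioner
        ! and a relative tolerance of tol.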
        do i=0,nlocal-1

          call KSPGetPC(subksp(i+1),subpc,ierr)

          if (myRank>0) then

            if (mod(i,2)==1) then
              call PCSetType(subpc,PCILU,ierr); CHKERRA(ierr)
            else
              call PCSetType(subpc,PCNONE,ierr); CHKERRA(ierr)
              call KSPSetType(subksp(i+1),KSPBCGS,ierr); CHKERRA(ierr)
              call KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
              CHKERRA(ierr)
            endif

          else
            call PCSetType(subpc,PCJACOBI,ierr); CHKERRA(ierr)
            call KSPSetType(subksp(i+1),KSPGMRES,ierr); CHKERRA(ierr)
            call KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
            CHKERRA(ierr)
          endif

        end do

      endif
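
      ! Because KSPSetFromOptions() is called below, after the per-block
      ! settings above, runtime options (including -sub_ prefixed ones) may
      ! still override the programmatic choices made here.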

      !-------------------------------------------------------------------
      ! Solve the linear system
      !-------------------------------------------------------------------

      ! Set runtime options

      call KSPSetFromOptions(ksp,ierr); CHKERRA(ierr)

      ! Solve the linear system

      call KSPSolve(ksp,b,x,ierr); CHKERRA(ierr)

      !-------------------------------------------------------------------
      ! Check solution and clean up
      !-------------------------------------------------------------------

      ! Check the error

      !call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)

      call VecAXPY(x,myNone,u,ierr)

      !call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)

      call VecNorm(x,NORM_2,norm,ierr); CHKERRA(ierr)
      call KSPGetIterationNumber(ksp,its,ierr); CHKERRA(ierr)
      write(outputString,*) 'Norm of error',real(norm),'Iterations',its,'\n'   ! real() so the printed value has default kind regardless of PETSc's precision
      call PetscPrintf(PETSC_COMM_WORLD,outputString,ierr); CHKERRA(ierr)

      ! Free work space. All PETSc objects should be destroyed when they
      ! are no longer needed.

      if (allocated(subksp)) deallocate(subksp)   ! subksp is only allocated in the block Jacobi branch above
      call KSPDestroy(ksp,ierr); CHKERRA(ierr)
      call VecDestroy(u,ierr); CHKERRA(ierr)
      call VecDestroy(b,ierr); CHKERRA(ierr)
      call MatDestroy(A,ierr); CHKERRA(ierr)
      call VecDestroy(x,ierr); CHKERRA(ierr)
      call PetscFinalize(ierr); CHKERRA(ierr)

      end program main

!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -ksp_view
!
!TEST*/