Actual source code: ex21f.F
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include
!   the global mesh dimensions -m and -n.
!
!   Program usage: mpirun ex21f [-help] [all PETSc options]
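!
!   For example, an illustrative run (process count and options are
!   only suggestions, not required values):
!      mpirun -np 2 ex21f -m 10 -n 10 -ksp_monitor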
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
!
! -------------------------------------------------------------------------
      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     petsc.h    - base PETSc routines        petscvec.h - vectors
!     petscsys.h - system routines            petscmat.h - matrices
!     petscksp.h - Krylov subspace methods    petscpc.h  - preconditioners

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver (Krylov subspace method) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error
      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      double precision norm,tol
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,its,ione
      PetscMPIInt      rank
      PetscTruth       flg
      PetscErrorCode   ierr

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp,SampleShellPCApply

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC             jacobi,sor
      Vec            work
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
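!
!  For an interior grid point, the loop below therefore produces the
!  familiar five-point stencil row (shown here only as an illustration):
!     A(II,II) = 4,  A(II,II-n) = A(II,II+n) = A(II,II-1) = A(II,II+1) = -1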
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,    &
     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)

!
!  Set a user-defined shell preconditioner
!

!  (Required) Indicate to PETSc that we are using a shell preconditioner
      call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
      call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
      call SampleShellPCSetUp(A,x,ierr)
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
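!
!  For instance, an illustrative run that overrides the shell
!  preconditioner set above (options shown only as an example):
!      mpirun -np 2 ex21f -ksp_type gmres -pc_type jacobi -ksp_monitor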
      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe10.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Free up PCShell data
      call PCDestroy(sor,ierr)
      call PCDestroy(jacobi,ierr)
      call VecDestroy(work,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end
!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pmat  - preconditioner matrix
!   x     - vector
!
!   Output Parameter:
!   ierr  - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the sum
!   of a Jacobi preconditioner and an SOR preconditioner.  Here we
!   create those two inner preconditioners and a work vector; they are
!   then used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pmat,x,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"

      Vec     x
      Mat     pmat
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC      jacobi,sor
      Vec     work
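!  Create and set up the Jacobi sub-preconditioner (the first piece of
!  the user-defined additive preconditioner)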
      call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
      call PCSetType(jacobi,PCJACOBI,ierr)
      call PCSetOperators(jacobi,pmat,pmat,DIFFERENT_NONZERO_PATTERN,  &
     &                    ierr)
      call PCSetUp(jacobi,ierr)
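!  Create and set up the SOR sub-preconditioner (the second piece)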
      call PCCreate(PETSC_COMM_WORLD,sor,ierr)
      call PCSetType(sor,PCSOR,ierr)
      call PCSetOperators(sor,pmat,pmat,DIFFERENT_NONZERO_PATTERN,     &
     &                    ierr)
!      call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
      call PCSetUp(sor,ierr)
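!  Create a work vector (same layout as x) that SampleShellPCApply()
!  uses to hold the SOR contribution before it is added to the result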
      call VecDuplicate(x,work,ierr)

      end
! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   dummy - optional user-defined context, not used here
!   x     - input vector
!
!   Output Parameters:
!   y     - preconditioned vector
!   ierr  - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner plus the
!   SOR preconditioner.
!
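!   In terms of the two sub-preconditioners created in
!   SampleShellPCSetUp(), the routine computes (illustrative formula):
!       y = inv(M_jacobi)*x + inv(M_sor)*x
!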
!   You can get the exact same effect with the PCCOMPOSITE preconditioner using
!   mpirun -np 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
!
      subroutine SampleShellPCApply(dummy,x,y,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscpc.h"

      Vec     x,y
      integer dummy
      PetscErrorCode ierr
      PetscScalar one

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC      jacobi,sor
      Vec     work
      one = 1.0
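!  Apply the Jacobi preconditioner into y and the SOR preconditioner
!  into the work vector, then accumulate:  y <- y + work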
      call PCApply(jacobi,x,y,ierr)
      call PCApply(sor,x,work,ierr)
      call VecAXPY(y,one,work,ierr)

      end