Actual source code: ex8.c
static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";
/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/
/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/
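/*
   Example invocations (illustrative only, not part of the original source;
   the executable name "ex8" and the specific option values are assumptions):

     ./ex8 -user_set_subdomains -overlap 2 -ksp_monitor
     ./ex8 -user_set_subdomain_solvers -sub_ksp_type gmres -sub_pc_type ilu
     mpiexec -n 2 ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2
*/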
/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petsc.h       - base PETSc routines
     petscvec.h    - vectors
     petscsys.h    - system routines
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include "petscksp.h"
int main(int argc,char **args)
{
  Vec         x,b,u;           /* approx solution, RHS, exact solution */
  Mat         A;               /* linear system matrix */
  KSP         ksp;             /* linear solver context */
  PC          pc;              /* PC context */
  IS          *is;             /* array of index sets that define the subdomains */
  PetscInt    overlap = 1;     /* width of subdomain overlap */
  PetscInt    Nsub;            /* number of subdomains */
  PetscInt    m = 15,n = 17;   /* mesh dimensions in x- and y- directions */
  PetscInt    M = 2,N = 1;     /* number of subdomains in x- and y- directions */
  PetscInt    i,j,I,J,Istart,Iend;
  PetscMPIInt size;
  PetscTruth  flg;
  PetscTruth  user_subdomains; /* flag - 1 indicates user-defined subdomains */
  PetscScalar v, one = 1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
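  /*
     Each grid point (i,j) of the m x n mesh corresponds to global row
     I = i*n + j.  The loop below inserts -1 for each existing neighbor
     (rows I-n, I+n, I-1, I+1) and 4 on the diagonal, i.e. the standard
     5-point Laplacian stencil.
  */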
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(u,one);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
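  /*
     The flag DIFFERENT_NONZERO_PATTERN describes how the nonzero structure
     of the preconditioning matrix changes between successive calls to
     KSPSetOperators(); since the operators are set only once in this
     example, the choice of flag has no practical effect here.
  */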
  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
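  /*
     Programmatic equivalent of the options above (a sketch only, not used
     by this example; "blks" is a hypothetical block count):

       PCASMSetOverlap(pc,overlap);
       PCASMSetTotalSubdomains(pc,blks,PETSC_NULL);

     which lets PETSc generate the subdomain index sets itself.
  */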
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
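  /*
     A custom decomposition would build one index set per subdomain from the
     global node numbers it contains, e.g. (a sketch; "nidx" and "idx" are
     hypothetical, holding the size and entries of subdomain k):

       ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,&is[k]);
  */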
  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(1,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);
    PCASMSetLocalSubdomains(pc,Nsub,is);
  }
  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);
  if (flg) {
    KSP        *subksp;       /* array of KSP contexts for local subblocks */
    PetscInt   nlocal,first;  /* number of local subblocks, first local subblock */
    PC         subpc;         /* PC context for subblock */
    PetscTruth isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
    /*
       Flag an error if PCTYPE is changed from the runtime options
    */
    PetscTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) {
      SETERRQ(1,"Cannot change the PCTYPE when manually changing the subdomain solver settings");
    }
    /*
       Call KSPSetUp() to set up the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);
    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);
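  /*
     One could compare the computed solution x with the exact solution u
     (a sketch, not part of this example; "norm" and "its" would need to be
     declared as PetscReal and PetscInt):

       VecAXPY(x,-1.0,u);
       VecNorm(x,NORM_2,&norm);
       KSPGetIterationNumber(ksp,&its);
       PetscPrintf(PETSC_COMM_WORLD,"Norm of error %G, iterations %D\n",norm,its);
  */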
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(is[i]);
    }
    PetscFree(is);
  }
  KSPDestroy(ksp);
  VecDestroy(u);
  VecDestroy(x);
  VecDestroy(b);
  MatDestroy(A);
  PetscFinalize();
  return 0;
}