Actual source code: asm.c
petsc-3.14.1 2020-11-03
1: /*
2: This file defines an additive Schwarz preconditioner for any Mat implementation.
4: Note that each processor may have any number of subdomains. But in order to
5: deal easily with the VecScatter(), we treat each processor as if it has the
6: same number of subdomains.
8: n - total number of true subdomains on all processors
9: n_local_true - actual number of subdomains on this processor
10: n_local = maximum over all processors of n_local_true
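Example: with three processes owning 2, 1, and 1 subdomains respectively,
n = 4, n_local_true is 2, 1, and 1 on the three processes, and n_local = 2.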
11: */
13: #include <../src/ksp/pc/impls/asm/asm.h>
15: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
16: {
17: PC_ASM *osm = (PC_ASM*)pc->data;
19: PetscMPIInt rank;
20: PetscInt i,bsz;
21: PetscBool iascii,isstring;
22: PetscViewer sviewer;
25: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
26: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
27: if (iascii) {
28: char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
29: if (osm->overlap >= 0) {PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);}
30: if (osm->n > 0) {PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);}
31: PetscViewerASCIIPrintf(viewer," %s, %s\n",blocks,overlaps);
32: PetscViewerASCIIPrintf(viewer," restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
33: if (osm->dm_subdomains) {PetscViewerASCIIPrintf(viewer," Additive Schwarz: using DM to define subdomains\n");}
34: if (osm->loctype != PC_COMPOSITE_ADDITIVE) {PetscViewerASCIIPrintf(viewer," Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);}
35: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
36: if (osm->same_local_solves) {
37: if (osm->ksp) {
38: PetscViewerASCIIPrintf(viewer," Local solver is the same for all blocks, as in the following KSP and PC objects on rank 0:\n");
39: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
40: if (!rank) {
41: PetscViewerASCIIPushTab(viewer);
42: KSPView(osm->ksp[0],sviewer);
43: PetscViewerASCIIPopTab(viewer);
44: }
45: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
46: }
47: } else {
48: PetscViewerASCIIPushSynchronized(viewer);
49: PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
50: PetscViewerFlush(viewer);
51: PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");
52: PetscViewerASCIIPushTab(viewer);
53: PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
54: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
55: for (i=0; i<osm->n_local_true; i++) {
56: ISGetLocalSize(osm->is[i],&bsz);
57: PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
58: KSPView(osm->ksp[i],sviewer);
59: PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
60: }
61: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
62: PetscViewerASCIIPopTab(viewer);
63: PetscViewerFlush(viewer);
64: PetscViewerASCIIPopSynchronized(viewer);
65: }
66: } else if (isstring) {
67: PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
68: PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
69: if (osm->ksp) {KSPView(osm->ksp[0],sviewer);}
70: PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
71: }
72: return(0);
73: }
75: static PetscErrorCode PCASMPrintSubdomains(PC pc)
76: {
77: PC_ASM *osm = (PC_ASM*)pc->data;
78: const char *prefix;
79: char fname[PETSC_MAX_PATH_LEN+1];
80: PetscViewer viewer, sviewer;
81: char *s;
82: PetscInt i,j,nidx;
83: const PetscInt *idx;
84: PetscMPIInt rank, size;
88: MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
89: MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
90: PCGetOptionsPrefix(pc,&prefix);
91: PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,sizeof(fname),NULL);
92: if (fname[0] == 0) { PetscStrcpy(fname,"stdout"); }
93: PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
94: for (i=0; i<osm->n_local; i++) {
95: if (i < osm->n_local_true) {
96: ISGetLocalSize(osm->is[i],&nidx);
97: ISGetIndices(osm->is[i],&idx);
98: /* Print to a string viewer; at most 16 characters per index (digits plus a trailing space) plus 512 chars for the header. */
99: #define len 16*(nidx+1)+512
100: PetscMalloc1(len,&s);
101: PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);
102: #undef len
103: PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);
104: for (j=0; j<nidx; j++) {
105: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
106: }
107: ISRestoreIndices(osm->is[i],&idx);
108: PetscViewerStringSPrintf(sviewer,"\n");
109: PetscViewerDestroy(&sviewer);
110: PetscViewerASCIIPushSynchronized(viewer);
111: PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);
112: PetscViewerFlush(viewer);
113: PetscViewerASCIIPopSynchronized(viewer);
114: PetscFree(s);
115: if (osm->is_local) {
116: /* Print to a string viewer; at most 16 characters per index (digits plus a trailing space) plus 512 chars for the header. */
117: #define len 16*(nidx+1)+512
118: PetscMalloc1(len, &s);
119: PetscViewerStringOpen(PETSC_COMM_SELF, s, len, &sviewer);
120: #undef len
121: PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);
122: ISGetLocalSize(osm->is_local[i],&nidx);
123: ISGetIndices(osm->is_local[i],&idx);
124: for (j=0; j<nidx; j++) {
125: PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
126: }
127: ISRestoreIndices(osm->is_local[i],&idx);
128: PetscViewerStringSPrintf(sviewer,"\n");
129: PetscViewerDestroy(&sviewer);
130: PetscViewerASCIIPushSynchronized(viewer);
131: PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);
132: PetscViewerFlush(viewer);
133: PetscViewerASCIIPopSynchronized(viewer);
134: PetscFree(s);
135: }
136: } else {
137: /* Participate in collective viewer calls. */
138: PetscViewerASCIIPushSynchronized(viewer);
139: PetscViewerFlush(viewer);
140: PetscViewerASCIIPopSynchronized(viewer);
141: /* Assume either all ranks have is_local or none do. */
142: if (osm->is_local) {
143: PetscViewerASCIIPushSynchronized(viewer);
144: PetscViewerFlush(viewer);
145: PetscViewerASCIIPopSynchronized(viewer);
146: }
147: }
148: }
149: PetscViewerFlush(viewer);
150: PetscViewerDestroy(&viewer);
151: return(0);
152: }
154: static PetscErrorCode PCSetUp_ASM(PC pc)
155: {
156: PC_ASM *osm = (PC_ASM*)pc->data;
158: PetscBool flg;
159: PetscInt i,m,m_local;
160: MatReuse scall = MAT_REUSE_MATRIX;
161: IS isl;
162: KSP ksp;
163: PC subpc;
164: const char *prefix,*pprefix;
165: Vec vec;
166: DM *domain_dm = NULL;
169: if (!pc->setupcalled) {
170: PetscInt m;
172: /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
173: if (osm->n_local_true == PETSC_DECIDE) {
174: /* no subdomains given */
175: /* try pc->dm first, if allowed */
176: if (osm->dm_subdomains && pc->dm) {
177: PetscInt num_domains, d;
178: char **domain_names;
179: IS *inner_domain_is, *outer_domain_is;
180: DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);
181: osm->overlap = -1; /* We do not want to increase the overlap of the IS.
182: A future improvement of this code might allow one to use
183: DM-defined subdomains and also increase the overlap,
184: but that is not currently supported */
185: if (num_domains) {
186: PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
187: }
188: for (d = 0; d < num_domains; ++d) {
189: if (domain_names) {PetscFree(domain_names[d]);}
190: if (inner_domain_is) {ISDestroy(&inner_domain_is[d]);}
191: if (outer_domain_is) {ISDestroy(&outer_domain_is[d]);}
192: }
193: PetscFree(domain_names);
194: PetscFree(inner_domain_is);
195: PetscFree(outer_domain_is);
196: }
197: if (osm->n_local_true == PETSC_DECIDE) {
198: /* still no subdomains; use one subdomain per processor */
199: osm->n_local_true = 1;
200: }
201: }
202: { /* determine the global and max number of subdomains */
203: struct {PetscInt max,sum;} inwork,outwork;
204: PetscMPIInt size;
206: inwork.max = osm->n_local_true;
207: inwork.sum = osm->n_local_true;
208: MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));
209: osm->n_local = outwork.max;
210: osm->n = outwork.sum;
212: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
213: if (outwork.max == 1 && outwork.sum == size) {
214: /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
215: MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);
216: }
217: }
218: if (!osm->is) { /* create the index sets */
219: PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
220: }
221: if (osm->n_local_true > 1 && !osm->is_local) {
222: PetscMalloc1(osm->n_local_true,&osm->is_local);
223: for (i=0; i<osm->n_local_true; i++) {
224: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
225: ISDuplicate(osm->is[i],&osm->is_local[i]);
226: ISCopy(osm->is[i],osm->is_local[i]);
227: } else {
228: PetscObjectReference((PetscObject)osm->is[i]);
229: osm->is_local[i] = osm->is[i];
230: }
231: }
232: }
233: PCGetOptionsPrefix(pc,&prefix);
234: flg = PETSC_FALSE;
235: PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);
236: if (flg) { PCASMPrintSubdomains(pc); }
238: if (osm->overlap > 0) {
239: /* Extend the "overlapping" regions by a number of steps */
240: MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
241: }
242: if (osm->sort_indices) {
243: for (i=0; i<osm->n_local_true; i++) {
244: ISSort(osm->is[i]);
245: if (osm->is_local) {
246: ISSort(osm->is_local[i]);
247: }
248: }
249: }
251: if (!osm->ksp) {
252: /* Create the local solvers */
253: PetscMalloc1(osm->n_local_true,&osm->ksp);
254: if (domain_dm) {
255: PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
256: }
257: for (i=0; i<osm->n_local_true; i++) {
258: KSPCreate(PETSC_COMM_SELF,&ksp);
259: KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
260: PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
261: PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
262: KSPSetType(ksp,KSPPREONLY);
263: KSPGetPC(ksp,&subpc);
264: PCGetOptionsPrefix(pc,&prefix);
265: KSPSetOptionsPrefix(ksp,prefix);
266: KSPAppendOptionsPrefix(ksp,"sub_");
267: if (domain_dm) {
268: KSPSetDM(ksp, domain_dm[i]);
269: KSPSetDMActive(ksp, PETSC_FALSE);
270: DMDestroy(&domain_dm[i]);
271: }
272: osm->ksp[i] = ksp;
273: }
274: if (domain_dm) {
275: PetscFree(domain_dm);
276: }
277: }
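/* lis is the sorted, duplicate-free union of all overlapping subdomains on this
   process; it sizes the local work vectors lx/ly and defines the restriction scatter */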
279: ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);
280: ISSortRemoveDups(osm->lis);
281: ISGetLocalSize(osm->lis, &m);
283: scall = MAT_INITIAL_MATRIX;
284: } else {
285: /*
286: Destroy the blocks from the previous iteration
287: */
288: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
289: MatDestroyMatrices(osm->n_local_true,&osm->pmat);
290: scall = MAT_INITIAL_MATRIX;
291: }
292: }
294: /*
295: Extract out the submatrices
296: */
297: MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
298: if (scall == MAT_INITIAL_MATRIX) {
299: PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
300: for (i=0; i<osm->n_local_true; i++) {
301: PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
302: PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
303: }
304: }
306: /* Convert the types of the submatrices (if need be) */
307: if (osm->sub_mat_type) {
308: for (i=0; i<osm->n_local_true; i++) {
309: MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));
310: }
311: }
313: if (!pc->setupcalled) {
314: VecType vtype;
316: /* Create the local work vectors (from the local matrices) and scatter contexts */
317: MatCreateVecs(pc->pmat,&vec,NULL);
319: if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
320: if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
321: PetscMalloc1(osm->n_local_true,&osm->lprolongation);
322: }
323: PetscMalloc1(osm->n_local_true,&osm->lrestriction);
324: PetscMalloc1(osm->n_local_true,&osm->x);
325: PetscMalloc1(osm->n_local_true,&osm->y);
327: ISGetLocalSize(osm->lis,&m);
328: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
329: MatGetVecType(osm->pmat[0],&vtype);
330: VecCreate(PETSC_COMM_SELF,&osm->lx);
331: VecSetSizes(osm->lx,m,m);
332: VecSetType(osm->lx,vtype);
333: VecDuplicate(osm->lx, &osm->ly);
334: VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);
335: ISDestroy(&isl);
337: for (i=0; i<osm->n_local_true; ++i) {
338: ISLocalToGlobalMapping ltog;
339: IS isll;
340: const PetscInt *idx_is;
341: PetscInt *idx_lis,nout;
343: ISGetLocalSize(osm->is[i],&m);
344: MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);
345: VecDuplicate(osm->x[i],&osm->y[i]);
347: /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
348: ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);
349: ISGetLocalSize(osm->is[i],&m);
350: ISGetIndices(osm->is[i], &idx_is);
351: PetscMalloc1(m,&idx_lis);
352: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);
353: if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
354: ISRestoreIndices(osm->is[i], &idx_is);
355: ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);
356: ISLocalToGlobalMappingDestroy(&ltog);
357: ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
358: VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);
359: ISDestroy(&isll);
360: ISDestroy(&isl);
361: if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
362: ISLocalToGlobalMapping ltog;
363: IS isll,isll_local;
364: const PetscInt *idx_local;
365: PetscInt *idx1, *idx2, nout;
367: ISGetLocalSize(osm->is_local[i],&m_local);
368: ISGetIndices(osm->is_local[i], &idx_local);
370: ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
371: PetscMalloc1(m_local,&idx1);
372: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);
373: ISLocalToGlobalMappingDestroy(&ltog);
374: if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
375: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);
377: ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);
378: PetscMalloc1(m_local,&idx2);
379: ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);
380: ISLocalToGlobalMappingDestroy(&ltog);
381: if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
382: ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);
384: ISRestoreIndices(osm->is_local[i], &idx_local);
385: VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);
387: ISDestroy(&isll);
388: ISDestroy(&isll_local);
389: }
390: }
391: VecDestroy(&vec);
392: }
394: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
395: IS *cis;
396: PetscInt c;
398: PetscMalloc1(osm->n_local_true, &cis);
399: for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
400: MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);
401: PetscFree(cis);
402: }
404: /* Return control to the user so that the submatrices can be modified (e.g., to apply
405: different boundary conditions for the submatrices than for the global problem) */
406: PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);
408: /*
409: Loop over subdomains putting them into local ksp
410: */
411: for (i=0; i<osm->n_local_true; i++) {
412: KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
413: if (!pc->setupcalled) {
414: KSPSetFromOptions(osm->ksp[i]);
415: }
416: }
417: return(0);
418: }
420: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
421: {
422: PC_ASM *osm = (PC_ASM*)pc->data;
423: PetscErrorCode ierr;
424: PetscInt i;
425: KSPConvergedReason reason;
428: for (i=0; i<osm->n_local_true; i++) {
429: KSPSetUp(osm->ksp[i]);
430: KSPGetConvergedReason(osm->ksp[i],&reason);
431: if (reason == KSP_DIVERGED_PC_FAILED) {
432: pc->failedreason = PC_SUBPC_ERROR;
433: }
434: }
435: return(0);
436: }
438: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
439: {
440: PC_ASM *osm = (PC_ASM*)pc->data;
442: PetscInt i,n_local_true = osm->n_local_true;
443: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
446: /*
447: support for limiting the restriction or interpolation to only local
448: subdomain values (leaving the other values 0).
449: */
450: if (!(osm->type & PC_ASM_RESTRICT)) {
451: forward = SCATTER_FORWARD_LOCAL;
452: /* have to zero the work RHS since scatter may leave some slots empty */
453: VecSet(osm->lx, 0.0);
454: }
455: if (!(osm->type & PC_ASM_INTERPOLATE)) {
456: reverse = SCATTER_REVERSE_LOCAL;
457: }
459: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
460: /* zero the global and the local solutions */
461: VecSet(y, 0.0);
462: VecSet(osm->ly, 0.0);
464: /* copy the global RHS to local RHS including the ghost nodes */
465: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
466: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
468: /* restrict local RHS to the overlapping 0-block RHS */
469: VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
470: VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
472: /* do the local solves */
473: for (i = 0; i < n_local_true; ++i) {
475: /* solve the overlapping i-block */
476: PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i],0);
477: KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);
478: KSPCheckSolve(osm->ksp[i], pc, osm->y[i]);
479: PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[i], osm->x[i], osm->y[i], 0);
481: if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restrictive additive) */
482: VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
483: VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
484: } else { /* interpolate the overlapping i-block solution to the local solution */
485: VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
486: VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
487: }
489: if (i < n_local_true-1) {
490: /* restrict local RHS to the overlapping (i+1)-block RHS */
491: VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
492: VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
494: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
495: /* update the overlapping (i+1)-block RHS using the current local solution */
496: MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);
497: VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]);
498: }
499: }
500: }
501: /* add the local solution to the global solution including the ghost nodes */
502: VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
503: VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
504: } else SETERRQ1(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
505: return(0);
506: }
508: static PetscErrorCode PCMatApply_ASM(PC pc,Mat X,Mat Y)
509: {
510: PC_ASM *osm = (PC_ASM*)pc->data;
511: Mat Z,W;
512: Vec x;
513: PetscInt i,m,N;
514: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
518: if (osm->n_local_true > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Not yet implemented");
519: /*
520: support for limiting the restriction or interpolation to only local
521: subdomain values (leaving the other values 0).
522: */
523: if (!(osm->type & PC_ASM_RESTRICT)) {
524: forward = SCATTER_FORWARD_LOCAL;
525: /* have to zero the work RHS since scatter may leave some slots empty */
526: VecSet(osm->lx, 0.0);
527: }
528: if (!(osm->type & PC_ASM_INTERPOLATE)) {
529: reverse = SCATTER_REVERSE_LOCAL;
530: }
531: VecGetLocalSize(osm->x[0], &m);
532: MatGetSize(X, NULL, &N);
533: MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &Z);
534: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
535: /* zero the global and the local solutions */
536: MatZeroEntries(Y);
537: VecSet(osm->ly, 0.0);
539: for (i = 0; i < N; ++i) {
540: MatDenseGetColumnVecRead(X, i, &x);
541: /* copy the global RHS to local RHS including the ghost nodes */
542: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
543: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
544: MatDenseRestoreColumnVecRead(X, i, &x);
546: MatDenseGetColumnVecWrite(Z, i, &x);
547: /* restrict local RHS to the overlapping 0-block RHS */
548: VecScatterBegin(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward);
549: VecScatterEnd(osm->lrestriction[0], osm->lx, x, INSERT_VALUES, forward);
550: MatDenseRestoreColumnVecWrite(Z, i, &x);
551: }
552: MatCreateSeqDense(PETSC_COMM_SELF, m, N, NULL, &W);
553: /* solve the overlapping 0-block */
554: PetscLogEventBegin(PC_ApplyOnBlocks, osm->ksp[0], Z, W, 0);
555: KSPMatSolve(osm->ksp[0], Z, W);
556: KSPCheckSolve(osm->ksp[0], pc, NULL);
557: PetscLogEventEnd(PC_ApplyOnBlocks, osm->ksp[0], Z, W,0);
558: MatDestroy(&Z);
560: for (i = 0; i < N; ++i) {
561: VecSet(osm->ly, 0.0);
562: MatDenseGetColumnVecRead(W, i, &x);
563: if (osm->lprolongation) { /* interpolate the non-overlapping 0-block solution to the local solution (only for restrictive additive) */
564: VecScatterBegin(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward);
565: VecScatterEnd(osm->lprolongation[0], x, osm->ly, ADD_VALUES, forward);
566: } else { /* interpolate the overlapping 0-block solution to the local solution */
567: VecScatterBegin(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse);
568: VecScatterEnd(osm->lrestriction[0], x, osm->ly, ADD_VALUES, reverse);
569: }
570: MatDenseRestoreColumnVecRead(W, i, &x);
572: MatDenseGetColumnVecWrite(Y, i, &x);
573: /* add the local solution to the global solution including the ghost nodes */
574: VecScatterBegin(osm->restriction, osm->ly, x, ADD_VALUES, reverse);
575: VecScatterEnd(osm->restriction, osm->ly, x, ADD_VALUES, reverse);
576: MatDenseRestoreColumnVecWrite(Y, i, &x);
577: }
578: MatDestroy(&W);
579: } else SETERRQ1(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
580: return(0);
581: }
583: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
584: {
585: PC_ASM *osm = (PC_ASM*)pc->data;
587: PetscInt i,n_local_true = osm->n_local_true;
588: ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
591: /*
592: Support for limiting the restriction or interpolation to only local
593: subdomain values (leaving the other values 0).
595: Note: these are reversed from PCApply_ASM() because we are applying the
596: transpose of the three terms.
597: */
599: if (!(osm->type & PC_ASM_INTERPOLATE)) {
600: forward = SCATTER_FORWARD_LOCAL;
601: /* have to zero the work RHS since scatter may leave some slots empty */
602: VecSet(osm->lx, 0.0);
603: }
604: if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
606: /* zero the global and the local solutions */
607: VecSet(y, 0.0);
608: VecSet(osm->ly, 0.0);
610: /* Copy the global RHS to local RHS including the ghost nodes */
611: VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
612: VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);
614: /* Restrict local RHS to the overlapping 0-block RHS */
615: VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
616: VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);
618: /* do the local solves */
619: for (i = 0; i < n_local_true; ++i) {
621: /* solve the overlapping i-block */
622: PetscLogEventBegin(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
623: KSPSolveTranspose(osm->ksp[i], osm->x[i], osm->y[i]);
624: KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);
625: PetscLogEventEnd(PC_ApplyOnBlocks,osm->ksp[i],osm->x[i],osm->y[i],0);
627: if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
628: VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
629: VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);
630: } else { /* interpolate the overlapping i-block solution to the local solution */
631: VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
632: VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);
633: }
635: if (i < n_local_true-1) {
636: /* Restrict local RHS to the overlapping (i+1)-block RHS */
637: VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
638: VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);
639: }
640: }
641: /* Add the local solution to the global solution including the ghost nodes */
642: VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
643: VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);
645: return(0);
647: }
649: static PetscErrorCode PCReset_ASM(PC pc)
650: {
651: PC_ASM *osm = (PC_ASM*)pc->data;
653: PetscInt i;
656: if (osm->ksp) {
657: for (i=0; i<osm->n_local_true; i++) {
658: KSPReset(osm->ksp[i]);
659: }
660: }
661: if (osm->pmat) {
662: if (osm->n_local_true > 0) {
663: MatDestroySubMatrices(osm->n_local_true,&osm->pmat);
664: }
665: }
666: if (osm->lrestriction) {
667: VecScatterDestroy(&osm->restriction);
668: for (i=0; i<osm->n_local_true; i++) {
669: VecScatterDestroy(&osm->lrestriction[i]);
670: if (osm->lprolongation) {VecScatterDestroy(&osm->lprolongation[i]);}
671: VecDestroy(&osm->x[i]);
672: VecDestroy(&osm->y[i]);
673: }
674: PetscFree(osm->lrestriction);
675: if (osm->lprolongation) {PetscFree(osm->lprolongation);}
676: PetscFree(osm->x);
677: PetscFree(osm->y);
679: }
680: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
681: ISDestroy(&osm->lis);
682: VecDestroy(&osm->lx);
683: VecDestroy(&osm->ly);
684: if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
685: MatDestroyMatrices(osm->n_local_true, &osm->lmats);
686: }
688: PetscFree(osm->sub_mat_type);
690: osm->is = NULL;
691: osm->is_local = NULL;
692: return(0);
693: }
695: static PetscErrorCode PCDestroy_ASM(PC pc)
696: {
697: PC_ASM *osm = (PC_ASM*)pc->data;
699: PetscInt i;
702: PCReset_ASM(pc);
703: if (osm->ksp) {
704: for (i=0; i<osm->n_local_true; i++) {
705: KSPDestroy(&osm->ksp[i]);
706: }
707: PetscFree(osm->ksp);
708: }
709: PetscFree(pc->data);
710: return(0);
711: }
713: static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
714: {
715: PC_ASM *osm = (PC_ASM*)pc->data;
717: PetscInt blocks,ovl;
718: PetscBool flg;
719: PCASMType asmtype;
720: PCCompositeType loctype;
721: char sub_mat_type[256];
724: PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");
725: PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
726: PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
727: if (flg) {
728: PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
729: osm->dm_subdomains = PETSC_FALSE;
730: }
731: PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);
732: if (flg) {
733: PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);
734: osm->dm_subdomains = PETSC_FALSE;
735: }
736: PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
737: if (flg) {
738: PCASMSetOverlap(pc,ovl);
739: osm->dm_subdomains = PETSC_FALSE;
740: }
741: flg = PETSC_FALSE;
742: PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
743: if (flg) {PCASMSetType(pc,asmtype); }
744: flg = PETSC_FALSE;
745: PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);
746: if (flg) {PCASMSetLocalType(pc,loctype); }
747: PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);
748: if (flg) {
749: PCASMSetSubMatType(pc,sub_mat_type);
750: }
751: PetscOptionsTail();
752: return(0);
753: }
755: /*------------------------------------------------------------------------------------*/
757: static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
758: {
759: PC_ASM *osm = (PC_ASM*)pc->data;
761: PetscInt i;
764: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
765: if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
767: if (!pc->setupcalled) {
768: if (is) {
769: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is[i]);}
770: }
771: if (is_local) {
772: for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is_local[i]);}
773: }
774: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
776: osm->n_local_true = n;
777: osm->is = NULL;
778: osm->is_local = NULL;
779: if (is) {
780: PetscMalloc1(n,&osm->is);
781: for (i=0; i<n; i++) osm->is[i] = is[i];
782: /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
783: osm->overlap = -1;
784: }
785: if (is_local) {
786: PetscMalloc1(n,&osm->is_local);
787: for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
788: if (!is) {
789: PetscMalloc1(osm->n_local_true,&osm->is);
790: for (i=0; i<osm->n_local_true; i++) {
791: if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
792: ISDuplicate(osm->is_local[i],&osm->is[i]);
793: ISCopy(osm->is_local[i],osm->is[i]);
794: } else {
795: PetscObjectReference((PetscObject)osm->is_local[i]);
796: osm->is[i] = osm->is_local[i];
797: }
798: }
799: }
800: }
801: }
802: return(0);
803: }
805: static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
806: {
807: PC_ASM *osm = (PC_ASM*)pc->data;
809: PetscMPIInt rank,size;
810: PetscInt n;
813: if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
814: if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets; they cannot be set globally yet.");
816: /*
817: Split the subdomains equally among all processors
818: */
819: MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
820: MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
821: n = N/size + ((N % size) > rank);
822: if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
823: if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
824: if (!pc->setupcalled) {
825: PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
827: osm->n_local_true = n;
828: osm->is = NULL;
829: osm->is_local = NULL;
830: }
831: return(0);
832: }
834: static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
835: {
836: PC_ASM *osm = (PC_ASM*)pc->data;
839: if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
840: if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
841: if (!pc->setupcalled) osm->overlap = ovl;
842: return(0);
843: }
845: static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
846: {
847: PC_ASM *osm = (PC_ASM*)pc->data;
850: osm->type = type;
851: osm->type_set = PETSC_TRUE;
852: return(0);
853: }
855: static PetscErrorCode PCASMGetType_ASM(PC pc,PCASMType *type)
856: {
857: PC_ASM *osm = (PC_ASM*)pc->data;
860: *type = osm->type;
861: return(0);
862: }
864: static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
865: {
866: PC_ASM *osm = (PC_ASM *) pc->data;
869: if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
870: osm->loctype = type;
871: return(0);
872: }
874: static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
875: {
876: PC_ASM *osm = (PC_ASM *) pc->data;
879: *type = osm->loctype;
880: return(0);
881: }
883: static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
884: {
885: PC_ASM *osm = (PC_ASM*)pc->data;
888: osm->sort_indices = doSort;
889: return(0);
890: }
892: static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
893: {
894: PC_ASM *osm = (PC_ASM*)pc->data;
898: if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
900: if (n_local) *n_local = osm->n_local_true;
901: if (first_local) {
902: MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
903: *first_local -= osm->n_local_true;
904: }
905: if (ksp) {
906: /* Assume that local solves are now different; not necessarily
907: true though! This flag is used only for PCView_ASM() */
908: *ksp = osm->ksp;
909: osm->same_local_solves = PETSC_FALSE;
910: }
911: return(0);
912: }
914: static PetscErrorCode PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
915: {
916: PC_ASM *osm = (PC_ASM*)pc->data;
921: *sub_mat_type = osm->sub_mat_type;
922: return(0);
923: }
925: static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
926: {
927: PetscErrorCode ierr;
928: PC_ASM *osm = (PC_ASM*)pc->data;
932: PetscFree(osm->sub_mat_type);
933: PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);
934: return(0);
935: }
937: /*@C
938: PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
940: Collective on pc
942: Input Parameters:
943: + pc - the preconditioner context
944: . n - the number of subdomains for this processor (default value = 1)
945: . is - the index set that defines the subdomains for this processor
946: (or NULL for PETSc to determine subdomains)
947: - is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
948: (or NULL to not provide these)
950: Options Database Key:
951: To set the number of local subdomain blocks rather than specify the
952: index sets, use the option
953: . -pc_asm_local_blocks <blks> - Sets local blocks
955: Notes:
956: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
958: By default the ASM preconditioner uses 1 block per processor.
960: Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
962: If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
963: back to form the global solution (this is the standard restricted additive Schwarz method)
965: If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
966: no code to handle that case.
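Sample usage (a minimal sketch; rstart is a hypothetical first local row and
error checking is omitted):
.vb
      IS is[2];
      /* two local blocks of 10 rows each, in the global numbering */
      ISCreateStride(PETSC_COMM_SELF,10,rstart,1,&is[0]);
      ISCreateStride(PETSC_COMM_SELF,10,rstart+10,1,&is[1]);
      PCASMSetLocalSubdomains(pc,2,is,NULL);
.ve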
968: Level: advanced
970: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
971: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
972: @*/
973: PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
974: {
979: PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
980: return(0);
981: }
983: /*@C
984: PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
985: additive Schwarz preconditioner. Either all or no processors in the
986: PC communicator must call this routine, with the same index sets.
988: Collective on pc
990: Input Parameters:
991: + pc - the preconditioner context
992: . N - the number of subdomains for all processors
993: . is - the index sets that define the subdomains for all processors
994: (or NULL to ask PETSc to determine the subdomains)
995: - is_local - the index sets that define the local part of the subdomains for this processor
996: (or NULL to not provide this information)
998: Options Database Key:
999: To set the total number of subdomain blocks rather than specify the
1000: index sets, use the option
1001: . -pc_asm_blocks <blks> - Sets total blocks
1003: Notes:
1004: Currently you cannot use this to set the actual subdomains with the argument is or is_local.
1006: By default the ASM preconditioner uses 1 block per processor.
1008: These index sets cannot be destroyed until after completion of the
1009: linear solves for which the ASM preconditioner is being used.
1011: Use PCASMSetLocalSubdomains() to set local subdomains.
1013: The IS numbering is in the parallel, global numbering of the vector for both is and is_local
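Sample usage (a minimal sketch; eight blocks split as evenly as possible
across the ranks, with PETSc choosing the index sets):
.vb
      PCASMSetTotalSubdomains(pc,8,NULL,NULL);
.ve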
1015: Level: advanced
1017: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1018: PCASMCreateSubdomains2D()
1019: @*/
1020: PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
1021: {
1026: PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
1027: return(0);
1028: }
1030: /*@
1031: PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
1032: additive Schwarz preconditioner. Either all or no processors in the
1033: PC communicator must call this routine.
1035: Logically Collective on pc
1037: Input Parameters:
1038: + pc - the preconditioner context
1039: - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
1041: Options Database Key:
1042: . -pc_asm_overlap <ovl> - Sets overlap
1044: Notes:
1045: By default the ASM preconditioner uses 1 block per processor. To use
1046: multiple blocks per processor, see PCASMSetTotalSubdomains() and
1047: PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1049: The overlap defaults to 1, so if one desires that no additional
1050: overlap be computed beyond what may have been set with a call to
1051: PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1052: must be set to 0. In particular, if one does not explicitly set
1053: the subdomains in an application code, then all overlap would be computed
1054: internally by PETSc, and using an overlap of 0 would result in an ASM
1055: variant that is equivalent to the block Jacobi preconditioner.
1057: The default algorithm used by PETSc to increase overlap is fast but not scalable;
1058: use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.
1060: Note that one can define initial index sets with any overlap via
1061: PCASMSetLocalSubdomains(); the routine
1062: PCASMSetOverlap() merely allows PETSc to extend that overlap further
1063: if desired.
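Sample usage (a minimal sketch; with the default one block per process and
no explicit subdomains, this yields the block Jacobi preconditioner):
.vb
      PCASMSetOverlap(pc,0);
.ve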
1065: Level: intermediate
1067: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1068: PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1069: @*/
1070: PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
1071: {
1077: PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
1078: return(0);
1079: }
1081: /*@
1082: PCASMSetType - Sets the type of restriction and interpolation used
1083: for local problems in the additive Schwarz method.
1085: Logically Collective on pc
1087: Input Parameters:
1088: + pc - the preconditioner context
1089: - type - variant of ASM, one of
1090: .vb
1091: PC_ASM_BASIC - full interpolation and restriction
1092: PC_ASM_RESTRICT - full restriction, local processor interpolation
1093: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1094: PC_ASM_NONE - local processor restriction and interpolation
1095: .ve
1097: Options Database Key:
1098: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1100: Notes:
1101: If the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1102: to limit the local processor interpolation.
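Sample usage (a minimal sketch, equivalent to -pc_asm_type basic):
.vb
      PCASMSetType(pc,PC_ASM_BASIC);
.ve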
1104: Level: intermediate
1106: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1107: PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1108: @*/
1109: PetscErrorCode PCASMSetType(PC pc,PCASMType type)
1110: {
1116: PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
1117: return(0);
1118: }
1120: /*@
1121: PCASMGetType - Gets the type of restriction and interpolation used
1122: for local problems in the additive Schwarz method.
1124: Logically Collective on pc
1126: Input Parameter:
1127: . pc - the preconditioner context
1129: Output Parameter:
1130: . type - variant of ASM, one of
1132: .vb
1133: PC_ASM_BASIC - full interpolation and restriction
1134: PC_ASM_RESTRICT - full restriction, local processor interpolation
1135: PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1136: PC_ASM_NONE - local processor restriction and interpolation
1137: .ve
1139: Options Database Key:
1140: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1142: Level: intermediate
1144: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1145: PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1146: @*/
1147: PetscErrorCode PCASMGetType(PC pc,PCASMType *type)
1148: {
1153: PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
1154: return(0);
1155: }
1157: /*@
1158: PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1160: Logically Collective on pc
1162: Input Parameters:
1163: + pc - the preconditioner context
1164: - type - type of composition, one of
1165: .vb
1166: PC_COMPOSITE_ADDITIVE - local additive combination
1167: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1168: .ve
1170: Options Database Key:
1171: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
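Sample usage (a minimal sketch, equivalent to -pc_asm_local_type multiplicative):
.vb
      PCASMSetLocalType(pc,PC_COMPOSITE_MULTIPLICATIVE);
.ve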
1173: Level: intermediate
1175: .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1176: @*/
1177: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1178: {
1184: PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1185: return(0);
1186: }
1188: /*@
1189: PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1191: Logically Collective on pc
1193: Input Parameter:
1194: . pc - the preconditioner context
1196: Output Parameter:
1197: . type - type of composition, one of
1198: .vb
1199: PC_COMPOSITE_ADDITIVE - local additive combination
1200: PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1201: .ve
1203: Options Database Key:
1204: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1206: Level: intermediate
1208: .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1209: @*/
1210: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1211: {
1217: PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1218: return(0);
1219: }
1221: /*@
1222: PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1224: Logically Collective on pc
1226: Input Parameters:
1227: + pc - the preconditioner context
1228: - doSort - sort the subdomain indices
1230: Level: intermediate
1232: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1233: PCASMCreateSubdomains2D()
1234: @*/
1235: PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort)
1236: {
1242: PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1243: return(0);
1244: }
1246: /*@C
1247: PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1248: this processor.
1250: Collective on pc iff first_local is requested
1252: Input Parameter:
1253: . pc - the preconditioner context
1255: Output Parameters:
1256: + n_local - the number of blocks on this processor or NULL
1257: . first_local - the global number of the first block on this processor or NULL,
1258: all processors must request or all must pass NULL
1259: - ksp - the array of KSP contexts
1261: Note:
1262: After PCASMGetSubKSP() the array of KSP contexts must not be freed.
1264: You must call KSPSetUp() before calling PCASMGetSubKSP().
1266: Fortran note:
1267: The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
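Sample usage (a minimal sketch; ksp is assumed to be the outer solver, and the
block solvers are switched to ILU, as one could also do with -sub_pc_type ilu):
.vb
      KSP      *subksp;
      PC       subpc;
      PetscInt i,nlocal;
      KSPSetUp(ksp);
      PCASMGetSubKSP(pc,&nlocal,NULL,&subksp);
      for (i=0; i<nlocal; i++) {
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCILU);
      }
.ve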
1269: Level: advanced
1271: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1272: PCASMCreateSubdomains2D(),
1273: @*/
1274: PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1275: {
1280: PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1281: return(0);
1282: }
1284: /* -------------------------------------------------------------------------------------*/
1285: /*MC
1286: PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1287: its own KSP object.
1289: Options Database Keys:
1290: + -pc_asm_blocks <blks> - Sets total blocks
1291: . -pc_asm_overlap <ovl> - Sets overlap
1292: . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1293: - -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive
1295: IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1296: will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1297: -pc_asm_type basic to use the standard ASM.
1299: Notes:
1300: Each processor can have one or more blocks, but a block cannot be shared by more
1301: than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1303: To set options on the solvers for each block, append -sub_ to all the KSP and PC
1304: options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1306: To set the options on the solvers separately for each block, call PCASMGetSubKSP()
1307: and set the options directly on the resulting KSP object (you can access its PC
1308: with KSPGetPC())
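Example of command-line usage (one typical invocation; ./app stands in for the
application binary):
.vb
      mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu
.ve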
1310: Level: beginner
1312: References:
1313: +   1. - M. Dryja and O. B. Widlund, An additive variant of the Schwarz alternating method for the case of many subregions,
1314:    Courant Institute, New York University, Technical report.
1315: - 2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1316: Cambridge University Press.
1318: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC,
1319: PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1320: PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1322: M*/
1324: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1325: {
1327: PC_ASM *osm;
1330: PetscNewLog(pc,&osm);
1332: osm->n = PETSC_DECIDE;
1333: osm->n_local = 0;
1334: osm->n_local_true = PETSC_DECIDE;
1335: osm->overlap = 1;
1336: osm->ksp = NULL;
1337: osm->restriction = NULL;
1338: osm->lprolongation = NULL;
1339: osm->lrestriction = NULL;
1340: osm->x = NULL;
1341: osm->y = NULL;
1342: osm->is = NULL;
1343: osm->is_local = NULL;
1344: osm->mat = NULL;
1345: osm->pmat = NULL;
1346: osm->type = PC_ASM_RESTRICT;
1347: osm->loctype = PC_COMPOSITE_ADDITIVE;
1348: osm->same_local_solves = PETSC_TRUE;
1349: osm->sort_indices = PETSC_TRUE;
1350: osm->dm_subdomains = PETSC_FALSE;
1351: osm->sub_mat_type = NULL;
1353: pc->data = (void*)osm;
1354: pc->ops->apply = PCApply_ASM;
1355: pc->ops->matapply = PCMatApply_ASM;
1356: pc->ops->applytranspose = PCApplyTranspose_ASM;
1357: pc->ops->setup = PCSetUp_ASM;
1358: pc->ops->reset = PCReset_ASM;
1359: pc->ops->destroy = PCDestroy_ASM;
1360: pc->ops->setfromoptions = PCSetFromOptions_ASM;
1361: pc->ops->setuponblocks = PCSetUpOnBlocks_ASM;
1362: pc->ops->view = PCView_ASM;
1363: pc->ops->applyrichardson = NULL;
1365: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1366: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1367: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1368: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1369: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);
1370: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);
1371: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);
1372: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1373: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1374: PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);
1375: PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);
1376: return(0);
1377: }
1379: /*@C
1380: PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1381: preconditioner for any problem on a general grid.
1383: Collective
1385: Input Parameters:
1386: + A - The global matrix operator
1387: - n - the number of local blocks
1389: Output Parameters:
1390: . outis - the array of index sets defining the subdomains
1392: Level: advanced
1394: Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1395: from these if you use PCASMSetLocalSubdomains()
1397: In the Fortran version you must provide the array outis[], already allocated with length n.
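Sample usage (a minimal sketch pairing creation with cleanup; the solves happen
in between):
.vb
      IS *is;
      PCASMCreateSubdomains(A,4,&is);
      PCASMSetLocalSubdomains(pc,4,is,NULL);
      /* ... KSPSolve() etc. ... */
      PCASMDestroySubdomains(4,is,NULL);
.ve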
1399: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1400: @*/
1401: PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1402: {
1403: MatPartitioning mpart;
1404: const char *prefix;
1405: PetscInt i,j,rstart,rend,bs;
1406: PetscBool hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1407: Mat Ad = NULL, adj;
1408: IS ispart,isnumb,*is;
1409: PetscErrorCode ierr;
1414: if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1416: /* Get prefix, row distribution, and block size */
1417: MatGetOptionsPrefix(A,&prefix);
1418: MatGetOwnershipRange(A,&rstart,&rend);
1419: MatGetBlockSize(A,&bs);
1420: if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1422: /* Get diagonal block from matrix if possible */
1423: MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);
1424: if (hasop) {
1425: MatGetDiagonalBlock(A,&Ad);
1426: }
1427: if (Ad) {
1428: PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1429: if (!isbaij) {PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1430: }
1431: if (Ad && n > 1) {
1432: PetscBool match,done;
1433: /* Try to setup a good matrix partitioning if available */
1434: MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1435: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1436: MatPartitioningSetFromOptions(mpart);
1437: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1438: if (!match) {
1439: PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1440: }
1441: if (!match) { /* assume a "good" partitioner is available */
1442: PetscInt na;
1443: const PetscInt *ia,*ja;
1444: MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1445: if (done) {
1446: /* Build adjacency matrix by hand. Unfortunately a call to
1447: MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1448: remove the block-aij structure and we cannot expect
1449: MatPartitioning to split vertices as we need */
1450: PetscInt i,j,len,nnz,cnt,*iia=NULL,*jja=NULL;
1451: const PetscInt *row;
1452: nnz = 0;
1453: for (i=0; i<na; i++) { /* count number of nonzeros */
1454: len = ia[i+1] - ia[i];
1455: row = ja + ia[i];
1456: for (j=0; j<len; j++) {
1457: if (row[j] == i) { /* don't count diagonal */
1458: len--; break;
1459: }
1460: }
1461: nnz += len;
1462: }
1463: PetscMalloc1(na+1,&iia);
1464: PetscMalloc1(nnz,&jja);
1465: nnz = 0;
1466: iia[0] = 0;
1467: for (i=0; i<na; i++) { /* fill adjacency */
1468: cnt = 0;
1469: len = ia[i+1] - ia[i];
1470: row = ja + ia[i];
1471: for (j=0; j<len; j++) {
1472: if (row[j] != i) { /* if not diagonal */
1473: jja[nnz+cnt++] = row[j];
1474: }
1475: }
1476: nnz += cnt;
1477: iia[i+1] = nnz;
1478: }
1479: /* Partitioning of the adjacency matrix */
1480: MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1481: MatPartitioningSetAdjacency(mpart,adj);
1482: MatPartitioningSetNParts(mpart,n);
1483: MatPartitioningApply(mpart,&ispart);
1484: ISPartitioningToNumbering(ispart,&isnumb);
1485: MatDestroy(&adj);
1486: foundpart = PETSC_TRUE;
1487: }
1488: MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1489: }
1490: MatPartitioningDestroy(&mpart);
1491: }
1493: PetscMalloc1(n,&is);
1494: *outis = is;
1496: if (!foundpart) {
1498: /* Partitioning by contiguous chunks of rows */
1500: PetscInt mbs = (rend-rstart)/bs;
1501: PetscInt start = rstart;
1502: for (i=0; i<n; i++) {
1503: PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1504: ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1505: start += count;
1506: }
1508: } else {
1510: /* Partitioning by adjacency of diagonal block */
1512: const PetscInt *numbering;
1513: PetscInt *count,nidx,*indices,*newidx,start=0;
1514: /* Get node count in each partition */
1515: PetscMalloc1(n,&count);
1516: ISPartitioningCount(ispart,n,count);
1517: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1518: for (i=0; i<n; i++) count[i] *= bs;
1519: }
1520: /* Build indices from node numbering */
1521: ISGetLocalSize(isnumb,&nidx);
1522: PetscMalloc1(nidx,&indices);
1523: for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1524: ISGetIndices(isnumb,&numbering);
1525: PetscSortIntWithPermutation(nidx,numbering,indices);
1526: ISRestoreIndices(isnumb,&numbering);
1527: if (isbaij && bs > 1) { /* adjust for the block-aij case */
1528: PetscMalloc1(nidx*bs,&newidx);
1529: for (i=0; i<nidx; i++) {
1530: for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1531: }
1532: PetscFree(indices);
1533: nidx *= bs;
1534: indices = newidx;
1535: }
1536: /* Shift to get global indices */
1537: for (i=0; i<nidx; i++) indices[i] += rstart;
1539: /* Build the index sets for each block */
1540: for (i=0; i<n; i++) {
1541: ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1542: ISSort(is[i]);
1543: start += count[i];
1544: }
1546: PetscFree(count);
1547: PetscFree(indices);
1548: ISDestroy(&isnumb);
1549: ISDestroy(&ispart);
1551: }
1552: return(0);
1553: }
1555: /*@C
1556: PCASMDestroySubdomains - Destroys the index sets created with
1557: PCASMCreateSubdomains(). Should be called after setting subdomains
1558: with PCASMSetLocalSubdomains().
1560: Collective
1562: Input Parameters:
1563: + n - the number of index sets
1564: . is - the array of index sets
1565: - is_local - the array of local index sets, can be NULL
1567: Level: advanced
1569: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1570: @*/
1571: PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1572: {
1573: PetscInt i;
1577: if (n <= 0) return(0);
1578: if (is) {
1580: for (i=0; i<n; i++) { ISDestroy(&is[i]); }
1581: PetscFree(is);
1582: }
1583: if (is_local) {
1585: for (i=0; i<n; i++) { ISDestroy(&is_local[i]); }
1586: PetscFree(is_local);
1587: }
1588: return(0);
1589: }
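/*
   The matching cleanup for the sketch above (same assumed driver): because
   PCASMSetLocalSubdomains() takes its own references to the index sets, the
   caller's array can be released as soon as the handoff is done.

     PCASMDestroySubdomains(2,is,NULL);
*/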
1591: /*@
1592: PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1593: preconditioner for a two-dimensional problem on a regular grid.
1595: Not Collective
1597: Input Parameters:
1598: + m, n - the number of mesh points in the x and y directions
1599: . M, N - the number of subdomains in the x and y directions
1600: . dof - degrees of freedom per node
1601: - overlap - overlap in mesh lines
1603: Output Parameters:
1604: + Nsub - the number of subdomains created
1605: . is - array of index sets defining overlapping (if overlap > 0) subdomains
1606: - is_local - array of index sets defining non-overlapping subdomains
1608: Note:
1609:   Presently PCASMCreateSubdomains2D() is valid only for sequential
1610: preconditioners. More general related routines are
1611: PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1613: Level: advanced
1615: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1616: PCASMSetOverlap()
1617: @*/
1618: PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1619: {
1620: PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1622: PetscInt nidx,*idx,loc,ii,jj,count;
1625:   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only dof = 1 is supported");
1627: *Nsub = N*M;
1628: PetscMalloc1(*Nsub,is);
1629: PetscMalloc1(*Nsub,is_local);
1630: ystart = 0;
1631: loc_outer = 0;
1632: for (i=0; i<N; i++) {
1633: height = n/N + ((n % N) > i); /* height of subdomain */
1634: if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1635: yleft = ystart - overlap; if (yleft < 0) yleft = 0;
1636: yright = ystart + height + overlap; if (yright > n) yright = n;
1637: xstart = 0;
1638: for (j=0; j<M; j++) {
1639: width = m/M + ((m % M) > j); /* width of subdomain */
1640: if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1641: xleft = xstart - overlap; if (xleft < 0) xleft = 0;
1642: xright = xstart + width + overlap; if (xright > m) xright = m;
1643: nidx = (xright - xleft)*(yright - yleft);
1644: PetscMalloc1(nidx,&idx);
1645: loc = 0;
1646: for (ii=yleft; ii<yright; ii++) {
1647: count = m*ii + xleft;
1648: for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1649: }
1650: ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1651: if (overlap == 0) {
1652: PetscObjectReference((PetscObject)(*is)[loc_outer]);
1654: (*is_local)[loc_outer] = (*is)[loc_outer];
1655: } else {
1656: for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1657: for (jj=xstart; jj<xstart+width; jj++) {
1658: idx[loc++] = m*ii + jj;
1659: }
1660: }
1661: ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1662: }
1663: PetscFree(idx);
1664: xstart += width;
1665: loc_outer++;
1666: }
1667: ystart += height;
1668: }
1669: for (i=0; i<*Nsub; i++) { ISSort((*is)[i]); }
1670: return(0);
1671: }
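/*
   A minimal sketch (assumed sequential driver, not code from this file):
   a 12 x 12 grid with one unknown per node, split into 2 x 3 subdomains
   with one mesh line of overlap. Both output arrays should eventually be
   released with PCASMDestroySubdomains(Nsub,is,is_local).

     PetscInt Nsub;
     IS       *is,*is_local;
     PCASMCreateSubdomains2D(12,12,2,3,1,1,&Nsub,&is,&is_local);
     PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
*/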
1673: /*@C
1674: PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1675: only) for the additive Schwarz preconditioner.
1677: Not Collective
1679: Input Parameter:
1680: . pc - the preconditioner context
1682: Output Parameters:
1683: + n - the number of subdomains for this processor (default value = 1)
1684: . is - the index sets that define the subdomains for this processor
1685: - is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1688: Notes:
1689: The IS numbering is in the parallel, global numbering of the vector.
1691: Level: advanced
1693: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1694: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1695: @*/
1696: PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1697: {
1698: PC_ASM *osm = (PC_ASM*)pc->data;
1700: PetscBool match;
1706: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1707: if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1708: if (n) *n = osm->n_local_true;
1709: if (is) *is = osm->is;
1710: if (is_local) *is_local = osm->is_local;
1711: return(0);
1712: }
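/*
   A minimal query sketch (assumed caller): the returned pointers reference
   the preconditioner's own arrays, so nothing obtained here needs to be
   destroyed by the caller.

     PetscInt nloc;
     IS       *is,*is_local;
     PCASMGetLocalSubdomains(pc,&nloc,&is,&is_local);
*/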
1714: /*@C
1715: PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1716: only) for the additive Schwarz preconditioner.
1718: Not Collective
1720: Input Parameter:
1721: . pc - the preconditioner context
1723: Output Parameters:
1724: + n - the number of matrices for this processor (default value = 1)
1725: - mat - the matrices
1727: Level: advanced
1729: Notes:
1730:     Call after PCSetUp() (or KSPSetUp()) but before PCApply() and before PCSetUpOnBlocks()
1732:     Usually one would use PCSetModifySubMatrices() to change the submatrices when building the preconditioner.
1734: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1735: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1736: @*/
1737: PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1738: {
1739: PC_ASM *osm;
1741: PetscBool match;
1747: if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1748: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1749: if (!match) {
1750: if (n) *n = 0;
1751: if (mat) *mat = NULL;
1752: } else {
1753: osm = (PC_ASM*)pc->data;
1754: if (n) *n = osm->n_local_true;
1755: if (mat) *mat = osm->pmat;
1756: }
1757: return(0);
1758: }
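/*
   A minimal sketch (assumed caller with an existing KSP ksp and its PC pc):
   the submatrices exist only after setup, so the query must follow
   KSPSetUp() (or PCSetUp()), as the error check above enforces.

     PetscInt nmat;
     Mat      *mats;
     KSPSetUp(ksp);
     PCASMGetLocalSubmatrices(pc,&nmat,&mats);
*/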
1760: /*@
1761: PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1763: Logically Collective
1765:     Input Parameters:
1766: + pc - the preconditioner
1767: - flg - boolean indicating whether to use subdomains defined by the DM
1769: Options Database Key:
1770:     . -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM when possible
1772: Level: intermediate
1774: Notes:
1775: PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1776: so setting either of the first two effectively turns the latter off.
1778: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1779: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1780: @*/
1781: PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg)
1782: {
1783: PC_ASM *osm = (PC_ASM*)pc->data;
1785: PetscBool match;
1790:   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot be set after PCSetUp() has been called");
1791: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1792: if (match) {
1793: osm->dm_subdomains = flg;
1794: }
1795: return(0);
1796: }
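/*
   A minimal sketch (assumed caller): the flag must be set before the PC is
   set up, e.g. immediately after selecting the type, since the routine
   above refuses to run on an already set-up PC.

     PCSetType(pc,PCASM);
     PCASMSetDMSubdomains(pc,PETSC_TRUE);
*/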
1798: /*@
1799: PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1800: Not Collective
1802: Input Parameter:
1803: . pc - the preconditioner
1805: Output Parameter:
1806: . flg - boolean indicating whether to use subdomains defined by the DM
1808: Level: intermediate
1810: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1811: PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1812: @*/
1813: PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1814: {
1815: PC_ASM *osm = (PC_ASM*)pc->data;
1817: PetscBool match;
1822: PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1823: if (match) *flg = osm->dm_subdomains;
1824: else *flg = PETSC_FALSE;
1825: return(0);
1826: }
1828: /*@
1829: PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1831: Not Collective
1833: Input Parameter:
1834: . pc - the PC
1836: Output Parameter:
1837:   . sub_mat_type - name of the matrix type used for the subsolves
1839: Level: advanced
1841: .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1842: @*/
1843: PetscErrorCode PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1844: {
1849: PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1850: return(0);
1851: }
1853: /*@
1854: PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1856:    Logically Collective
1858: Input Parameters:
1859: + pc - the PC object
1860: - sub_mat_type - matrix type
1862: Options Database Key:
1863: . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1865: Notes:
1866: See "${PETSC_DIR}/include/petscmat.h" for available types
1868: Level: advanced
1870: .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1871: @*/
1872: PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1873: {
1878: PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
1879: return(0);
1880: }
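/*
   A minimal sketch (an assumption: PETSc configured with ViennaCL support,
   so that MATSEQAIJVIENNACL is available): requesting GPU-capable storage
   for the subdomain matrices used by the subsolves.

     PCASMSetSubMatType(pc,MATSEQAIJVIENNACL);
*/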