Actual source code: da.c
petsc-3.11.0 2019-03-29
1: #include <petsc/private/dmdaimpl.h>
3: /*@
4: DMDASetSizes - Sets the number of grid points in each of the three coordinate directions
6: Logically Collective on DMDA
8: Input Parameters:
9: + da - the DMDA
10: . M - the global X size
11: . N - the global Y size
12: - P - the global Z size
14: Level: intermediate
16: Developer Notes:
17: Since the dimension may not yet have been set, the code cannot error check for non-positive numbers of grid points in the Y and Z directions
19: .seealso: PetscSplitOwnership()
20: @*/
21: PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
22: {
23: DM_DA *dd = (DM_DA*)da->data;
30: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
31: if (M < 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in X direction must be positive");
32: if (N < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Y direction must be positive");
33: if (P < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Z direction must be positive");
35: dd->M = M;
36: dd->N = N;
37: dd->P = P;
38: return(0);
39: }
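/*
   A minimal usage sketch (not part of da.c) of the incremental construction path in which
   DMDASetSizes() participates, assuming a 64 x 64 2d grid with one degree of freedom; this is
   roughly the sequence of setters that DMDACreate2d() performs internally.
*/
static PetscErrorCode CreateDA2dSketch(MPI_Comm comm,DM *da)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDACreate(comm,da);CHKERRQ(ierr);
  ierr = DMSetDimension(*da,2);CHKERRQ(ierr);                /* dimension before the setters    */
  ierr = DMDASetSizes(*da,64,64,1);CHKERRQ(ierr);            /* global M, N, P                  */
  ierr = DMDASetNumProcs(*da,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da,1);CHKERRQ(ierr);
  ierr = DMDASetStencilType(*da,DMDA_STENCIL_STAR);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da,1);CHKERRQ(ierr);
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);                /* optional command-line overrides */
  ierr = DMSetUp(*da);CHKERRQ(ierr);                         /* all setters must precede this   */
  PetscFunctionReturn(0);
}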
41: /*@
42: DMDASetNumProcs - Sets the number of processes in each dimension
44: Logically Collective on DMDA
46: Input Parameters:
47: + da - the DMDA
48: . m - the number of X procs (or PETSC_DECIDE)
49: . n - the number of Y procs (or PETSC_DECIDE)
50: - p - the number of Z procs (or PETSC_DECIDE)
52: Level: intermediate
54: .seealso: DMDASetSizes(), PetscSplitOwnership()
55: @*/
56: PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
57: {
58: DM_DA *dd = (DM_DA*)da->data;
66: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
67: dd->m = m;
68: dd->n = n;
69: dd->p = p;
70: if (da->dim == 2) {
71: PetscMPIInt size;
72: MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);
73: if ((dd->m > 0) && (dd->n < 0)) {
74: dd->n = size/dd->m;
75: if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
76: }
77: if ((dd->n > 0) && (dd->m < 0)) {
78: dd->m = size/dd->n;
79: if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
80: }
81: }
82: return(0);
83: }
85: /*@
86: DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.
88: Not collective
90: Input Parameters:
91: + da - The DMDA
92: - bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC
94: Level: intermediate
96: .keywords: distributed array, periodicity
97: .seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
98: @*/
99: PetscErrorCode DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
100: {
101: DM_DA *dd = (DM_DA*)da->data;
108: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
109: dd->bx = bx;
110: dd->by = by;
111: dd->bz = bz;
112: return(0);
113: }
115: /*@
116: DMDASetDof - Sets the number of degrees of freedom per vertex
118: Not collective
120: Input Parameters:
121: + da - The DMDA
122: - dof - Number of degrees of freedom
124: Level: intermediate
126: .keywords: distributed array, degrees of freedom
127: .seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
128: @*/
129: PetscErrorCode DMDASetDof(DM da, PetscInt dof)
130: {
131: DM_DA *dd = (DM_DA*)da->data;
136: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
137: dd->w = dof;
138: da->bs = dof;
139: return(0);
140: }
142: /*@
143: DMDAGetDof - Gets the number of degrees of freedom per vertex
145: Not collective
147: Input Parameter:
148: . da - The DMDA
150: Output Parameter:
151: . dof - Number of degrees of freedom
153: Level: intermediate
155: .keywords: distributed array, degrees of freedom
156: .seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
157: @*/
158: PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
159: {
160: DM_DA *dd = (DM_DA *) da->data;
165: *dof = dd->w;
166: return(0);
167: }
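/*
   A small sketch (not part of da.c) of how the value returned by DMDAGetDof() is typically used
   together with DMDAVecGetArrayDOF(); it assumes a 2d DMDA da and a global vector U obtained
   from it with DMCreateGlobalVector().
*/
static PetscErrorCode ZeroAllComponents(DM da,Vec U)
{
  PetscErrorCode ierr;
  PetscInt       dof,i,j,c,xs,ys,xm,ym;
  PetscScalar    ***u;

  PetscFunctionBegin;
  ierr = DMDAGetDof(da,&dof);CHKERRQ(ierr);
  ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayDOF(da,U,&u);CHKERRQ(ierr);
  for (j=ys; j<ys+ym; j++)
    for (i=xs; i<xs+xm; i++)
      for (c=0; c<dof; c++) u[j][i][c] = 0.0;   /* [j][i][component] ordering in 2d */
  ierr = DMDAVecRestoreArrayDOF(da,U,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}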
169: /*@
170: DMDAGetOverlap - Gets the size of the per-processor overlap.
172: Not collective
174: Input Parameter:
175: . da - The DMDA
177: Output Parameters:
178: + x - Overlap in the x direction
179: . y - Overlap in the y direction
180: - z - Overlap in the z direction
182: Level: intermediate
184: .keywords: distributed array, overlap, domain decomposition
185: .seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
186: @*/
187: PetscErrorCode DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
188: {
189: DM_DA *dd = (DM_DA*)da->data;
193: if (x) *x = dd->xol;
194: if (y) *y = dd->yol;
195: if (z) *z = dd->zol;
196: return(0);
197: }
199: /*@
200: DMDASetOverlap - Sets the size of the per-processor overlap.
202: Not collective
204: Input Parameters:
205: + da - The DMDA
206: . x - Overlap in the x direction
207: . y - Overlap in the y direction
208: - z - Overlap in the z direction
210: Level: intermediate
212: .keywords: distributed array, overlap, domain decomposition
213: .seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
214: @*/
215: PetscErrorCode DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
216: {
217: DM_DA *dd = (DM_DA*)da->data;
224: dd->xol = x;
225: dd->yol = y;
226: dd->zol = z;
227: return(0);
228: }
231: /*@
232: DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
234: Not collective
236: Input Parameter:
237: . da - The DMDA
239: Output Parameter:
240: . Nsub - Number of local subdomains created upon decomposition
242: Level: intermediate
244: .keywords: distributed array, domain decomposition
245: .seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
246: @*/
247: PetscErrorCode DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
248: {
249: DM_DA *dd = (DM_DA*)da->data;
253: if (Nsub) *Nsub = dd->Nsub;
254: return(0);
255: }
257: /*@
258: DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
260: Not collective
262: Input Parameters:
263: + da - The DMDA
264: - Nsub - The number of local subdomains requested
266: Level: intermediate
268: .keywords: distributed array, domain decomposition
269: .seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
270: @*/
271: PetscErrorCode DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
272: {
273: DM_DA *dd = (DM_DA*)da->data;
278: dd->Nsub = Nsub;
279: return(0);
280: }
282: /*@
283: DMDASetOffset - Sets the index offset of the DA.
285: Collective on DA
287: Input Parameters:
288: + da - The DMDA
289: . xo - The offset in the x direction
290: . yo - The offset in the y direction
291: . zo - The offset in the z direction
- Mo,No,Po - The global sizes in the x, y and z directions (see DMDAGetOffset())
293: Level: intermediate
295: Notes:
296: This is used primarily to overlap a computation on a local DA with that on a global DA without
297: changing boundary conditions or subdomain features that depend upon the global offsets.
299: .keywords: distributed array, degrees of freedom
300: .seealso: DMDAGetOffset(), DMDAVecGetArray()
301: @*/
302: PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
303: {
305: DM_DA *dd = (DM_DA*)da->data;
315: dd->xo = xo;
316: dd->yo = yo;
317: dd->zo = zo;
318: dd->Mo = Mo;
319: dd->No = No;
320: dd->Po = Po;
322: if (da->coordinateDM) {
323: DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);
324: }
325: return(0);
326: }
328: /*@
329: DMDAGetOffset - Gets the index offset of the DA.
331: Not collective
333: Input Parameter:
334: . da - The DMDA
336: Output Parameters:
337: + xo - The offset in the x direction
338: . yo - The offset in the y direction
339: . zo - The offset in the z direction
340: . Mo - The global size in the x direction
341: . No - The global size in the y direction
342: - Po - The global size in the z direction
344: Level: intermediate
346: .keywords: distributed array, degrees of freedom
347: .seealso: DMDASetOffset(), DMDAVecGetArray()
348: @*/
349: PetscErrorCode DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
350: {
351: DM_DA *dd = (DM_DA*)da->data;
355: if (xo) *xo = dd->xo;
356: if (yo) *yo = dd->yo;
357: if (zo) *zo = dd->zo;
358: if (Mo) *Mo = dd->Mo;
359: if (No) *No = dd->No;
360: if (Po) *Po = dd->Po;
361: return(0);
362: }
364: /*@
365: DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.
367: Not collective
369: Input Parameter:
370: . da - The DMDA
372: Output Parameters:
373: + xs - The start of the region in x
374: . ys - The start of the region in y
375: . zs - The start of the region in z
376: . xm - The size of the region in x
377: . ym - The size of the region in y
378: - zm - The size of the region in z
380: Level: intermediate
382: .keywords: distributed array, degrees of freedom
383: .seealso: DMDAGetOffset(), DMDAVecGetArray()
384: @*/
385: PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
386: {
387: DM_DA *dd = (DM_DA*)da->data;
391: if (xs) *xs = dd->nonxs;
392: if (ys) *ys = dd->nonys;
393: if (zs) *zs = dd->nonzs;
394: if (xm) *xm = dd->nonxm;
395: if (ym) *ym = dd->nonym;
396: if (zm) *zm = dd->nonzm;
397: return(0);
398: }
401: /*@
402: DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.
404: Collective on DA
406: Input Parameters:
407: + da - The DMDA
408: . xs - The start of the region in x
409: . ys - The start of the region in y
410: . zs - The start of the region in z
411: . xm - The size of the region in x
412: . ym - The size of the region in y
413: - zm - The size of the region in z
415: Level: intermediate
417: .keywords: distributed array, degrees of freedom
418: .seealso: DMDAGetOffset(), DMDAVecGetArray()
419: @*/
420: PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
421: {
422: DM_DA *dd = (DM_DA*)da->data;
432: dd->nonxs = xs;
433: dd->nonys = ys;
434: dd->nonzs = zs;
435: dd->nonxm = xm;
436: dd->nonym = ym;
437: dd->nonzm = zm;
439: return(0);
440: }
442: /*@
443: DMDASetStencilType - Sets the type of the communication stencil
445: Logically Collective on DMDA
447: Input Parameters:
448: + da - The DMDA
449: - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
451: Level: intermediate
453: .keywords: distributed array, stencil
454: .seealso: DMDACreate(), DMDestroy(), DMDA
455: @*/
456: PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
457: {
458: DM_DA *dd = (DM_DA*)da->data;
463: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
464: dd->stencil_type = stype;
465: return(0);
466: }
468: /*@
469: DMDAGetStencilType - Gets the type of the communication stencil
471: Not collective
473: Input Parameter:
474: . da - The DMDA
476: Output Parameter:
477: . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
479: Level: intermediate
481: .keywords: distributed array, stencil
482: .seealso: DMDACreate(), DMDestroy(), DMDA
483: @*/
484: PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
485: {
486: DM_DA *dd = (DM_DA*)da->data;
491: *stype = dd->stencil_type;
492: return(0);
493: }
495: /*@
496: DMDASetStencilWidth - Sets the width of the communication stencil
498: Logically Collective on DMDA
500: Input Parameters:
501: + da - The DMDA
502: - width - The stencil width
504: Level: intermediate
506: .keywords: distributed array, stencil
507: .seealso: DMDACreate(), DMDestroy(), DMDA
508: @*/
509: PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
510: {
511: DM_DA *dd = (DM_DA*)da->data;
516: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
517: dd->s = width;
518: return(0);
519: }
521: /*@
522: DMDAGetStencilWidth - Gets the width of the communication stencil
524: Not collective
526: Input Parameter:
527: . da - The DMDA
529: Output Parameter:
530: . width - The stencil width
532: Level: intermediate
534: .keywords: distributed array, stencil
535: .seealso: DMDACreate(), DMDestroy(), DMDA
536: @*/
537: PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
538: {
539: DM_DA *dd = (DM_DA *) da->data;
544: *width = dd->s;
545: return(0);
546: }
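/*
   A sketch (not part of da.c) of querying the stencil configuration of an existing DMDA,
   assuming a 2d DMDA; the ghosted patch returned by DMDAGetGhostCorners() extends the owned
   patch by the stencil width set above.
*/
static PetscErrorCode ReportStencil(DM da)
{
  PetscErrorCode  ierr;
  PetscInt        s,gxs,gys,gxm,gym;
  DMDAStencilType st;

  PetscFunctionBegin;
  ierr = DMDAGetStencilWidth(da,&s);CHKERRQ(ierr);
  ierr = DMDAGetStencilType(da,&st);CHKERRQ(ierr);
  ierr = DMDAGetGhostCorners(da,&gxs,&gys,NULL,&gxm,&gym,NULL);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_SELF,"width %D, %s stencil, ghosted patch %D x %D\n",
                     s,st == DMDA_STENCIL_BOX ? "box" : "star",gxm,gym);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}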
548: static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
549: {
550: PetscInt i,sum;
553: if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
554: for (i=sum=0; i<m; i++) sum += lx[i];
555: if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
556: return(0);
557: }
559: /*@
560: DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
562: Logically Collective on DMDA
564: Input Parameters:
565: + da - The DMDA
566: . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
567: . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
568: - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
570: Level: intermediate
572: Note: these numbers are NOT multiplied by the number of dof per node.
574: .keywords: distributed array
575: .seealso: DMDACreate(), DMDestroy(), DMDA
576: @*/
577: PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
578: {
580: DM_DA *dd = (DM_DA*)da->data;
584: if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
585: if (lx) {
586: if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
587: DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);
588: if (!dd->lx) {
589: PetscMalloc1(dd->m, &dd->lx);
590: }
591: PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));
592: }
593: if (ly) {
594: if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
595: DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);
596: if (!dd->ly) {
597: PetscMalloc1(dd->n, &dd->ly);
598: }
599: PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));
600: }
601: if (lz) {
602: if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
603: DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);
604: if (!dd->lz) {
605: PetscMalloc1(dd->p, &dd->lz);
606: }
607: PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));
608: }
609: return(0);
610: }
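/*
   A sketch (not part of da.c) of prescribing explicit ownership ranges for a 10 x 10 grid,
   assuming a communicator with exactly 4 ranks arranged 2 x 2; each array must have one entry
   per process in that direction and sum to the corresponding global size.
*/
static PetscErrorCode CreateDAWithExplicitRanges(MPI_Comm comm,DM *da)
{
  PetscErrorCode ierr;
  PetscInt       lx[2] = {6,4},ly[2] = {5,5};   /* 6+4 = 10 and 5+5 = 10 */

  PetscFunctionBegin;
  ierr = DMDACreate(comm,da);CHKERRQ(ierr);
  ierr = DMSetDimension(*da,2);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da,10,10,1);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da,2,2,PETSC_DECIDE);CHKERRQ(ierr);   /* procs must be set before the ranges */
  ierr = DMDASetBoundaryType(*da,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da,1);CHKERRQ(ierr);
  ierr = DMDASetStencilType(*da,DMDA_STENCIL_STAR);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da,1);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da,lx,ly,NULL);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}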
612: /*@
613: DMDASetInterpolationType - Sets the type of interpolation that will be
614: returned by DMCreateInterpolation()
616: Logically Collective on DMDA
618: Input Parameters:
619: + da - initial distributed array
620: - ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms
622: Level: intermediate
624: Notes:
625: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()
627: .keywords: distributed array, interpolation
629: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
630: @*/
631: PetscErrorCode DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
632: {
633: DM_DA *dd = (DM_DA*)da->data;
638: dd->interptype = ctype;
639: return(0);
640: }
642: /*@
643: DMDAGetInterpolationType - Gets the type of interpolation that will be
644: used by DMCreateInterpolation()
646: Not Collective
648: Input Parameter:
649: . da - distributed array
651: Output Parameter:
652: . ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)
654: Level: intermediate
656: .keywords: distributed array, interpolation
658: .seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
659: @*/
660: PetscErrorCode DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
661: {
662: DM_DA *dd = (DM_DA*)da->data;
667: *ctype = dd->interptype;
668: return(0);
669: }
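/*
   A sketch (not part of da.c) of selecting piecewise-constant (DMDA_Q0) interpolation; per the
   note above, the type is set on the coarser DMDA before DMCreateInterpolation() is called.
   The names dac and daf for the coarse and fine DMDA are assumptions of this example.
*/
static PetscErrorCode BuildQ0Interpolation(DM dac,DM daf,Mat *Interp)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDASetInterpolationType(dac,DMDA_Q0);CHKERRQ(ierr);   /* set on the coarser DMDA */
  ierr = DMCreateInterpolation(dac,daf,Interp,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}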
671: /*@C
672: DMDAGetNeighbors - Gets an array containing the MPI ranks of all the current
673: process's neighbors.
675: Not Collective
677: Input Parameter:
678: . da - the DMDA object
680: Output Parameter:
681: . ranks - the neighbors' ranks, stored with the x index increasing most rapidly;
682: this process itself is included in the list
684: Notes:
685: In 2d the array is of length 9, in 3d of length 27.
686: Not supported in 1d.
687: Do not free the array; it is freed when the DMDA is destroyed.
689: Fortran Notes:
690: In fortran you must pass in an array of the appropriate length.
692: Level: intermediate
694: @*/
695: PetscErrorCode DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
696: {
697: DM_DA *dd = (DM_DA*)da->data;
701: *ranks = dd->neighbors;
702: return(0);
703: }
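/*
   A sketch (not part of da.c) of reading the neighbor list, assuming a 2d DMDA: the 9 entries
   form a 3 x 3 block with the x index increasing most rapidly, so entry 4 is this process.
*/
static PetscErrorCode PrintNeighbors2d(DM da)
{
  PetscErrorCode     ierr;
  const PetscMPIInt *nbrs;
  PetscInt           k;

  PetscFunctionBegin;
  ierr = DMDAGetNeighbors(da,&nbrs);CHKERRQ(ierr);   /* do not free nbrs */
  for (k=0; k<9; k++) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"neighbor %D: rank %d\n",k,(int)nbrs[k]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}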
705: /*@C
706: DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
708: Not Collective
710: Input Parameter:
711: . da - the DMDA object
713: Output Parameters:
714: + lx - ownership along x direction (optional)
715: . ly - ownership along y direction (optional)
716: - lz - ownership along z direction (optional)
718: Level: intermediate
720: Note: these correspond to the optional final arguments passed to DMDACreate1d(), DMDACreate2d(), DMDACreate3d()
722: In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; their required lengths are
723: the sixth, seventh and eighth arguments returned by DMDAGetInfo()
725: In C you should not free these arrays, nor change the values in them. They will only have valid values while the
726: DMDA they came from still exists (has not been destroyed).
728: These numbers are NOT multiplied by the number of dof per node.
732: .seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
733: @*/
734: PetscErrorCode DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
735: {
736: DM_DA *dd = (DM_DA*)da->data;
740: if (lx) *lx = dd->lx;
741: if (ly) *ly = dd->ly;
742: if (lz) *lz = dd->lz;
743: return(0);
744: }
746: /*@
747: DMDASetRefinementFactor - Sets the ratios by which the DMDA grid is refined
749: Logically Collective on DMDA
751: Input Parameters:
752: + da - the DMDA object
753: . refine_x - ratio of fine grid to coarse in x direction (2 by default)
754: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
755: - refine_z - ratio of fine grid to coarse in z direction (2 by default)
757: Options Database:
758: + -da_refine_x - refinement ratio in x direction
759: . -da_refine_y - refinement ratio in y direction
760: - -da_refine_z - refinement ratio in z direction
762: Level: intermediate
764: Notes:
765: Pass PETSC_IGNORE to leave a value unchanged
767: .seealso: DMRefine(), DMDAGetRefinementFactor()
768: @*/
769: PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
770: {
771: DM_DA *dd = (DM_DA*)da->data;
779: if (refine_x > 0) dd->refine_x = refine_x;
780: if (refine_y > 0) dd->refine_y = refine_y;
781: if (refine_z > 0) dd->refine_z = refine_z;
782: return(0);
783: }
785: /*@C
786: DMDAGetRefinementFactor - Gets the ratios by which the DMDA grid is refined
788: Not Collective
790: Input Parameter:
791: . da - the DMDA object
793: Output Parameters:
794: + refine_x - ratio of fine grid to coarse in x direction (2 by default)
795: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
796: - refine_z - ratio of fine grid to coarse in z direction (2 by default)
798: Level: intermediate
800: Notes:
801: Pass NULL for values you do not need
803: .seealso: DMRefine(), DMDASetRefinementFactor()
804: @*/
805: PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
806: {
807: DM_DA *dd = (DM_DA*)da->data;
811: if (refine_x) *refine_x = dd->refine_x;
812: if (refine_y) *refine_y = dd->refine_y;
813: if (refine_z) *refine_z = dd->refine_z;
814: return(0);
815: }
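/*
   A sketch (not part of da.c) of raising the refinement ratio before refining, assuming a 2d
   DMDA: with factors of 4 a 33 x 33 non-periodic grid refines to 129 x 129 (1 + 4*(33-1)).
   PETSC_IGNORE may be passed for a direction whose factor should stay unchanged.
*/
static PetscErrorCode RefineByFour(DM da,DM *daf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDASetRefinementFactor(da,4,4,4);CHKERRQ(ierr);   /* z factor is unused by a 2d DMDA */
  ierr = DMRefine(da,PetscObjectComm((PetscObject)da),daf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}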
817: /*@C
818: DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.
820: Logically Collective on DMDA
822: Input Parameters:
823: + da - the DMDA object
824: - f - the function that allocates the matrix for that specific DMDA
826: Level: developer
828: Notes:
829: See DMDASetBlockFills(), which provides a simple way to supply the nonzero structure for
830: the diagonal and off-diagonal blocks of the matrix
832: Not supported from Fortran
834: .seealso: DMCreateMatrix(), DMDASetBlockFills()
835: @*/
836: PetscErrorCode DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
837: {
840: da->ops->creatematrix = f;
841: return(0);
842: }
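/*
   A sketch (not part of da.c) of a user-supplied matrix-creation routine registered via
   DMDASetGetMatrix(). The helper name and the plain AIJ matrix without preallocation are
   assumptions of this example, not a PETSc-provided routine.
*/
static PetscErrorCode MyDACreateMatrix(DM dm,Mat *A)
{
  PetscErrorCode ierr;
  Vec            v;
  PetscInt       nlocal,nglobal;

  PetscFunctionBegin;
  ierr = DMCreateGlobalVector(dm,&v);CHKERRQ(ierr);   /* borrow the DM's row layout */
  ierr = VecGetLocalSize(v,&nlocal);CHKERRQ(ierr);
  ierr = VecGetSize(v,&nglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,nlocal,nlocal,nglobal,nglobal);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* Registered before the matrix is first requested:
     DMDASetGetMatrix(da,MyDACreateMatrix);
     DMCreateMatrix(da,&J);    now returns a matrix built by MyDACreateMatrix()          */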
844: /*
845: Creates "balanced" ownership ranges after refinement, constrained by the need for the
846: fine grid boundaries to fall within one stencil width of the coarse partition.
848: Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
849: */
850: static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
851: {
852: PetscInt i,totalc = 0,remaining,startc = 0,startf = 0;
856: if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
857: if (ratio == 1) {
858: PetscMemcpy(lf,lc,m*sizeof(lc[0]));
859: return(0);
860: }
861: for (i=0; i<m; i++) totalc += lc[i];
862: remaining = (!periodic) + ratio * (totalc - (!periodic));
863: for (i=0; i<m; i++) {
864: PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
865: if (i == m-1) lf[i] = want;
866: else {
867: const PetscInt nextc = startc + lc[i];
868: /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
869: * coarse stencil width of the first coarse node in the next subdomain. */
870: while ((startf+want)/ratio < nextc - stencil_width) want++;
871: /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
872: * coarse stencil width of the last coarse node in the current subdomain. */
873: while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
874: /* Make sure all constraints are satisfied */
875: if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
876: || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
877: }
878: lf[i] = want;
879: startc += lc[i];
880: startf += lf[i];
881: remaining -= lf[i];
882: }
883: return(0);
884: }
886: /*
887: Creates "balanced" ownership ranges after coarsening, constrained by the need for the
888: fine grid boundaries to fall within one stencil width of the coarse partition.
890: Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
891: */
892: static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
893: {
894: PetscInt i,totalf,remaining,startc,startf;
898: if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
899: if (ratio == 1) {
900: PetscMemcpy(lc,lf,m*sizeof(lf[0]));
901: return(0);
902: }
903: for (i=0,totalf=0; i<m; i++) totalf += lf[i];
904: remaining = (!periodic) + (totalf - (!periodic)) / ratio;
905: for (i=0,startc=0,startf=0; i<m; i++) {
906: PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
907: if (i == m-1) lc[i] = want;
908: else {
909: const PetscInt nextf = startf+lf[i];
910: /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
911: * node is within one stencil width. */
912: while (nextf/ratio < startc+want-stencil_width) want--;
913: /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
914: * fine node is within one stencil width. */
915: while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
916: if (want < 0 || want > remaining
917: || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
918: }
919: lc[i] = want;
920: startc += lc[i];
921: startf += lf[i];
922: remaining -= lc[i];
923: }
924: return(0);
925: }
927: PetscErrorCode DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
928: {
930: PetscInt M,N,P,i,dim;
931: DM da2;
932: DM_DA *dd = (DM_DA*)da->data,*dd2;
938: DMGetDimension(da, &dim);
939: if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
940: M = dd->refine_x*dd->M;
941: } else {
942: M = 1 + dd->refine_x*(dd->M - 1);
943: }
944: if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
945: if (dim > 1) {
946: N = dd->refine_y*dd->N;
947: } else {
948: N = 1;
949: }
950: } else {
951: N = 1 + dd->refine_y*(dd->N - 1);
952: }
953: if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
954: if (dim > 2) {
955: P = dd->refine_z*dd->P;
956: } else {
957: P = 1;
958: }
959: } else {
960: P = 1 + dd->refine_z*(dd->P - 1);
961: }
962: DMDACreate(PetscObjectComm((PetscObject)da),&da2);
963: DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);
964: DMSetDimension(da2,dim);
965: DMDASetSizes(da2,M,N,P);
966: DMDASetNumProcs(da2,dd->m,dd->n,dd->p);
967: DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);
968: DMDASetDof(da2,dd->w);
969: DMDASetStencilType(da2,dd->stencil_type);
970: DMDASetStencilWidth(da2,dd->s);
971: if (dim == 3) {
972: PetscInt *lx,*ly,*lz;
973: PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);
974: DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);
975: DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);
976: DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);
977: DMDASetOwnershipRanges(da2,lx,ly,lz);
978: PetscFree3(lx,ly,lz);
979: } else if (dim == 2) {
980: PetscInt *lx,*ly;
981: PetscMalloc2(dd->m,&lx,dd->n,&ly);
982: DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);
983: DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);
984: DMDASetOwnershipRanges(da2,lx,ly,NULL);
985: PetscFree2(lx,ly);
986: } else if (dim == 1) {
987: PetscInt *lx;
988: PetscMalloc1(dd->m,&lx);
989: DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);
990: DMDASetOwnershipRanges(da2,lx,NULL,NULL);
991: PetscFree(lx);
992: }
993: dd2 = (DM_DA*)da2->data;
995: /* allow overloaded (user replaced) operations to be inherited by refinement clones */
996: da2->ops->creatematrix = da->ops->creatematrix;
997: /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
998: da2->ops->getcoloring = da->ops->getcoloring;
999: dd2->interptype = dd->interptype;
1001: /* copy fill information if given */
1002: if (dd->dfill) {
1003: PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);
1004: PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));
1005: }
1006: if (dd->ofill) {
1007: PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);
1008: PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));
1009: }
1010: /* copy the refine information */
1011: dd2->coarsen_x = dd2->refine_x = dd->refine_x;
1012: dd2->coarsen_y = dd2->refine_y = dd->refine_y;
1013: dd2->coarsen_z = dd2->refine_z = dd->refine_z;
1015: if (dd->refine_z_hier) {
1016: if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
1017: dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
1018: }
1019: if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
1020: dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
1021: }
1022: dd2->refine_z_hier_n = dd->refine_z_hier_n;
1023: PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);
1024: PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));
1025: }
1026: if (dd->refine_y_hier) {
1027: if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
1028: dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
1029: }
1030: if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
1031: dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
1032: }
1033: dd2->refine_y_hier_n = dd->refine_y_hier_n;
1034: PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);
1035: PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));
1036: }
1037: if (dd->refine_x_hier) {
1038: if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
1039: dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
1040: }
1041: if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
1042: dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
1043: }
1044: dd2->refine_x_hier_n = dd->refine_x_hier_n;
1045: PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);
1046: PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));
1047: }
1050: /* copy vector type information */
1051: DMSetVecType(da2,da->vectype);
1053: dd2->lf = dd->lf;
1054: dd2->lj = dd->lj;
1056: da2->leveldown = da->leveldown;
1057: da2->levelup = da->levelup + 1;
1059: DMSetUp(da2);
1061: /* interpolate coordinates if they are set on the coarse grid */
1062: if (da->coordinates) {
1063: DM cdaf,cdac;
1064: Vec coordsc,coordsf;
1065: Mat II;
1067: DMGetCoordinateDM(da,&cdac);
1068: DMGetCoordinates(da,&coordsc);
1069: DMGetCoordinateDM(da2,&cdaf);
1070: /* force creation of the coordinate vector */
1071: DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);
1072: DMGetCoordinates(da2,&coordsf);
1073: DMCreateInterpolation(cdac,cdaf,&II,NULL);
1074: MatInterpolate(II,coordsc,coordsf);
1075: MatDestroy(&II);
1076: }
1078: for (i=0; i<da->bs; i++) {
1079: const char *fieldname;
1080: DMDAGetFieldName(da,i,&fieldname);
1081: DMDASetFieldName(da2,i,fieldname);
1082: }
1084: *daref = da2;
1085: return(0);
1086: }
1089: PetscErrorCode DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
1090: {
1092: PetscInt M,N,P,i,dim;
1093: DM da2;
1094: DM_DA *dd = (DM_DA*)da->data,*dd2;
1100: DMGetDimension(da, &dim);
1101: if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1102: M = dd->M / dd->coarsen_x;
1103: } else {
1104: M = 1 + (dd->M - 1) / dd->coarsen_x;
1105: }
1106: if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1107: if (dim > 1) {
1108: N = dd->N / dd->coarsen_y;
1109: } else {
1110: N = 1;
1111: }
1112: } else {
1113: N = 1 + (dd->N - 1) / dd->coarsen_y;
1114: }
1115: if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1116: if (dim > 2) {
1117: P = dd->P / dd->coarsen_z;
1118: } else {
1119: P = 1;
1120: }
1121: } else {
1122: P = 1 + (dd->P - 1) / dd->coarsen_z;
1123: }
1124: DMDACreate(PetscObjectComm((PetscObject)da),&da2);
1125: DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);
1126: DMSetDimension(da2,dim);
1127: DMDASetSizes(da2,M,N,P);
1128: DMDASetNumProcs(da2,dd->m,dd->n,dd->p);
1129: DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);
1130: DMDASetDof(da2,dd->w);
1131: DMDASetStencilType(da2,dd->stencil_type);
1132: DMDASetStencilWidth(da2,dd->s);
1133: if (dim == 3) {
1134: PetscInt *lx,*ly,*lz;
1135: PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);
1136: DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);
1137: DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);
1138: DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);
1139: DMDASetOwnershipRanges(da2,lx,ly,lz);
1140: PetscFree3(lx,ly,lz);
1141: } else if (dim == 2) {
1142: PetscInt *lx,*ly;
1143: PetscMalloc2(dd->m,&lx,dd->n,&ly);
1144: DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);
1145: DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);
1146: DMDASetOwnershipRanges(da2,lx,ly,NULL);
1147: PetscFree2(lx,ly);
1148: } else if (dim == 1) {
1149: PetscInt *lx;
1150: PetscMalloc1(dd->m,&lx);
1151: DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);
1152: DMDASetOwnershipRanges(da2,lx,NULL,NULL);
1153: PetscFree(lx);
1154: }
1155: dd2 = (DM_DA*)da2->data;
1157: /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
1158: /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
1159: da2->ops->creatematrix = da->ops->creatematrix;
1160: da2->ops->getcoloring = da->ops->getcoloring;
1161: dd2->interptype = dd->interptype;
1163: /* copy fill information if given */
1164: if (dd->dfill) {
1165: PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);
1166: PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));
1167: }
1168: if (dd->ofill) {
1169: PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);
1170: PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));
1171: }
1172: /* copy the refine information */
1173: dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
1174: dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
1175: dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;
1177: if (dd->refine_z_hier) {
1178: if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
1179: dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
1180: }
1181: if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
1182: dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
1183: }
1184: dd2->refine_z_hier_n = dd->refine_z_hier_n;
1185: PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);
1186: PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));
1187: }
1188: if (dd->refine_y_hier) {
1189: if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
1190: dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
1191: }
1192: if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
1193: dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
1194: }
1195: dd2->refine_y_hier_n = dd->refine_y_hier_n;
1196: PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);
1197: PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));
1198: }
1199: if (dd->refine_x_hier) {
1200: if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
1201: dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
1202: }
1203: if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
1204: dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
1205: }
1206: dd2->refine_x_hier_n = dd->refine_x_hier_n;
1207: PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);
1208: PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));
1209: }
1211: /* copy vector type information */
1212: DMSetVecType(da2,da->vectype);
1214: dd2->lf = dd->lf;
1215: dd2->lj = dd->lj;
1217: da2->leveldown = da->leveldown + 1;
1218: da2->levelup = da->levelup;
1220: DMSetUp(da2);
1222: /* inject coordinates if they are set on the fine grid */
1223: if (da->coordinates) {
1224: DM cdaf,cdac;
1225: Vec coordsc,coordsf;
1226: Mat inject;
1227: VecScatter vscat;
1229: DMGetCoordinateDM(da,&cdaf);
1230: DMGetCoordinates(da,&coordsf);
1231: DMGetCoordinateDM(da2,&cdac);
1232: /* force creation of the coordinate vector */
1233: DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);
1234: DMGetCoordinates(da2,&coordsc);
1236: DMCreateInjection(cdac,cdaf,&inject);
1237: MatScatterGetVecScatter(inject,&vscat);
1238: VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);
1239: VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);
1240: MatDestroy(&inject);
1241: }
1243: for (i=0; i<da->bs; i++) {
1244: const char *fieldname;
1245: DMDAGetFieldName(da,i,&fieldname);
1246: DMDASetFieldName(da2,i,fieldname);
1247: }
1249: *daref = da2;
1250: return(0);
1251: }
1253: PetscErrorCode DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1254: {
1256: PetscInt i,n,*refx,*refy,*refz;
1260: if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1261: if (nlevels == 0) return(0);
1264: /* Get refinement factors, defaults taken from the coarse DMDA */
1265: PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);
1266: for (i=0; i<nlevels; i++) {
1267: DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);
1268: }
1269: n = nlevels;
1270: PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);
1271: n = nlevels;
1272: PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);
1273: n = nlevels;
1274: PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);
1276: DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);
1277: DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);
1278: for (i=1; i<nlevels; i++) {
1279: DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);
1280: DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);
1281: }
1282: PetscFree3(refx,refy,refz);
1283: return(0);
1284: }
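/*
   A sketch (not part of da.c) of building a three-level refinement hierarchy through the
   public DMRefineHierarchy() interface; per-level ratios can also be given on the command
   line, e.g. -da_refine_hierarchy_x 2,4,4 (and likewise for y and z), which overrides the
   factor stored on the coarse DMDA.
*/
static PetscErrorCode BuildHierarchySketch(DM da,DM fine[3])
{
  PetscErrorCode ierr;
  PetscInt       l;

  PetscFunctionBegin;
  ierr = DMRefineHierarchy(da,3,fine);CHKERRQ(ierr);   /* fine[0] is the first refinement of da */
  for (l=0; l<3; l++) {
    ierr = DMView(fine[l],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}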
1286: PetscErrorCode DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1287: {
1289: PetscInt i;
1293: if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1294: if (nlevels == 0) return(0);
1296: DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);
1297: for (i=1; i<nlevels; i++) {
1298: DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);
1299: }
1300: return(0);
1301: }
1303: #include <petscgll.h>
1305: PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscGLL *gll)
1306: {
1308: PetscInt i,j,n = gll->n,xs,xn,q;
1309: PetscScalar *xx;
1310: PetscReal h;
1311: Vec x;
1312: DM_DA *da = (DM_DA*)dm->data;
1315: if (da->bx != DM_BOUNDARY_PERIODIC) {
1316: DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
1317: q = (q-1)/(n-1); /* number of spectral elements */
1318: h = 2.0/q;
1319: DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL);
1320: xs = xs/(n-1);
1321: xn = xn/(n-1);
1322: DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.);
1323: DMGetCoordinates(dm,&x);
1324: DMDAVecGetArray(dm,x,&xx);
1326: /* loop over local spectral elements */
1327: for (j=xs; j<xs+xn; j++) {
1328: /*
1329: Except for the first process, each process starts on the second GLL point of the first element on that process
1330: */
1331: for (i= (j == xs && xs > 0)? 1 : 0; i<n; i++) {
1332: xx[j*(n-1) + i] = -1.0 + h*j + h*(gll->nodes[i]+1.0)/2.;
1333: }
1334: }
1335: DMDAVecRestoreArray(dm,x,&xx);
1336: } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
1337: return(0);
1338: }
1340: /*@
1342: DMDASetGLLCoordinates - Sets the global coordinates, from -1 to 1, to the GLL points of as many GLL elements as fit the number of grid points
1344: Collective on DM
1346: Input Parameters:
1347: + da - the DMDA object
1348: - gll - the GLL object
1350: Notes:
1351: the parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1352: on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
1353: periodic or not.
1355: Level: advanced
1357: .seealso: DMDACreate(), PetscGLLCreate(), DMGetCoordinates()
1358: @*/
1359: PetscErrorCode DMDASetGLLCoordinates(DM da,PetscGLL *gll)
1360: {
1364: if (da->dim == 1) {
1365: DMDASetGLLCoordinates_1d(da,gll);
1366: } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
1367: return(0);
1368: }
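/*
   A sketch (not part of da.c) of attaching GLL coordinates to a 1d DMDA; it assumes the
   petsc-3.11 PetscGLL API and the PETSCGLL_VIA_LINEARALGEBRA construction type. With q
   elements of n GLL points each, a non-periodic grid needs q*(n-1)+1 points, so the
   divisibility condition in the note above holds.
*/
static PetscErrorCode CreateGLLGrid1d(MPI_Comm comm,PetscInt n,PetscInt q,DM *da)
{
  PetscErrorCode ierr;
  PetscGLL       gll;

  PetscFunctionBegin;
  ierr = PetscGLLCreate(n,PETSCGLL_VIA_LINEARALGEBRA,&gll);CHKERRQ(ierr);
  ierr = DMDACreate1d(comm,DM_BOUNDARY_NONE,q*(n-1)+1,1,1,NULL,da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  ierr = DMDASetGLLCoordinates(*da,&gll);CHKERRQ(ierr);
  ierr = PetscGLLDestroy(&gll);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}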
1370: PETSC_INTERN PetscErrorCode DMGetCompatibility_DA(DM da1,DM dm2,PetscBool *compatible,PetscBool *set)
1371: {
1373: DM_DA *dd1 = (DM_DA*)da1->data,*dd2;
1374: DM da2;
1375: DMType dmtype2;
1376: PetscBool isda,compatibleLocal;
1377: PetscInt i;
1380: if (!da1->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da1),PETSC_ERR_ARG_WRONGSTATE,"DMSetUp() must be called on first DM before DMGetCompatibility()");
1381: DMGetType(dm2,&dmtype2);
1382: PetscStrcmp(dmtype2,DMDA,&isda);
1383: if (isda) {
1384: da2 = dm2;
1385: dd2 = (DM_DA*)da2->data;
1386: if (!da2->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da2),PETSC_ERR_ARG_WRONGSTATE,"DMSetUp() must be called on second DM before DMGetCompatibility()");
1387: compatibleLocal = (PetscBool)(da1->dim == da2->dim);
1388: if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
1389: /* Global size ranks Boundary type */
1390: if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
1391: if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
1392: if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
1393: if (compatibleLocal) {
1394: for (i=0; i<dd1->m; ++i) {
1395: compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size */
1396: }
1397: }
1398: if (compatibleLocal && da1->dim > 1) {
1399: for (i=0; i<dd1->n; ++i) {
1400: compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
1401: }
1402: }
1403: if (compatibleLocal && da1->dim > 2) {
1404: for (i=0; i<dd1->p; ++i) {
1405: compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
1406: }
1407: }
1408: *compatible = compatibleLocal;
1409: *set = PETSC_TRUE;
1410: } else {
1411: /* Decline to determine compatibility with other DM types */
1412: *set = PETSC_FALSE;
1413: }
1414: return(0);
1415: }
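/*
   A sketch (not part of da.c) of the public entry point that reaches DMGetCompatibility_DA():
   checking whether two DMDAs share an identical parallel layout before, say, using their
   vectors interchangeably index-wise. The helper name is an assumption of this example.
*/
static PetscErrorCode CheckSameLayout(DM da1,DM da2,PetscBool *ok)
{
  PetscErrorCode ierr;
  PetscBool      compatible,set;

  PetscFunctionBegin;
  ierr = DMGetCompatibility(da1,da2,&compatible,&set);CHKERRQ(ierr);
  *ok  = (PetscBool)(set && compatible);   /* set == PETSC_FALSE means compatibility was not determined */
  PetscFunctionReturn(0);
}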