Actual source code: ex5.c
petsc-3.7.1 2016-05-15
2: static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";
4: /*
5: Page 21, Pattern Formation with Reaction-Diffusion Equations
7: u_t = D1 (u_xx + u_yy) - u*v^2 + gamma(1 - u)
8: v_t = D2 (v_xx + v_yy) + u*v^2 - (gamma + kappa)v
10: Unlike in the book this uses periodic boundary conditions instead of Neumann
11: (since they are easier for finite differences).
12: */
14: /*
15: Helpful runtime monitor options:
16: -ts_monitor_draw_solution
17: -draw_save -draw_save_movie
19: Helpful runtime linear solver options:
20: -pc_type mg -pc_mg_galerkin -da_refine 1 -snes_monitor -ksp_monitor -ts_view (note that these Jacobians are so well-conditioned multigrid may not be the best solver)
22: Point your browser to localhost:8080 to monitor the simulation
23: ./ex5 -ts_view_pre saws -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
25: */
27: /*
29: Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
30: Include "petscts.h" so that we can use SNES solvers. Note that this
31: file automatically includes:
32: petscsys.h - base PETSc routines petscvec.h - vectors
33: petscmat.h - matrices
34: petscis.h - index sets petscksp.h - Krylov subspace methods
35: petscviewer.h - viewers petscpc.h - preconditioners
36: petscksp.h - linear solvers
37: */
38: #include <petscdm.h>
39: #include <petscdmda.h>
40: #include <petscts.h>
42: typedef struct {
43: PetscScalar u,v;
44: } Field;
46: typedef struct {
47: PetscReal D1,D2,gamma,kappa;
48: } AppCtx;
50: /*
51: User-defined routines
52: */
53: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*),InitialConditions(DM,Vec);
54: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
58: int main(int argc,char **argv)
59: {
60: TS ts; /* ODE integrator */
61: Vec x; /* solution */
63: DM da;
64: AppCtx appctx;
66: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
67: Initialize program
68: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
69: PetscInitialize(&argc,&argv,(char*)0,help);
71: appctx.D1 = 8.0e-5;
72: appctx.D2 = 4.0e-5;
73: appctx.gamma = .024;
74: appctx.kappa = .06;
76: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
77: Create distributed array (DMDA) to manage parallel grid and vectors
78: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
79: DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,-65,-65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
80: DMDASetFieldName(da,0,"u");
81: DMDASetFieldName(da,1,"v");
83: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
84: Extract global vectors from DMDA; then duplicate for remaining
85: vectors that are the same types
86: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
87: DMCreateGlobalVector(da,&x);
89: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
90: Create timestepping solver context
91: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
92: TSCreate(PETSC_COMM_WORLD,&ts);
93: TSSetType(ts,TSARKIMEX);
94: TSARKIMEXSetFullyImplicit(ts,PETSC_TRUE);
95: TSSetDM(ts,da);
96: TSSetProblemType(ts,TS_NONLINEAR);
97: TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
98: TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);
100: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101: Set initial conditions
102: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
103: InitialConditions(da,x);
104: TSSetSolution(ts,x);
106: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
107: Set solver options
108: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
109: TSSetDuration(ts,PETSC_DEFAULT,2000.0);
110: TSSetInitialTimeStep(ts,0.0,.0001);
111: TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
112: TSSetFromOptions(ts);
114: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115: Solve ODE system
116: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117: TSSolve(ts,x);
119: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120: Free work space. All PETSc objects should be destroyed when they
121: are no longer needed.
122: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123: VecDestroy(&x);
124: TSDestroy(&ts);
125: DMDestroy(&da);
127: PetscFinalize();
128: return(0);
129: }
130: /* ------------------------------------------------------------------- */
133: /*
134:    RHSFunction - Evaluates the nonlinear right-hand side function F(t,U).
136:    Input Parameters:
137: .  ts - the TS context
138: .  ftime - current time
139: .  U - input vector
140: .  ptr - optional user-defined context, as set by TSSetRHSFunction()
141:    Output Parameter:
142: .  F - function vector
143: */
144: PetscErrorCode RHSFunction(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
145: {
146: AppCtx *appctx = (AppCtx*)ptr;
147: DM da;
149: PetscInt i,j,Mx,My,xs,ys,xm,ym;
150: PetscReal hx,hy,sx,sy;
151: PetscScalar uc,uxx,uyy,vc,vxx,vyy;
152: Field **u,**f;
153: Vec localU;
156: TSGetDM(ts,&da);
157: DMGetLocalVector(da,&localU);
158: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
160: hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
161: hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);
163: /*
164: Scatter ghost points to local vector,using the 2-step process
165: DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
166: By placing code between these two statements, computations can be
167: done while messages are in transition.
168: */
169: DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
170: DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);
172: /*
173: Get pointers to vector data
174: */
175: DMDAVecGetArrayRead(da,localU,&u);
176: DMDAVecGetArray(da,F,&f);
178: /*
179: Get local grid boundaries
180: */
181: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
183: /*
184: Compute function over the locally owned part of the grid
185: */
186: for (j=ys; j<ys+ym; j++) {
187: for (i=xs; i<xs+xm; i++) {
188: uc = u[j][i].u;
189: uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
190: uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
191: vc = u[j][i].v;
192: vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
193: vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
194: f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
195: f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
196: }
197: }
198: PetscLogFlops(16*xm*ym);
200: /*
201: Restore vectors
202: */
203: DMDAVecRestoreArrayRead(da,localU,&u);
204: DMDAVecRestoreArray(da,F,&f);
205: DMRestoreLocalVector(da,&localU);
206: return(0);
207: }
209: /* ------------------------------------------------------------------- */
212: PetscErrorCode InitialConditions(DM da,Vec U)
213: {
215: PetscInt i,j,xs,ys,xm,ym,Mx,My;
216: Field **u;
217: PetscReal hx,hy,x,y;
220: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
222: hx = 2.5/(PetscReal)(Mx);
223: hy = 2.5/(PetscReal)(My);
225: /*
226: Get pointers to vector data
227: */
228: DMDAVecGetArray(da,U,&u);
230: /*
231: Get local grid boundaries
232: */
233: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
235: /*
236: Compute function over the locally owned part of the grid
237: */
238: for (j=ys; j<ys+ym; j++) {
239: y = j*hy;
240: for (i=xs; i<xs+xm; i++) {
241: x = i*hx;
242: if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
243: else u[j][i].v = 0.0;
245: u[j][i].u = 1.0 - 2.0*u[j][i].v;
246: }
247: }
249: /*
250: Restore vectors
251: */
252: DMDAVecRestoreArray(da,U,&u);
253: return(0);
254: }
258: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
259: {
260: AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */
261: DM da;
263: PetscInt i,j,Mx,My,xs,ys,xm,ym;
264: PetscReal hx,hy,sx,sy;
265: PetscScalar uc,vc;
266: Field **u;
267: Vec localU;
268: MatStencil stencil[6],rowstencil;
269: PetscScalar entries[6];
272: TSGetDM(ts,&da);
273: DMGetLocalVector(da,&localU);
274: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
276: hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
277: hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);
279: /*
280: Scatter ghost points to local vector,using the 2-step process
281: DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
282: By placing code between these two statements, computations can be
283: done while messages are in transition.
284: */
285: DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
286: DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);
288: /*
289: Get pointers to vector data
290: */
291: DMDAVecGetArrayRead(da,localU,&u);
293: /*
294: Get local grid boundaries
295: */
296: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
298: stencil[0].k = 0;
299: stencil[1].k = 0;
300: stencil[2].k = 0;
301: stencil[3].k = 0;
302: stencil[4].k = 0;
303: stencil[5].k = 0;
304: rowstencil.k = 0;
305: /*
306: Compute function over the locally owned part of the grid
307: */
308: for (j=ys; j<ys+ym; j++) {
310: stencil[0].j = j-1;
311: stencil[1].j = j+1;
312: stencil[2].j = j;
313: stencil[3].j = j;
314: stencil[4].j = j;
315: stencil[5].j = j;
316: rowstencil.k = 0; rowstencil.j = j;
317: for (i=xs; i<xs+xm; i++) {
318: uc = u[j][i].u;
319: vc = u[j][i].v;
321: /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
322: uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
324: vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
325: vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
326: f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
328: stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
329: stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
330: stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
331: stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
332: stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
333: stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
334: rowstencil.i = i; rowstencil.c = 0;
336: MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
338: stencil[0].c = 1; entries[0] = appctx->D2*sy;
339: stencil[1].c = 1; entries[1] = appctx->D2*sy;
340: stencil[2].c = 1; entries[2] = appctx->D2*sx;
341: stencil[3].c = 1; entries[3] = appctx->D2*sx;
342: stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
343: stencil[5].c = 0; entries[5] = vc*vc;
344: rowstencil.c = 1;
346: MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
347: /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
348: }
349: }
351: /*
352: Restore vectors
353: */
354: PetscLogFlops(19*xm*ym);
355: DMDAVecRestoreArrayRead(da,localU,&u);
356: DMRestoreLocalVector(da,&localU);
357: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
358: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
359: MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
360: return(0);
361: }