Actual source code: ex4.c

static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

        rho_t = epsilon rho_xx - kappa (c_x rho)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
        c_t   = delta c_xx - lambda c - alpha rho c / (gamma + c)

     (right-hand sides as assembled in IFunction() below)

     Further discussion on Page 134 and in 2d on Page 409
*/

/*

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h  - preconditioners
     petscviewer.h - viewers
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar rho,c;
} Field;

typedef struct {
  PetscScalar epsilon,delta,alpha,beta,gamma,kappa,lambda,mu,cstar;
  PetscBool   upwind;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*),InitialConditions(DM,Vec);

int main(int argc,char **argv)
{
  TS             ts;                  /* timestepping solver */
  Vec            U;                   /* solution vector */
  DM             da;
  AppCtx         appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscOptionsGetScalar(NULL,NULL,"-delta",&appctx.delta,NULL);
  PetscOptionsGetBool(NULL,NULL,"-upwind",&appctx.upwind,NULL);
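  /* e.g. run with -delta 0.5 -upwind 0 to reduce the diffusion of c and use centered differences
     for the chemotaxis term instead of the default upwind discretization */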

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE,-8,2,1,NULL,&da);
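  /* 1d nonperiodic grid; the negative size (-8) means 8 points by default, overridable with
     -da_grid_x; 2 degrees of freedom per point (rho and c) and stencil width 1 */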
  DMDASetFieldName(da,0,"rho");
  DMDASetFieldName(da,1,"c");

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vector from DMDA to hold the solution
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetType(ts,TSROSW);
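  /* TSROSW is a Rosenbrock-W method; TSSetFromOptions() below allows choosing another integrator
     at runtime with -ts_type */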
  TSSetDM(ts,da);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetIFunction(ts,NULL,IFunction,&appctx);
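  /* passing NULL for the residual vector lets TS create one internally; appctx is handed to
     IFunction() as its user context */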


  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da,U);
  TSSetSolution(ts,U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetInitialTimeStep(ts,0.0,.0001);
  TSSetDuration(ts,PETSC_DEFAULT,1.0);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
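  /* start at t = 0 with an initial step of 1e-4 and integrate to t = 1.0 with the default
     maximum number of steps; STEPOVER lets the final step pass t = 1.0 rather than shortening it */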
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts,U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&U);
  TSDestroy(&ts);
  DMDestroy(&da);

  PetscFinalize();
  return(0);
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit residual F(t,U,Udot)

   Input Parameters:
.  ts - the TS context
.  ftime - current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  PetscInt       i,Mx,xs,xm;
  PetscReal      hx,sx;
  PetscScalar    rho,c,rhoxx,cxx,cx,rhox,kcxrhox;
  Field          *u,*f,*udot;
  Vec            localU;

  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
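  /* Mx = global number of grid points in x; the other DMDAGetInfo() outputs are not needed here */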

  hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx);

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArrayRead(da,localU,&u);
  DMDAVecGetArrayRead(da,Udot,&udot);
  DMDAVecGetArray(da,F,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c; /* u[0].c   - 1.0; */
    xs++;
    xm--;
  }
  if (xs+xm == Mx) {
    f[Mx-1].rho = udot[Mx-1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx-1].c   = udot[Mx-1].c;  /* u[Mx-1].c   - 0.0;  */
    xm--;
  }
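  /* The two blocks above handle the boundary points: setting the residual to udot freezes
     d(rho)/dt = d(c)/dt = 0 there, so the boundary values stay at those set in InitialConditions();
     the commented-out expressions show the equivalent algebraic (Dirichlet) form */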

  /*
     Compute function over the locally owned part of the grid
  */
  for (i=xs; i<xs+xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0*rho + u[i-1].rho + u[i+1].rho)*sx;
    c     = u[i].c;
    cxx   = (-2.0*c + u[i-1].c + u[i+1].c)*sx;

    if (!appctx->upwind) {
      rhox    = .5*(u[i+1].rho - u[i-1].rho)/hx;
      cx      = .5*(u[i+1].c - u[i-1].c)/hx;
      kcxrhox = appctx->kappa*(cxx*rho + cx*rhox);
    } else {
      kcxrhox = appctx->kappa*((u[i+1].c - u[i].c)*u[i+1].rho - (u[i].c - u[i-1].c)*u[i].rho)*sx;
    }
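    /* kcxrhox approximates the chemotaxis term kappa*(c_x rho)_x, either with centered
       differences (product rule, above) or with a one-sided flux difference (default) */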

    f[i].rho = udot[i].rho - appctx->epsilon*rhoxx + kcxrhox - appctx->mu*PetscAbsScalar(rho)*(1.0 - rho)*PetscMax(0,PetscRealPart(c - appctx->cstar)) + appctx->beta*rho;
    f[i].c   = udot[i].c - appctx->delta*cxx + appctx->lambda*c + appctx->alpha*rho*c/(appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArrayRead(da,localU,&u);
  DMDAVecRestoreArrayRead(da,Udot,&udot);
  DMDAVecRestoreArray(da,F,&f);
  DMRestoreLocalVector(da,&localU);
  return(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt       i,xs,xm,Mx;
  Field          *u;
  PetscReal      hx,x;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 1.0/(PetscReal)(Mx-1);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,U,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

  /*
     Compute function over the locally owned part of the grid
  */
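  /* rho starts at 0 everywhere except the right end (x = 1) where it is 1;
     c decreases smoothly from 1 at x = 0 to 0 at x = 1 */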
  for (i=xs; i<xs+xm; i++) {
    x = i*hx;
    if (x < 1.0) u[i].rho = 0.0;
    else         u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5*PETSC_PI*x);
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,U,&u);
  return(0);
}
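/*
   Example runs (a sketch; -ts_monitor, -ts_view, and -da_grid_x are standard PETSc options,
   while -delta and -upwind are registered by this program):

       ./ex4 -ts_monitor -da_grid_x 100
       ./ex4 -ts_monitor -ts_view -upwind 0 -delta 0.5
*/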