Actual source code: ex5.c
petsc-3.4.2 2013-07-02
static char help[] = "Pattern Formation with Reaction-Diffusion Equations.\n";

/*
     Page 21, Pattern Formation with Reaction-Diffusion Equations

        u_t = D1 (u_xx + u_yy) - u*v^2 + gamma*(1 - u)
        v_t = D2 (v_xx + v_yy) + u*v^2 - (gamma + kappa)*v

     Unlike in the book, this example uses periodic boundary conditions
     instead of Neumann (since they are easier for finite differences).
*/

/*
      Helpful runtime monitor options:
           -ts_monitor_draw_solution
           -draw_save -draw_save_movie

      Helpful runtime linear solver options:
           -pc_type mg -pc_mg_galerkin -da_refine 1 -snes_monitor -ksp_monitor -ts_view
      (note that these Jacobians are so well conditioned that multigrid may not be the best solver)
*/
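/*
      A typical run (a sketch; assumes the executable has been built as "ex5"):

           mpiexec -n 4 ./ex5 -da_grid_x 128 -da_grid_y 128 -ts_monitor -ts_monitor_draw_solution
*/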
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets               petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h  - preconditioners
     petscsnes.h   - nonlinear solvers
*/
#include <petscdmda.h>
#include <petscts.h>
typedef struct {
  PetscScalar u,v;
} Field;

typedef struct {
  PetscReal D1,D2,gamma,kappa;
} AppCtx;
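/*
   The two PetscScalar members of Field correspond to the two degrees of
   freedom per grid point requested from the DMDA below (dof = 2), so a
   local array obtained with DMDAVecGetArray() can be indexed naturally as
   u[j][i].u and u[j][i].v.
*/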
/*
   User-defined routines
*/
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*),InitialConditions(DM,Vec);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
int main(int argc,char **argv)
{
  TS     ts;                    /* ODE integrator */
  Vec    x;                     /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);
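  /*
     Return codes are not checked in this example, to keep it short.  A
     production code would capture and check every PetscErrorCode, e.g.
     (a minimal sketch):

         PetscErrorCode ierr;
         ierr = TSSolve(ts,x);CHKERRQ(ierr);
  */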
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_PERIODIC,DMDA_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,-65,-65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
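  /*
     The negative sizes (-65) mean "65 by default, overridable at runtime
     with -da_grid_x/-da_grid_y or -da_refine"; dof = 2 matches the two
     components of Field, and the star stencil has width 1.
  */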
  DMDASetFieldName(da,0,"u");
  DMDASetFieldName(da,1,"v");
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract the global solution vector from the DMDA
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetType(ts,TSARKIMEX);
  TSARKIMEXSetFullyImplicit(ts,PETSC_TRUE);
  TSSetDM(ts,da);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
  TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);
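  /*
     With TSARKIMEXSetFullyImplicit() the additive Runge-Kutta IMEX method
     treats the entire right-hand side implicitly, so the single RHS
     function and Jacobian registered above are all the integrator needs.
  */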
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da,x);
  TSSetSolution(ts,x);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetDuration(ts,PETSC_DEFAULT,2000.0);
  TSSetInitialTimeStep(ts,0.0,.0001);
  TSSetFromOptions(ts);
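  /*
     TSSetDuration() caps the step count (PETSC_DEFAULT) and the final time
     (2000.0); TSSetInitialTimeStep() sets t0 = 0 and dt = 1e-4.  Since
     TSSetFromOptions() is called last, these can be overridden at runtime,
     e.g. with -ts_final_time and -ts_dt.
  */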
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&x);
  TSDestroy(&ts);
  DMDestroy(&da);

  PetscFinalize();
  return(0);
}
/* ------------------------------------------------------------------- */
/*
   RHSFunction - Evaluates the nonlinear right-hand-side function F(u).

   Input Parameters:
.  ts - the TS context
.  U - input vector
.  ptr - optional user-defined context, as set by TSSetRHSFunction()

   Output Parameter:
.  F - function vector
*/
PetscErrorCode RHSFunction(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
{
  AppCtx      *appctx = (AppCtx*)ptr;
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,uxx,uyy,vc,vxx,vyy;
  Field       **u,**f;
  Vec         localU;

  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);
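  /*
     The domain is [0,2.5] x [0,2.5]; with periodic boundary conditions the
     grid spacing is 2.5/M rather than 2.5/(M-1), and sx,sy are the 1/h^2
     scale factors of the 5-point Laplacian used below.
  */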
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);
  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,localU,&u);
  DMDAVecGetArray(da,F,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;
      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
      f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
      f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
    }
  }
  PetscLogFlops(16*xm*ym);

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,localU,&u);
  DMDAVecRestoreArray(da,F,&f);
  DMRestoreLocalVector(da,&localU);
  return(0);
}
/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)(Mx);
  hy = 2.5/(PetscReal)(My);
  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,U,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /*
     Compute function over the locally owned part of the grid
  */
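  /*
     Start from the spatially uniform steady state u = 1, v = 0, and seed a
     smooth localized perturbation of v inside the square [1,1.5] x [1,1.5];
     u is then set so that u + 2v = 1 holds pointwise.
  */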
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowScalar(PetscSinScalar(4.0*PETSC_PI*x),2.0)*PetscPowScalar(PetscSinScalar(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,U,&u);
  return(0);
}
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat         A       = *AA;           /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx;  /* user-defined application context */
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,vc;
  Field       **u;
  Vec         localU;
  MatStencil  stencil[6],rowstencil;
  PetscScalar entries[6];
  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,localU,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
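  /*
     Each Jacobian row couples one component at (i,j) to six entries: the
     four Laplacian neighbors (same component), the diagonal point (same
     component, including the reaction terms), and the other component at
     the same grid point.
  */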
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;

  /*
     Compute the Jacobian over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    stencil[0].j = j-1;
    stencil[1].j = j+1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; rowstencil.j = j;
    for (i=xs; i<xs+xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
         uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
         vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
         vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
         f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc); */
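      /*
         Analytic Jacobian entries, i.e. the derivatives of the residuals
         shown (commented out) above:
            d(f.u)/du = D1*Lap - v^2 - gamma            d(f.u)/dv = -2*u*v
            d(f.v)/dv = D2*Lap + 2*u*v - (gamma+kappa)  d(f.v)/du = v^2
         where the Laplacian Lap contributes sx to the east/west neighbors,
         sy to the north/south neighbors, and -2*(sx+sy) to the diagonal.
      */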
      stencil[0].i = i;   stencil[0].c = 0; entries[0] = appctx->D1*sy;
      stencil[1].i = i;   stencil[1].c = 0; entries[1] = appctx->D1*sy;
      stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
      stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
      stencil[4].i = i;   stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
      stencil[5].i = i;   stencil[5].c = 1; entries[5] = -2.0*uc*vc;
      rowstencil.i = i;   rowstencil.c = 0;

      MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);

      stencil[0].c = 1; entries[0] = appctx->D2*sy;
      stencil[1].c = 1; entries[1] = appctx->D2*sy;
      stencil[2].c = 1; entries[2] = appctx->D2*sx;
      stencil[3].c = 1; entries[3] = appctx->D2*sx;
      stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0; entries[5] = vc*vc;
      rowstencil.c = 1;

      MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }
  PetscLogFlops(19*xm*ym);

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,localU,&u);
  DMRestoreLocalVector(da,&localU);
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  *str = SAME_NONZERO_PATTERN;
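  /*
     SAME_NONZERO_PATTERN tells the solver that the sparsity pattern does
     not change between Jacobian evaluations; the option below turns any
     attempt to insert a new nonzero location into an error, which catches
     stencil mistakes early.
  */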
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}