Actual source code: ex14.c


static char help[] = "Solves a nonlinear system in parallel with a user-defined Newton method.\n\
Uses KSP to solve the linearized Newton systems.  This solver\n\
is a very simplistic inexact Newton method.  The intent of this code is to\n\
demonstrate the repeated solution of linear systems with the same nonzero pattern.\n\
\n\
This is NOT the recommended approach for solving nonlinear problems with PETSc!\n\
We urge users to employ the SNES component for solving nonlinear problems whenever\n\
possible, as it offers many advantages over coding nonlinear solvers independently.\n\
\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
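
/*
   Example usage (illustrative only): a hypothetical 4-process run on a
   16x16 grid with a 2x2 process decomposition might be launched as

       mpiexec -n 4 ./ex14 -mx 16 -my 16 -Nx 2 -Ny 2 -par 6.0 -ksp_monitor
*/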

/*T
   Concepts: KSP^writing a user-defined nonlinear solver (parallel Bratu example);
   Concepts: DMDA^using distributed arrays;
   Processors: n
T*/

/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.

    The SNES version of this problem is:  snes/tutorials/ex5.c
    We urge users to employ the SNES component for solving nonlinear
    problems whenever possible, as it offers many advantages over coding
    nonlinear solvers independently.

  ------------------------------------------------------------------------- */
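
/*
   For reference, the 5-point discretization implemented in ComputeFunction()
   below yields, at each interior grid point (i,j) with mesh spacings
   hx = 1/(mx-1) and hy = 1/(my-1), the discrete residual

      F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
             + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
             - hx*hy*lambda*exp(u(i,j)),

   while at boundary points F(i,j) = u(i,j), which enforces u = 0 there.
*/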

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscksp.h" so that we can use KSP solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscviewer.h - viewers                  petscpc.h  - preconditioners
     petscksp.h    - Krylov subspace methods
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscksp.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, ComputeJacobian() and
   ComputeFunction().
*/
typedef struct {
  PetscReal param;             /* test problem parameter */
  PetscInt  mx,my;             /* discretization in x,y directions */
  Vec       localX;            /* ghosted local vector */
  DM        da;                /* distributed array data structure */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode ComputeFunction(AppCtx*,Vec,Vec),FormInitialGuess(AppCtx*,Vec);
extern PetscErrorCode ComputeJacobian(AppCtx*,Vec,Mat);

int main(int argc,char **argv)
{
  /* -------------- Data to define application problem ---------------- */
  MPI_Comm       comm;                /* communicator */
  KSP            ksp;                 /* linear solver */
  Vec            X,Y,F;              /* solution, update, residual vectors */
  Mat            J;                   /* Jacobian matrix */
  AppCtx         user;                /* user-defined work context */
  PetscInt       Nx,Ny;              /* number of processors in x- and y-directions */
  PetscMPIInt    size;                /* number of processors */
  PetscReal      bratu_lambda_max = 6.81,bratu_lambda_min = 0.;
  PetscInt       m,N;
  PetscErrorCode ierr;                /* error code */

  /* --------------- Data to define nonlinear solver -------------- */
  PetscReal    rtol = 1.e-8;          /* relative convergence tolerance */
  PetscReal    xtol = 1.e-8;          /* step convergence tolerance */
  PetscReal    ttol;                  /* convergence tolerance */
  PetscReal    fnorm,ynorm,xnorm;     /* various vector norms */
  PetscInt     max_nonlin_its = 3;    /* maximum number of iterations for nonlinear solver */
  PetscInt     max_functions  = 50;   /* maximum number of function evaluations */
  PetscInt     lin_its;               /* number of linear solver iterations for each step */
  PetscInt     i;                     /* nonlinear solve iteration number */
  PetscBool    no_output = PETSC_FALSE; /* flag indicating whether to suppress output */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  comm = PETSC_COMM_WORLD;
  PetscOptionsGetBool(NULL,NULL,"-no_output",&no_output,NULL);

  /*
     Initialize problem parameters
  */
  user.mx = 4; user.my = 4; user.param = 6.0;

  PetscOptionsGetInt(NULL,NULL,"-mx",&user.mx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-my",&user.my,NULL);
  PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);
  if (user.param >= bratu_lambda_max || user.param <= bratu_lambda_min) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Lambda is out of range");
  N = user.mx*user.my;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create linear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPCreate(comm,&ksp);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
  */
  MPI_Comm_size(comm,&size);
  Nx   = PETSC_DECIDE; Ny = PETSC_DECIDE;
  PetscOptionsGetInt(NULL,NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ny",&Ny,NULL);
  if (Nx*Ny != size && (Nx != PETSC_DECIDE || Ny != PETSC_DECIDE)) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"Incompatible number of processors:  Nx * Ny != size");
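  /*
     The DMDACreate2d() arguments below specify non-periodic boundaries in
     both directions, a star (5-point) stencil, a global grid of user.mx by
     user.my points, an Nx by Ny process grid (PETSC_DECIDE by default),
     1 degree of freedom per grid point, and a stencil width of 1.
  */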
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.mx,user.my,Nx,Ny,1,1,NULL,NULL,&user.da);
  DMSetFromOptions(user.da);
  DMSetUp(user.da);

  /*
     Extract global and local vectors from DMDA; then duplicate for remaining
     vectors that are the same types
  */
  DMCreateGlobalVector(user.da,&X);
  DMCreateLocalVector(user.da,&user.localX);
  VecDuplicate(X,&F);
  VecDuplicate(X,&Y);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure for Jacobian
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Note:  For the parallel case, vectors and matrices MUST be partitioned
     accordingly.  When using distributed arrays (DMDAs) to create vectors,
     the DMDAs determine the problem partitioning.  We must explicitly
     specify the local matrix dimensions upon its creation for compatibility
     with the vector distribution.  Thus, the generic MatCreate() routine
     is NOT sufficient when working with distributed arrays.

     Note: Here we only approximately preallocate storage space for the
     Jacobian.  See the users manual for a discussion of better techniques
     for preallocating matrix memory.
  */
  if (size == 1) {
    MatCreateSeqAIJ(comm,N,N,5,NULL,&J);
  } else {
    VecGetLocalSize(X,&m);
    MatCreateAIJ(comm,m,m,N,N,5,NULL,3,NULL,&J);
  }
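
  /*
     A sketch of an alternative (not used here): when the matrix arises from
     a DMDA, PETSc can create and preallocate it exactly for the stencil,
     e.g.,

        DMCreateMatrix(user.da,&J);

     which sizes and preallocates J consistently with the DMDA partitioning.
  */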

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize linear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
  */
  KSPSetFromOptions(ksp);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Evaluate initial guess
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  FormInitialGuess(&user,X);
  ComputeFunction(&user,X,F);   /* Compute F(X)    */
  VecNorm(F,NORM_2,&fnorm);     /* fnorm = || F || */
  ttol = fnorm*rtol;
  if (!no_output) {PetscPrintf(comm,"Initial function norm = %g\n",(double)fnorm);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system with a user-defined method
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
      This solver is a very simplistic inexact Newton method, with no
      damping strategies or bells and whistles. The intent of this code
      is merely to demonstrate the repeated solution with KSP of linear
      systems with the same nonzero structure.

      This is NOT the recommended approach for solving nonlinear problems
      with PETSc!  We urge users to employ the SNES component for solving
      nonlinear problems whenever possible with application codes, as it
      offers many advantages over coding nonlinear solvers independently.
   */
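
  /*
      For contrast, a minimal sketch of the recommended SNES approach (see
      snes/tutorials/ex5.c for a complete version; the callback names and
      signatures here are abbreviated and illustrative only):

         SNES snes;
         SNESCreate(comm,&snes);
         SNESSetFunction(snes,F,FormFunction,&user);
         SNESSetJacobian(snes,J,J,FormJacobian,&user);
         SNESSetFromOptions(snes);
         SNESSolve(snes,NULL,X);
         SNESDestroy(&snes);
   */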

  for (i=0; i<max_nonlin_its; i++) {

    /*
        Compute the Jacobian matrix.
     */
    ComputeJacobian(&user,X,J);

    /*
        Solve J Y = F, where J is the Jacobian matrix.
          - First, set the KSP linear operators.  Here the matrix that
            defines the linear system also serves as the preconditioning
            matrix.
          - Then solve the Newton system.
     */
    KSPSetOperators(ksp,J,J);
    KSPSolve(ksp,F,Y);
    KSPGetIterationNumber(ksp,&lin_its);

    /*
       Compute updated iterate
     */
    VecNorm(Y,NORM_2,&ynorm);       /* ynorm = || Y || */
    VecAYPX(Y,-1.0,X);              /* Y <- X - Y      */
    VecCopy(Y,X);                   /* X <- Y          */
    VecNorm(X,NORM_2,&xnorm);       /* xnorm = || X || */
    if (!no_output) {
      PetscPrintf(comm,"   linear solve iterations = %D, xnorm=%g, ynorm=%g\n",lin_its,(double)xnorm,(double)ynorm);
    }

    /*
       Evaluate new nonlinear function
     */
    ComputeFunction(&user,X,F);     /* Compute F(X)    */
    VecNorm(F,NORM_2,&fnorm);       /* fnorm = || F || */
    if (!no_output) {
      PetscPrintf(comm,"Iteration %D, function norm = %g\n",i+1,(double)fnorm);
    }

    /*
       Test for convergence
     */
    if (fnorm <= ttol) {
      if (!no_output) {
        PetscPrintf(comm,"Converged due to function norm %g < %g (relative tolerance)\n",(double)fnorm,(double)ttol);
      }
      break;
    }
    if (ynorm < xtol*(xnorm)) {
      if (!no_output) {
        PetscPrintf(comm,"Converged due to small update length: %g < %g * %g\n",(double)ynorm,(double)xtol,(double)xnorm);
      }
      break;
    }
    if (i > max_functions) {
      if (!no_output) {
        PetscPrintf(comm,"Exceeded maximum number of function evaluations: %D > %D\n",i,max_functions);
      }
      break;
    }
  }
  PetscPrintf(comm,"Number of nonlinear iterations = %D\n",i);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatDestroy(&J);           VecDestroy(&Y);
  VecDestroy(&user.localX); VecDestroy(&X);
  VecDestroy(&F);
  KSPDestroy(&ksp);  DMDestroy(&user.da);
  ierr = PetscFinalize();
  return ierr;
}
/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
.  user - user-defined application context
.  X - vector

   Output Parameter:
.  X - vector
 */
PetscErrorCode FormInitialGuess(AppCtx *user,Vec X)
{
  PetscInt    i,j,row,mx,my,xs,ys,xm,ym,gxm,gym,gxs,gys;
  PetscReal   one = 1.0,lambda,temp1,temp,hx,hy;
  PetscScalar *x;

  mx    = user->mx;            my = user->my;            lambda = user->param;
  hx    = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  temp1 = lambda/(lambda + one);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  VecGetArray(X,&x);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys   - starting grid indices (no ghost points)
       xm, ym   - widths of local grid (no ghost points)
       gxs, gys - starting grid indices (including ghost points)
       gxm, gym - widths of local grid (including ghost points)
  */
  DMDAGetCorners(user->da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);
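
  /*
     Note: X is a global (non-ghosted) vector, so its local array must be
     addressed with the non-ghosted corners: grid point (i,j) corresponds
     to local index (i - xs) + (j - ys)*xm.  (The ghosted local vector used
     in ComputeFunction() and ComputeJacobian() is instead addressed with
     gxs, gys, and gxm.)
  */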

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,my-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      row = (i - xs) + (j - ys)*xm;   /* local index into the global vector X */
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        x[row] = 0.0;
        continue;
      }
      x[row] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,mx-i-1))*hx,temp));
    }
  }

  /*
     Restore vector
  */
  VecRestoreArray(X,&x);
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   ComputeFunction - Evaluates nonlinear function, F(x).

   Input Parameters:
.  X - input vector
.  user - user-defined application context

   Output Parameter:
.  F - function vector
 */
PetscErrorCode ComputeFunction(AppCtx *user,Vec X,Vec F)
{
  PetscInt       i,j,row,frow,mx,my,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      two = 2.0,one = 1.0,lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar    u,uxx,uyy,*x,*f;
  Vec            localX = user->localX;

  mx = user->mx;            my = user->my;            lambda = user->param;
  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  sc = hx*hy*lambda;        hxdhy = hx/hy;            hydhx = hy/hx;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);
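
  /*
     A sketch of how that overlap could be exploited (not done here): any
     work that does not need ghost values, such as evaluating the function
     at strictly interior points, could be placed between the two calls:

        DMGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
        ... computation that needs no ghost values ...
        DMGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);
  */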

  /*
     Get pointers to vector data
  */
  VecGetArray(localX,&x);
  VecGetArray(F,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(user->da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);

  /*
     Compute function over the locally owned part of the grid.  Note that
     x is the array of a ghosted local vector, addressed with the ghosted
     corners (gxs,gys,gxm), while f is the array of a global vector,
     addressed with the non-ghosted corners (xs,ys,xm).
  */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      row  = (i - gxs) + (j - gys)*gxm;   /* ghosted local index into x */
      frow = (i - xs) + (j - ys)*xm;      /* local index into f */
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        f[frow] = x[row];
        continue;
      }
      u       = x[row];
      uxx     = (two*u - x[row-1] - x[row+1])*hydhx;
      uyy     = (two*u - x[row-gxm] - x[row+gxm])*hxdhy;
      f[frow] = uxx + uyy - sc*PetscExpScalar(u);
    }
  }

  /*
     Restore vectors
  */
  VecRestoreArray(localX,&x);
  VecRestoreArray(F,&f);
  PetscLogFlops(11.0*ym*xm);
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   ComputeJacobian - Evaluates Jacobian matrix.

   Input Parameters:
.  X - input vector
.  user - user-defined application context

   Output Parameter:
.  jac - Jacobian matrix

   Notes:
   Due to grid point reordering with DMDAs, we must always work
   with the local grid points, and then transform them to the new
   global numbering with the "ltog" mapping.  We cannot work directly
   with the global numbers for the original uniprocessor grid!
*/
PetscErrorCode ComputeJacobian(AppCtx *user,Vec X,Mat jac)
{
  Vec                    localX = user->localX;   /* local vector */
  const PetscInt         *ltog;                   /* local-to-global mapping */
  PetscInt               i,j,row,mx,my,col[5];
  PetscInt               xs,ys,xm,ym,gxs,gys,gxm,gym,grow;
  PetscScalar            two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x;
  ISLocalToGlobalMapping ltogm;

  mx = user->mx;            my = user->my;            lambda = user->param;
  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  sc = hx*hy;               hxdhy = hx/hy;            hydhx = hy/hx;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);

  /*
     Get pointer to vector data
  */
  VecGetArray(localX,&x);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(user->da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);

  /*
     Get the global node numbers for all local nodes, including ghost points
  */
  DMGetLocalToGlobalMapping(user->da,&ltogm);
  ISLocalToGlobalMappingGetIndices(ltogm,&ltog);
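
  /*
     For example, the global row number of the grid point with ghosted local
     index "row" is simply grow = ltog[row], as used in the loop below.
  */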

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors. The "grow"
        parameter computed below specifies the global row number
        corresponding to each local grid point.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
      - Here, we set all entries for a particular row at once.
  */
  for (j=ys; j<ys+ym; j++) {
    row = (j - gys)*gxm + xs - gxs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
      /* boundary points */
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        MatSetValues(jac,1,&grow,1,&grow,&one,INSERT_VALUES);
        continue;
      }
      /* interior grid points */
      v[0] = -hxdhy; col[0] = ltog[row - gxm];
      v[1] = -hydhx; col[1] = ltog[row - 1];
      v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = grow;
      v[3] = -hydhx; col[3] = ltog[row + 1];
      v[4] = -hxdhy; col[4] = ltog[row + gxm];
      MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  VecRestoreArray(localX,&x);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
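
  /*
     Note that VecRestoreArray() above is placed between MatAssemblyBegin()
     and MatAssemblyEnd(), a small instance of the overlap just described.
  */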

  return 0;
}

/*TEST

    test:

TEST*/