Actual source code: telescope_dmda.c


  2: #include <petsc/private/matimpl.h>
  3: #include <petsc/private/pcimpl.h>
  4: #include <petsc/private/dmimpl.h>
  5: #include <petscksp.h>
  6: #include <petscdm.h>
  7: #include <petscdmda.h>

  9: #include "../src/ksp/pc/impls/telescope/telescope.h"

 11: static PetscBool  cited = PETSC_FALSE;
 12: static const char citation[] =
 13: "@inproceedings{MaySananRuppKnepleySmith2016,\n"
 14: "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
 15: "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
 16: "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
 17: "  series    = {PASC '16},\n"
 18: "  isbn      = {978-1-4503-4126-4},\n"
 19: "  location  = {Lausanne, Switzerland},\n"
 20: "  pages     = {5:1--5:12},\n"
 21: "  articleno = {5},\n"
 22: "  numpages  = {12},\n"
 23: "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
 24: "  doi       = {10.1145/2929908.2929913},\n"
 25: "  acmid     = {2929913},\n"
 26: "  publisher = {ACM},\n"
 27: "  address   = {New York, NY, USA},\n"
 28: "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
 29: "  year      = {2016}\n"
 30: "}\n";

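     /*
      Given the global indices (i,j,k) of a grid point, determine the rank of the
      repartitioned DMDA that owns it by scanning the per-dimension ownership
      ranges (start_i[]/span_i[], etc.). The process coordinates are returned in
      the optional arguments _pi/_pj/_pk, and the flattened rank in rank_re.
     */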
 32: static PetscErrorCode _DMDADetermineRankFromGlobalIJK(PetscInt dim,PetscInt i,PetscInt j,PetscInt k,
 33:                                                       PetscInt Mp,PetscInt Np,PetscInt Pp,
 34:                                                       PetscInt start_i[],PetscInt start_j[],PetscInt start_k[],
 35:                                                       PetscInt span_i[],PetscInt span_j[],PetscInt span_k[],
 36:                                                       PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *_pk,PetscMPIInt *rank_re)
 37: {
 38:   PetscInt pi,pj,pk,n;

 41:   *rank_re = -1;
 42:   if (_pi) *_pi = -1;
 43:   if (_pj) *_pj = -1;
 44:   if (_pk) *_pk = -1;
 45:   pi = pj = pk = -1;
 46:   if (_pi) {
 47:     for (n=0; n<Mp; n++) {
 48:       if ((i >= start_i[n]) && (i < start_i[n]+span_i[n])) {
 49:         pi = n;
 50:         break;
 51:       }
 52:     }
 53:     if (pi == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pi cannot be determined : range %D, val %D",Mp,i);
 54:     *_pi = pi;
 55:   }

 57:   if (_pj) {
 58:     for (n=0; n<Np; n++) {
 59:       if ((j >= start_j[n]) && (j < start_j[n]+span_j[n])) {
 60:         pj = n;
 61:         break;
 62:       }
 63:     }
 64:     if (pj == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pj cannot be determined : range %D, val %D",Np,j);
 65:     *_pj = pj;
 66:   }

 68:   if (_pk) {
 69:     for (n=0; n<Pp; n++) {
 70:       if ((k >= start_k[n]) && (k < start_k[n]+span_k[n])) {
 71:         pk = n;
 72:         break;
 73:       }
 74:     }
 75:     if (pk == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pk cannot be determined : range %D, val %D",Pp,k);
 76:     *_pk = pk;
 77:   }

 79:   switch (dim) {
 80:   case 1:
 81:     *rank_re = pi;
 82:     break;
 83:   case 2:
 84:     *rank_re = pi + pj * Mp;
 85:     break;
 86:   case 3:
 87:     *rank_re = pi + pj * Mp + pk * (Mp*Np);
 88:     break;
 89:   }
 90:   return(0);
 91: }

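     /*
      Compute the global index s0 of the first point owned by rank_re on the
      repartitioned DMDA, i.e. the total number of points owned by all ranks
      preceding rank_re in the natural (i fastest, then j, then k) ordering.
     */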
 93: static PetscErrorCode _DMDADetermineGlobalS0(PetscInt dim,PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,PetscInt Pp_re,
 94:                                       PetscInt range_i_re[],PetscInt range_j_re[],PetscInt range_k_re[],PetscInt *s0)
 95: {
 96:   PetscInt i,j,k,start_IJK = 0;
 97:   PetscInt rank_ijk;

100:   switch (dim) {
101:   case 1:
102:     for (i=0; i<Mp_re; i++) {
103:       rank_ijk = i;
104:       if (rank_ijk < rank_re) {
105:         start_IJK += range_i_re[i];
106:       }
107:     }
108:     break;
109:   case 2:
110:     for (j=0; j<Np_re; j++) {
111:       for (i=0; i<Mp_re; i++) {
112:         rank_ijk = i + j*Mp_re;
113:         if (rank_ijk < rank_re) {
114:           start_IJK += range_i_re[i]*range_j_re[j];
115:         }
116:       }
117:     }
118:     break;
119:   case 3:
120:     for (k=0; k<Pp_re; k++) {
121:       for (j=0; j<Np_re; j++) {
122:         for (i=0; i<Mp_re; i++) {
123:           rank_ijk = i + j*Mp_re + k*Mp_re*Np_re;
124:           if (rank_ijk < rank_re) {
125:             start_IJK += range_i_re[i]*range_j_re[j]*range_k_re[k];
126:           }
127:         }
128:       }
129:     }
130:     break;
131:   }
132:   *s0 = start_IJK;
133:   return(0);
134: }

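     /*
      Transfer the coordinates of the parent (2D) DMDA onto the repartitioned DMDA.
      The parent coordinates are first mapped into the natural ordering; each active
      rank then scatters the (x,y) pairs belonging to its new sub-domain into the
      coordinate vector of the repartitioned DMDA and updates the local coordinates.
     */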
136: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors2d(PC_Telescope sred,DM dm,DM subdm)
137: {
139:   DM             cdm;
140:   Vec            coor,coor_natural,perm_coors;
141:   PetscInt       i,j,si,sj,ni,nj,M,N,Ml,Nl,c,nidx;
142:   PetscInt       *fine_indices;
143:   IS             is_fine,is_local;
144:   VecScatter     sctx;

147:   DMGetCoordinates(dm,&coor);
148:   if (!coor) return(0);
149:   if (PCTelescope_isActiveRank(sred)) {
150:     DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
151:   }
152:   /* Get the coordinate vector from the distributed array */
153:   DMGetCoordinateDM(dm,&cdm);
154:   DMDACreateNaturalVector(cdm,&coor_natural);

156:   DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
157:   DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

 159:   /* get the natural-ordering indices of the entries we need to grab */
160:   DMDAGetInfo(dm,NULL,&M,&N,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
161:   if (PCTelescope_isActiveRank(sred)) {
162:     DMDAGetCorners(subdm,&si,&sj,NULL,&ni,&nj,NULL);
163:     Ml = ni;
164:     Nl = nj;
165:   } else {
166:     si = sj = 0;
167:     ni = nj = 0;
168:     Ml = Nl = 0;
169:   }

171:   PetscMalloc1(Ml*Nl*2,&fine_indices);
172:   c = 0;
173:   if (PCTelescope_isActiveRank(sred)) {
174:     for (j=sj; j<sj+nj; j++) {
175:       for (i=si; i<si+ni; i++) {
176:         nidx = (i) + (j)*M;
177:         fine_indices[c  ] = 2 * nidx     ;
178:         fine_indices[c+1] = 2 * nidx + 1 ;
179:         c = c + 2;
180:       }
181:     }
182:     if (c != Ml*Nl*2) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c %D should equal 2 * Ml %D * Nl %D",c,Ml,Nl);
183:   }

185:   /* generate scatter */
186:   ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*2,fine_indices,PETSC_USE_POINTER,&is_fine);
187:   ISCreateStride(PETSC_COMM_SELF,Ml*Nl*2,0,1,&is_local);

189:   /* scatter */
190:   VecCreate(PETSC_COMM_SELF,&perm_coors);
191:   VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*2);
192:   VecSetType(perm_coors,VECSEQ);

194:   VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
195:   VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
196:   VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
197:   /* access */
198:   if (PCTelescope_isActiveRank(sred)) {
199:     Vec               _coors;
200:     const PetscScalar *LA_perm;
201:     PetscScalar       *LA_coors;

203:     DMGetCoordinates(subdm,&_coors);
204:     VecGetArrayRead(perm_coors,&LA_perm);
205:     VecGetArray(_coors,&LA_coors);
206:     for (i=0; i<Ml*Nl*2; i++) {
207:       LA_coors[i] = LA_perm[i];
208:     }
209:     VecRestoreArray(_coors,&LA_coors);
210:     VecRestoreArrayRead(perm_coors,&LA_perm);
211:   }

213:   /* update local coords */
214:   if (PCTelescope_isActiveRank(sred)) {
215:     DM  _dmc;
216:     Vec _coors,_coors_local;
217:     DMGetCoordinateDM(subdm,&_dmc);
218:     DMGetCoordinates(subdm,&_coors);
219:     DMGetCoordinatesLocal(subdm,&_coors_local);
220:     DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
221:     DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
222:   }
223:   VecScatterDestroy(&sctx);
224:   ISDestroy(&is_fine);
225:   PetscFree(fine_indices);
226:   ISDestroy(&is_local);
227:   VecDestroy(&perm_coors);
228:   VecDestroy(&coor_natural);
229:   return(0);
230: }

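     /*
      3D variant of the coordinate transfer above: identical logic, operating on
      (x,y,z) triplets with an additional loop over the third dimension.
     */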
232: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors3d(PC_Telescope sred,DM dm,DM subdm)
233: {
235:   DM             cdm;
236:   Vec            coor,coor_natural,perm_coors;
237:   PetscInt       i,j,k,si,sj,sk,ni,nj,nk,M,N,P,Ml,Nl,Pl,c,nidx;
238:   PetscInt       *fine_indices;
239:   IS             is_fine,is_local;
240:   VecScatter     sctx;

243:   DMGetCoordinates(dm,&coor);
244:   if (!coor) return(0);

246:   if (PCTelescope_isActiveRank(sred)) {
247:     DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
248:   }

250:   /* Get the coordinate vector from the distributed array */
251:   DMGetCoordinateDM(dm,&cdm);
252:   DMDACreateNaturalVector(cdm,&coor_natural);
253:   DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
254:   DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

 256:   /* get the natural-ordering indices of the entries we need to grab */
257:   DMDAGetInfo(dm,NULL,&M,&N,&P,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

259:   if (PCTelescope_isActiveRank(sred)) {
260:     DMDAGetCorners(subdm,&si,&sj,&sk,&ni,&nj,&nk);
261:     Ml = ni;
262:     Nl = nj;
263:     Pl = nk;
264:   } else {
265:     si = sj = sk = 0;
266:     ni = nj = nk = 0;
267:     Ml = Nl = Pl = 0;
268:   }

270:   PetscMalloc1(Ml*Nl*Pl*3,&fine_indices);

272:   c = 0;
273:   if (PCTelescope_isActiveRank(sred)) {
274:     for (k=sk; k<sk+nk; k++) {
275:       for (j=sj; j<sj+nj; j++) {
276:         for (i=si; i<si+ni; i++) {
277:           nidx = (i) + (j)*M + (k)*M*N;
278:           fine_indices[c  ] = 3 * nidx     ;
279:           fine_indices[c+1] = 3 * nidx + 1 ;
280:           fine_indices[c+2] = 3 * nidx + 2 ;
281:           c = c + 3;
282:         }
283:       }
284:     }
285:   }

287:   /* generate scatter */
288:   ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*Pl*3,fine_indices,PETSC_USE_POINTER,&is_fine);
289:   ISCreateStride(PETSC_COMM_SELF,Ml*Nl*Pl*3,0,1,&is_local);

291:   /* scatter */
292:   VecCreate(PETSC_COMM_SELF,&perm_coors);
293:   VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*Pl*3);
294:   VecSetType(perm_coors,VECSEQ);
295:   VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
296:   VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
297:   VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);

299:   /* access */
300:   if (PCTelescope_isActiveRank(sred)) {
301:     Vec               _coors;
302:     const PetscScalar *LA_perm;
303:     PetscScalar       *LA_coors;

305:     DMGetCoordinates(subdm,&_coors);
306:     VecGetArrayRead(perm_coors,&LA_perm);
307:     VecGetArray(_coors,&LA_coors);
308:     for (i=0; i<Ml*Nl*Pl*3; i++) {
309:       LA_coors[i] = LA_perm[i];
310:     }
311:     VecRestoreArray(_coors,&LA_coors);
312:     VecRestoreArrayRead(perm_coors,&LA_perm);
313:   }

315:   /* update local coords */
316:   if (PCTelescope_isActiveRank(sred)) {
317:     DM  _dmc;
318:     Vec _coors,_coors_local;

320:     DMGetCoordinateDM(subdm,&_dmc);
321:     DMGetCoordinates(subdm,&_coors);
322:     DMGetCoordinatesLocal(subdm,&_coors_local);
323:     DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
324:     DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
325:   }

327:   VecScatterDestroy(&sctx);
328:   ISDestroy(&is_fine);
329:   PetscFree(fine_indices);
330:   ISDestroy(&is_local);
331:   VecDestroy(&perm_coors);
332:   VecDestroy(&coor_natural);
333:   return(0);
334: }

336: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
337: {
338:   PetscInt       dim;
339:   DM             dm,subdm;
340:   PetscSubcomm   psubcomm;
341:   MPI_Comm       comm;
342:   Vec            coor;

346:   PCGetDM(pc,&dm);
347:   DMGetCoordinates(dm,&coor);
348:   if (!coor) return(0);
349:   psubcomm = sred->psubcomm;
350:   comm = PetscSubcommParent(psubcomm);
351:   subdm = ctx->dmrepart;

353:   PetscInfo(pc,"PCTelescope: setting up the coordinates (DMDA)\n");
354:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
355:   switch (dim) {
356:   case 1:
357:     SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
358:   case 2: PCTelescopeSetUp_dmda_repart_coors2d(sred,dm,subdm);
359:     break;
360:   case 3: PCTelescopeSetUp_dmda_repart_coors3d(sred,dm,subdm);
361:     break;
362:   }
363:   return(0);
364: }

 366: /* Set up the repartitioned DMDA on the child communicator and make its layout available to all ranks in the parent communicator */
367: PetscErrorCode PCTelescopeSetUp_dmda_repart(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
368: {
370:   DM                    dm;
371:   PetscInt              dim,nx,ny,nz,ndof,nsw,sum,k;
372:   DMBoundaryType        bx,by,bz;
373:   DMDAStencilType       stencil;
374:   const PetscInt        *_range_i_re;
375:   const PetscInt        *_range_j_re;
376:   const PetscInt        *_range_k_re;
377:   DMDAInterpolationType itype;
378:   PetscInt              refine_x,refine_y,refine_z;
379:   MPI_Comm              comm,subcomm;
380:   const char            *prefix;

383:   comm = PetscSubcommParent(sred->psubcomm);
384:   subcomm = PetscSubcommChild(sred->psubcomm);
385:   PCGetDM(pc,&dm);

387:   DMDAGetInfo(dm,&dim,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,&nsw,&bx,&by,&bz,&stencil);
388:   DMDAGetInterpolationType(dm,&itype);
389:   DMDAGetRefinementFactor(dm,&refine_x,&refine_y,&refine_z);

391:   ctx->dmrepart = NULL;
392:   _range_i_re = _range_j_re = _range_k_re = NULL;
393:   /* Create DMDA on the child communicator */
394:   if (PCTelescope_isActiveRank(sred)) {
395:     switch (dim) {
396:     case 1:
397:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (1D)\n");
398:       /*DMDACreate1d(subcomm,bx,nx,ndof,nsw,NULL,&ctx->dmrepart);*/
399:       ny = nz = 1;
400:       by = bz = DM_BOUNDARY_NONE;
401:       break;
402:     case 2:
403:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (2D)\n");
404:       /*DMDACreate2d(subcomm,bx,by,stencil,nx,ny, PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,&ctx->dmrepart);*/
405:       nz = 1;
406:       bz = DM_BOUNDARY_NONE;
407:       break;
408:     case 3:
409:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (3D)\n");
410:       /*DMDACreate3d(subcomm,bx,by,bz,stencil,nx,ny,nz, PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,NULL,&ctx->dmrepart);*/
411:       break;
412:     }
413:     /*
 414:      The APIs DMDACreate1d(), DMDACreate2d(), and DMDACreate3d() do not allow us to set/append
 415:      a unique options prefix for the DM, thus I prefer to expose the contents of these APIs here.
416:      This allows users to control the partitioning of the subDM.
417:     */
418:     DMDACreate(subcomm,&ctx->dmrepart);
419:     /* Set unique option prefix name */
420:     KSPGetOptionsPrefix(sred->ksp,&prefix);
421:     DMSetOptionsPrefix(ctx->dmrepart,prefix);
422:     DMAppendOptionsPrefix(ctx->dmrepart,"repart_");
423:     /* standard setup from DMDACreate{1,2,3}d() */
424:     DMSetDimension(ctx->dmrepart,dim);
425:     DMDASetSizes(ctx->dmrepart,nx,ny,nz);
426:     DMDASetNumProcs(ctx->dmrepart,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE);
427:     DMDASetBoundaryType(ctx->dmrepart,bx,by,bz);
428:     DMDASetDof(ctx->dmrepart,ndof);
429:     DMDASetStencilType(ctx->dmrepart,stencil);
430:     DMDASetStencilWidth(ctx->dmrepart,nsw);
431:     DMDASetOwnershipRanges(ctx->dmrepart,NULL,NULL,NULL);
432:     DMSetFromOptions(ctx->dmrepart);
433:     DMSetUp(ctx->dmrepart);
 434:     /* Set refinement factors and interpolation type from the parent */
435:     DMDASetRefinementFactor(ctx->dmrepart,refine_x,refine_y,refine_z);
436:     DMDASetInterpolationType(ctx->dmrepart,itype);

438:     DMDAGetInfo(ctx->dmrepart,NULL,NULL,NULL,NULL,&ctx->Mp_re,&ctx->Np_re,&ctx->Pp_re,NULL,NULL,NULL,NULL,NULL,NULL);
439:     DMDAGetOwnershipRanges(ctx->dmrepart,&_range_i_re,&_range_j_re,&_range_k_re);

441:     ctx->dmrepart->ops->creatematrix = dm->ops->creatematrix;
442:     ctx->dmrepart->ops->createdomaindecomposition = dm->ops->createdomaindecomposition;
443:   }

445:   /* generate ranges for repartitioned dm */
446:   /* note - assume rank 0 always participates */
447:   /* TODO: use a single MPI call */
448:   MPI_Bcast(&ctx->Mp_re,1,MPIU_INT,0,comm);
449:   MPI_Bcast(&ctx->Np_re,1,MPIU_INT,0,comm);
450:   MPI_Bcast(&ctx->Pp_re,1,MPIU_INT,0,comm);

452:   PetscCalloc3(ctx->Mp_re,&ctx->range_i_re,ctx->Np_re,&ctx->range_j_re,ctx->Pp_re,&ctx->range_k_re);

454:   if (_range_i_re) {PetscArraycpy(ctx->range_i_re,_range_i_re,ctx->Mp_re);}
455:   if (_range_j_re) {PetscArraycpy(ctx->range_j_re,_range_j_re,ctx->Np_re);}
456:   if (_range_k_re) {PetscArraycpy(ctx->range_k_re,_range_k_re,ctx->Pp_re);}

458:   /* TODO: use a single MPI call */
459:   MPI_Bcast(ctx->range_i_re,ctx->Mp_re,MPIU_INT,0,comm);
460:   MPI_Bcast(ctx->range_j_re,ctx->Np_re,MPIU_INT,0,comm);
461:   MPI_Bcast(ctx->range_k_re,ctx->Pp_re,MPIU_INT,0,comm);

463:   PetscMalloc3(ctx->Mp_re,&ctx->start_i_re,ctx->Np_re,&ctx->start_j_re,ctx->Pp_re,&ctx->start_k_re);

465:   sum = 0;
466:   for (k=0; k<ctx->Mp_re; k++) {
467:     ctx->start_i_re[k] = sum;
468:     sum += ctx->range_i_re[k];
469:   }

471:   sum = 0;
472:   for (k=0; k<ctx->Np_re; k++) {
473:     ctx->start_j_re[k] = sum;
474:     sum += ctx->range_j_re[k];
475:   }

477:   sum = 0;
478:   for (k=0; k<ctx->Pp_re; k++) {
479:     ctx->start_k_re[k] = sum;
480:     sum += ctx->range_k_re[k];
481:   }

483:   /* attach repartitioned dm to child ksp */
484:   {
485:     PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
486:     void           *dmksp_ctx;

488:     DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);

490:     /* attach dm to ksp on sub communicator */
491:     if (PCTelescope_isActiveRank(sred)) {
492:       KSPSetDM(sred->ksp,ctx->dmrepart);

494:       if (!dmksp_func || sred->ignore_kspcomputeoperators) {
495:         KSPSetDMActive(sred->ksp,PETSC_FALSE);
496:       } else {
497:         /* sub ksp inherits dmksp_func and context provided by user */
498:         KSPSetComputeOperators(sred->ksp,dmksp_func,dmksp_ctx);
499:         KSPSetDMActive(sred->ksp,PETSC_TRUE);
500:       }
501:     }
502:   }
503:   return(0);
504: }

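     /*
      Build the permutation used to map vectors between the ordering of the parent
      DMDA and that of the repartitioned (3D) DMDA. A scalar matrix with a single
      unit entry per row is assembled, sending each locally owned point to its
      position in the repartitioned ordering; it is then expanded to ndof
      components with MatCreateMAIJ().
     */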
506: PetscErrorCode PCTelescopeSetUp_dmda_permutation_3d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
507: {
509:   DM             dm;
510:   MPI_Comm       comm;
511:   Mat            Pscalar,P;
512:   PetscInt       ndof;
513:   PetscInt       i,j,k,location,startI[3],endI[3],lenI[3],nx,ny,nz;
514:   PetscInt       sr,er,Mr;
515:   Vec            V;

518:   PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-3D)\n");
519:   PetscObjectGetComm((PetscObject)pc,&comm);

521:   PCGetDM(pc,&dm);
522:   DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);

524:   DMGetGlobalVector(dm,&V);
525:   VecGetSize(V,&Mr);
526:   VecGetOwnershipRange(V,&sr,&er);
527:   DMRestoreGlobalVector(dm,&V);
528:   sr = sr / ndof;
529:   er = er / ndof;
530:   Mr = Mr / ndof;

532:   MatCreate(comm,&Pscalar);
533:   MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
534:   MatSetType(Pscalar,MATAIJ);
535:   MatSeqAIJSetPreallocation(Pscalar,1,NULL);
536:   MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

538:   DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],&lenI[2]);
539:   DMDAGetCorners(dm,&startI[0],&startI[1],&startI[2],&endI[0],&endI[1],&endI[2]);
540:   endI[0] += startI[0];
541:   endI[1] += startI[1];
542:   endI[2] += startI[2];

544:   for (k=startI[2]; k<endI[2]; k++) {
545:     for (j=startI[1]; j<endI[1]; j++) {
546:       for (i=startI[0]; i<endI[0]; i++) {
547:         PetscMPIInt rank_ijk_re,rank_reI[3];
548:         PetscInt    s0_re;
549:         PetscInt    ii,jj,kk,local_ijk_re,mapped_ijk;
550:         PetscInt    lenI_re[3];

552:         location = (i - startI[0]) + (j - startI[1])*lenI[0] + (k - startI[2])*lenI[0]*lenI[1];
553:         _DMDADetermineRankFromGlobalIJK(3,i,j,k,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
554:                                                ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
555:                                                ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
556:                                                &rank_reI[0],&rank_reI[1],&rank_reI[2],&rank_ijk_re);
557:         _DMDADetermineGlobalS0(3,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);
558:         ii = i - ctx->start_i_re[ rank_reI[0] ];
559:         if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error ii");
560:         jj = j - ctx->start_j_re[ rank_reI[1] ];
561:         if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error jj");
562:         kk = k - ctx->start_k_re[ rank_reI[2] ];
563:         if (kk < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error kk");
564:         lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
565:         lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
566:         lenI_re[2] = ctx->range_k_re[ rank_reI[2] ];
567:         local_ijk_re = ii + jj * lenI_re[0] + kk * lenI_re[0] * lenI_re[1];
568:         mapped_ijk = s0_re + local_ijk_re;
569:         MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
570:       }
571:     }
572:   }
573:   MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
574:   MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
575:   MatCreateMAIJ(Pscalar,ndof,&P);
576:   MatDestroy(&Pscalar);
577:   ctx->permutation = P;
578:   return(0);
579: }

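     /* 2D variant of the permutation-matrix construction above. */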
581: PetscErrorCode PCTelescopeSetUp_dmda_permutation_2d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
582: {
584:   DM             dm;
585:   MPI_Comm       comm;
586:   Mat            Pscalar,P;
587:   PetscInt       ndof;
588:   PetscInt       i,j,location,startI[2],endI[2],lenI[2],nx,ny,nz;
589:   PetscInt       sr,er,Mr;
590:   Vec            V;

593:   PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-2D)\n");
594:   PetscObjectGetComm((PetscObject)pc,&comm);
595:   PCGetDM(pc,&dm);
596:   DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
597:   DMGetGlobalVector(dm,&V);
598:   VecGetSize(V,&Mr);
599:   VecGetOwnershipRange(V,&sr,&er);
600:   DMRestoreGlobalVector(dm,&V);
601:   sr = sr / ndof;
602:   er = er / ndof;
603:   Mr = Mr / ndof;

605:   MatCreate(comm,&Pscalar);
606:   MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
607:   MatSetType(Pscalar,MATAIJ);
608:   MatSeqAIJSetPreallocation(Pscalar,1,NULL);
609:   MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

611:   DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
612:   DMDAGetCorners(dm,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
613:   endI[0] += startI[0];
614:   endI[1] += startI[1];

616:   for (j=startI[1]; j<endI[1]; j++) {
617:     for (i=startI[0]; i<endI[0]; i++) {
618:       PetscMPIInt rank_ijk_re,rank_reI[3];
619:       PetscInt    s0_re;
620:       PetscInt    ii,jj,local_ijk_re,mapped_ijk;
621:       PetscInt    lenI_re[3];

623:       location = (i - startI[0]) + (j - startI[1])*lenI[0];
624:       _DMDADetermineRankFromGlobalIJK(2,i,j,0,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
625:                                              ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
626:                                              ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
627:                                              &rank_reI[0],&rank_reI[1],NULL,&rank_ijk_re);

629:       _DMDADetermineGlobalS0(2,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);

631:       ii = i - ctx->start_i_re[ rank_reI[0] ];
632:       if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error ii");
633:       jj = j - ctx->start_j_re[ rank_reI[1] ];
634:       if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error jj");

636:       lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
637:       lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
638:       local_ijk_re = ii + jj * lenI_re[0];
639:       mapped_ijk = s0_re + local_ijk_re;
640:       MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
641:     }
642:   }
643:   MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
644:   MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
645:   MatCreateMAIJ(Pscalar,ndof,&P);
646:   MatDestroy(&Pscalar);
647:   ctx->permutation = P;
648:   return(0);
649: }

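     /*
      Create the scatter that moves entries between vectors on the parent
      communicator and the redundant vectors (xred/yred) on the child communicator.
      Each active rank selects the segment of the (permuted) parent vector matching
      the ownership range of xred; inactive ranks contribute an empty index set.
     */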
651: PetscErrorCode PCTelescopeSetUp_dmda_scatters(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
652: {
654:   Vec            xred,yred,xtmp,x,xp;
655:   VecScatter     scatter;
656:   IS             isin;
657:   Mat            B;
658:   PetscInt       m,bs,st,ed;
659:   MPI_Comm       comm;

662:   PetscObjectGetComm((PetscObject)pc,&comm);
663:   PCGetOperators(pc,NULL,&B);
664:   MatCreateVecs(B,&x,NULL);
665:   MatGetBlockSize(B,&bs);
666:   VecDuplicate(x,&xp);
667:   m = 0;
668:   xred = NULL;
669:   yred = NULL;
670:   if (PCTelescope_isActiveRank(sred)) {
671:     DMCreateGlobalVector(ctx->dmrepart,&xred);
672:     VecDuplicate(xred,&yred);
673:     VecGetOwnershipRange(xred,&st,&ed);
674:     ISCreateStride(comm,ed-st,st,1,&isin);
675:     VecGetLocalSize(xred,&m);
676:   } else {
677:     VecGetOwnershipRange(x,&st,&ed);
678:     ISCreateStride(comm,0,st,1,&isin);
679:   }
680:   ISSetBlockSize(isin,bs);
681:   VecCreate(comm,&xtmp);
682:   VecSetSizes(xtmp,m,PETSC_DECIDE);
683:   VecSetBlockSize(xtmp,bs);
684:   VecSetType(xtmp,((PetscObject)x)->type_name);
685:   VecScatterCreate(x,isin,xtmp,NULL,&scatter);
686:   sred->xred    = xred;
687:   sred->yred    = yred;
688:   sred->isin    = isin;
689:   sred->scatter = scatter;
690:   sred->xtmp    = xtmp;

692:   ctx->xp       = xp;
693:   VecDestroy(&x);
694:   return(0);
695: }

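     /*
      Top-level DMDA setup: create the repartitioned DMDA, transfer the coordinates
      onto it, build the permutation matrix for the given dimension, and create the
      scatters used when applying the preconditioner.
     */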
697: PetscErrorCode PCTelescopeSetUp_dmda(PC pc,PC_Telescope sred)
698: {
699:   PC_Telescope_DMDACtx *ctx;
700:   PetscInt             dim;
701:   DM                   dm;
702:   MPI_Comm             comm;

706:   PetscInfo(pc,"PCTelescope: setup (DMDA)\n");
707:   PetscNew(&ctx);
708:   sred->dm_ctx = (void*)ctx;

710:   PetscObjectGetComm((PetscObject)pc,&comm);
711:   PCGetDM(pc,&dm);
712:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

714:   PCTelescopeSetUp_dmda_repart(pc,sred,ctx);
715:   PCTelescopeSetUp_dmda_repart_coors(pc,sred,ctx);
716:   switch (dim) {
717:   case 1:
718:     SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
719:   case 2: PCTelescopeSetUp_dmda_permutation_2d(pc,sred,ctx);
720:     break;
721:   case 3: PCTelescopeSetUp_dmda_permutation_3d(pc,sred,ctx);
722:     break;
723:   }
724:   PCTelescopeSetUp_dmda_scatters(pc,sred,ctx);
725:   return(0);
726: }

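     /*
      Explicitly assemble the redundant operator when no callback from
      KSPSetComputeOperators() is available (or it is being ignored): permute the
      parent operator with MatPtAP(), extract the locally owned rows, and
      concatenate the sequential pieces into a parallel matrix on the child
      communicator.
     */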
728: PetscErrorCode PCTelescopeMatCreate_dmda_dmactivefalse(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
729: {
731:   PC_Telescope_DMDACtx *ctx;
732:   MPI_Comm             comm,subcomm;
733:   Mat                  Bperm,Bred,B,P;
734:   PetscInt             nr,nc;
735:   IS                   isrow,iscol;
736:   Mat                  Blocal,*_Blocal;

739:   PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (DMDA)\n");
740:   PetscObjectGetComm((PetscObject)pc,&comm);
741:   subcomm = PetscSubcommChild(sred->psubcomm);
742:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;

744:   PCGetOperators(pc,NULL,&B);
745:   MatGetSize(B,&nr,&nc);

747:   P = ctx->permutation;
748:   MatPtAP(B,P,MAT_INITIAL_MATRIX,1.1,&Bperm);

750:   /* Get submatrices */
751:   isrow = sred->isin;
752:   ISCreateStride(comm,nc,0,1,&iscol);

754:   MatCreateSubMatrices(Bperm,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
755:   Blocal = *_Blocal;
756:   Bred = NULL;
757:   if (PCTelescope_isActiveRank(sred)) {
758:     PetscInt mm;

760:     if (reuse != MAT_INITIAL_MATRIX) {Bred = *A;}
761:     MatGetSize(Blocal,&mm,NULL);
762:     /* MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,PETSC_DECIDE,reuse,&Bred); */
763:     MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
764:   }
765:   *A = Bred;

767:   ISDestroy(&iscol);
768:   MatDestroy(&Bperm);
769:   MatDestroyMatrices(1,&_Blocal);
770:   return(0);
771: }

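     /*
      Create the operator for the sub-KSP. If the user provided a callback via
      KSPSetComputeOperators(), only allocate the matrix with DMCreateMatrix() and
      let the sub-KSP assemble it during KSPSetUp(); otherwise fall back to the
      explicit assembly path above.
     */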
773: PetscErrorCode PCTelescopeMatCreate_dmda(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
774: {
776:   DM             dm;
777:   PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
778:   void           *dmksp_ctx;

781:   PCGetDM(pc,&dm);
782:   DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);
 783:   /* We assume that dmksp_func == NULL is equivalent to dmActive = PETSC_FALSE */
784:   if (dmksp_func && !sred->ignore_kspcomputeoperators) {
785:     DM  dmrepart;
786:     Mat Ak;

788:     *A = NULL;
789:     if (PCTelescope_isActiveRank(sred)) {
790:       KSPGetDM(sred->ksp,&dmrepart);
791:       if (reuse == MAT_INITIAL_MATRIX) {
792:         DMCreateMatrix(dmrepart,&Ak);
793:         *A = Ak;
794:       } else if (reuse == MAT_REUSE_MATRIX) {
795:         Ak = *A;
796:       }
797:       /*
798:        There is no need to explicitly assemble the operator now,
799:        the sub-KSP will call the method provided to KSPSetComputeOperators() during KSPSetUp()
800:       */
801:     }
802:   } else {
803:     PCTelescopeMatCreate_dmda_dmactivefalse(pc,sred,reuse,A);
804:   }
805:   return(0);
806: }

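     /*
      Push each vector of a (near) nullspace of the parent operator through the
      permutation and scatter, and create the corresponding (near) nullspace for
      the redundant operator on the child communicator.
     */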
808: PetscErrorCode PCTelescopeSubNullSpaceCreate_dmda_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
809: {
811:   PetscBool            has_const;
812:   PetscInt             i,k,n = 0;
813:   const Vec            *vecs;
814:   Vec                  *sub_vecs = NULL;
815:   MPI_Comm             subcomm;
816:   PC_Telescope_DMDACtx *ctx;

819:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
820:   subcomm = PetscSubcommChild(sred->psubcomm);
821:   MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

823:   if (PCTelescope_isActiveRank(sred)) {
824:     /* create new vectors */
825:     if (n) {
826:       VecDuplicateVecs(sred->xred,n,&sub_vecs);
827:     }
828:   }

830:   /* copy entries */
831:   for (k=0; k<n; k++) {
832:     const PetscScalar *x_array;
833:     PetscScalar       *LA_sub_vec;
834:     PetscInt          st,ed;

836:     /* permute vector into ordering associated with re-partitioned dmda */
837:     MatMultTranspose(ctx->permutation,vecs[k],ctx->xp);

839:     /* pull in vector x->xtmp */
840:     VecScatterBegin(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
841:     VecScatterEnd(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);

843:     /* copy vector entries into xred */
844:     VecGetArrayRead(sred->xtmp,&x_array);
845:     if (sub_vecs) {
846:       if (sub_vecs[k]) {
847:         VecGetOwnershipRange(sub_vecs[k],&st,&ed);
848:         VecGetArray(sub_vecs[k],&LA_sub_vec);
849:         for (i=0; i<ed-st; i++) {
850:           LA_sub_vec[i] = x_array[i];
851:         }
852:         VecRestoreArray(sub_vecs[k],&LA_sub_vec);
853:       }
854:     }
855:     VecRestoreArrayRead(sred->xtmp,&x_array);
856:   }

858:   if (PCTelescope_isActiveRank(sred)) {
859:     /* create new (near) nullspace for redundant object */
860:     MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);
861:     VecDestroyVecs(n,&sub_vecs);
862:     if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
863:     if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
864:   }
865:   return(0);
866: }

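     /* Propagate the nullspace and near nullspace (if any) attached to the parent
        operator onto the redundant operator sub_mat. */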
868: PetscErrorCode PCTelescopeMatNullSpaceCreate_dmda(PC pc,PC_Telescope sred,Mat sub_mat)
869: {
871:   Mat            B;

874:   PCGetOperators(pc,NULL,&B);
875:   {
876:     MatNullSpace nullspace,sub_nullspace;
877:     MatGetNullSpace(B,&nullspace);
878:     if (nullspace) {
879:       PetscInfo(pc,"PCTelescope: generating nullspace (DMDA)\n");
880:       PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc,sred,nullspace,&sub_nullspace);
881:       if (PCTelescope_isActiveRank(sred)) {
882:         MatSetNullSpace(sub_mat,sub_nullspace);
883:         MatNullSpaceDestroy(&sub_nullspace);
884:       }
885:     }
886:   }
887:   {
888:     MatNullSpace nearnullspace,sub_nearnullspace;
889:     MatGetNearNullSpace(B,&nearnullspace);
890:     if (nearnullspace) {
891:       PetscInfo(pc,"PCTelescope: generating near nullspace (DMDA)\n");
892:       PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);
893:       if (PCTelescope_isActiveRank(sred)) {
894:         MatSetNearNullSpace(sub_mat,sub_nearnullspace);
895:         MatNullSpaceDestroy(&sub_nearnullspace);
896:       }
897:     }
898:   }
899:   return(0);
900: }

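     /*
      Apply the preconditioner: permute x into the ordering of the repartitioned
      DMDA, scatter it into the redundant vector xred, solve with the sub-KSP on
      the active ranks, then scatter and permute the solution back into y.
     */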
902: PetscErrorCode PCApply_Telescope_dmda(PC pc,Vec x,Vec y)
903: {
904:   PC_Telescope         sred = (PC_Telescope)pc->data;
906:   Mat                  perm;
907:   Vec                  xtmp,xp,xred,yred;
908:   PetscInt             i,st,ed;
909:   VecScatter           scatter;
910:   PetscScalar          *array;
911:   const PetscScalar    *x_array;
912:   PC_Telescope_DMDACtx *ctx;

914:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
915:   xtmp    = sred->xtmp;
916:   scatter = sred->scatter;
917:   xred    = sred->xred;
918:   yred    = sred->yred;
919:   perm    = ctx->permutation;
920:   xp      = ctx->xp;

923:   PetscCitationsRegister(citation,&cited);

925:   /* permute vector into ordering associated with re-partitioned dmda */
926:   MatMultTranspose(perm,x,xp);

928:   /* pull in vector x->xtmp */
929:   VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
930:   VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

932:   /* copy vector entries into xred */
933:   VecGetArrayRead(xtmp,&x_array);
934:   if (xred) {
935:     PetscScalar *LA_xred;
936:     VecGetOwnershipRange(xred,&st,&ed);

938:     VecGetArray(xred,&LA_xred);
939:     for (i=0; i<ed-st; i++) {
940:       LA_xred[i] = x_array[i];
941:     }
942:     VecRestoreArray(xred,&LA_xred);
943:   }
944:   VecRestoreArrayRead(xtmp,&x_array);

946:   /* solve */
947:   if (PCTelescope_isActiveRank(sred)) {
948:     KSPSolve(sred->ksp,xred,yred);
949:     KSPCheckSolve(sred->ksp,pc,yred);
950:   }

952:   /* return vector */
953:   VecGetArray(xtmp,&array);
954:   if (yred) {
955:     const PetscScalar *LA_yred;
956:     VecGetOwnershipRange(yred,&st,&ed);
957:     VecGetArrayRead(yred,&LA_yred);
958:     for (i=0; i<ed-st; i++) {
959:       array[i] = LA_yred[i];
960:     }
961:     VecRestoreArrayRead(yred,&LA_yred);
962:   }
963:   VecRestoreArray(xtmp,&array);
964:   VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
965:   VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
966:   MatMult(perm,xp,y);
967:   return(0);
968: }

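     /*
      Richardson wrapper around PCApply_Telescope_dmda(). For a nonzero initial
      guess, the current iterate y is first transferred into the sub-KSP solution
      vector; only a single iteration (max_it = 1) is supported.
     */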
970: PetscErrorCode PCApplyRichardson_Telescope_dmda(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
971: {
972:   PC_Telescope         sred = (PC_Telescope)pc->data;
974:   Mat                  perm;
975:   Vec                  xtmp,xp,yred;
976:   PetscInt             i,st,ed;
977:   VecScatter           scatter;
978:   const PetscScalar    *x_array;
979:   PetscBool            default_init_guess_value = PETSC_FALSE;
980:   PC_Telescope_DMDACtx *ctx;

983:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
984:   xtmp    = sred->xtmp;
985:   scatter = sred->scatter;
986:   yred    = sred->yred;
987:   perm    = ctx->permutation;
988:   xp      = ctx->xp;

990:   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope_dmda only supports max_it = 1");
991:   *reason = (PCRichardsonConvergedReason)0;

993:   if (!zeroguess) {
 994:     PetscInfo(pc,"PCTelescopeDMDA: Scattering y for nonzero initial guess\n");
995:     /* permute vector into ordering associated with re-partitioned dmda */
996:     MatMultTranspose(perm,y,xp);

998:     /* pull in vector x->xtmp */
999:     VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
1000:     VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

1002:     /* copy vector entries into xred */
1003:     VecGetArrayRead(xtmp,&x_array);
1004:     if (yred) {
1005:       PetscScalar *LA_yred;
1006:       VecGetOwnershipRange(yred,&st,&ed);
1007:       VecGetArray(yred,&LA_yred);
1008:       for (i=0; i<ed-st; i++) {
1009:         LA_yred[i] = x_array[i];
1010:       }
1011:       VecRestoreArray(yred,&LA_yred);
1012:     }
1013:     VecRestoreArrayRead(xtmp,&x_array);
1014:   }

1016:   if (PCTelescope_isActiveRank(sred)) {
1017:     KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
1018:     if (!zeroguess) {KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);}
1019:   }

1021:   PCApply_Telescope_dmda(pc,x,y);

1023:   if (PCTelescope_isActiveRank(sred)) {
1024:     KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
1025:   }

1027:   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
1028:   *outits = 1;
1029:   return(0);
1030: }

1032: PetscErrorCode PCReset_Telescope_dmda(PC pc)
1033: {
1035:   PC_Telescope         sred = (PC_Telescope)pc->data;
1036:   PC_Telescope_DMDACtx *ctx;

1039:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
1040:   VecDestroy(&ctx->xp);
1041:   MatDestroy(&ctx->permutation);
1042:   DMDestroy(&ctx->dmrepart);
1043:   PetscFree3(ctx->range_i_re,ctx->range_j_re,ctx->range_k_re);
1044:   PetscFree3(ctx->start_i_re,ctx->start_j_re,ctx->start_k_re);
1045:   return(0);
1046: }

1048: PetscErrorCode DMView_DA_Short_3d(DM dm,PetscViewer v)
1049: {
1050:   PetscInt       M,N,P,m,n,p,ndof,nsw;
1051:   MPI_Comm       comm;
1052:   PetscMPIInt    size;
1053:   const char*    prefix;

1057:   PetscObjectGetComm((PetscObject)dm,&comm);
1058:   MPI_Comm_size(comm,&size);
1059:   DMGetOptionsPrefix(dm,&prefix);
1060:   DMDAGetInfo(dm,NULL,&M,&N,&P,&m,&n,&p,&ndof,&nsw,NULL,NULL,NULL,NULL);
1061:   if (prefix) {PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);}
1062:   else {PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);}
1063:   PetscViewerASCIIPrintf(v,"  M %D N %D P %D m %D n %D p %D dof %D overlap %D\n",M,N,P,m,n,p,ndof,nsw);
1064:   return(0);
1065: }

1067: PetscErrorCode DMView_DA_Short_2d(DM dm,PetscViewer v)
1068: {
1069:   PetscInt       M,N,m,n,ndof,nsw;
1070:   MPI_Comm       comm;
1071:   PetscMPIInt    size;
1072:   const char*    prefix;

1076:   PetscObjectGetComm((PetscObject)dm,&comm);
1077:   MPI_Comm_size(comm,&size);
1078:   DMGetOptionsPrefix(dm,&prefix);
1079:   DMDAGetInfo(dm,NULL,&M,&N,NULL,&m,&n,NULL,&ndof,&nsw,NULL,NULL,NULL,NULL);
1080:   if (prefix) {PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);}
1081:   else {PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);}
1082:   PetscViewerASCIIPrintf(v,"  M %D N %D m %D n %D dof %D overlap %D\n",M,N,m,n,ndof,nsw);
1083:   return(0);
1084: }

1086: PetscErrorCode DMView_DA_Short(DM dm,PetscViewer v)
1087: {
1089:   PetscInt       dim;

1092:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
1093:   switch (dim) {
1094:   case 2: DMView_DA_Short_2d(dm,v);
1095:     break;
1096:   case 3: DMView_DA_Short_3d(dm,v);
1097:     break;
1098:   }
1099:   return(0);
1100: }