Actual source code: pipes1.c
petsc-3.14.3 2021-01-09
static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistribute. \n\n";

/*
  Example: mpiexec -n <np> ./pipes1 -ts_max_steps 10
*/

#include "wash.h"
#include <petscdmplex.h>
/*
  WashNetworkDistribute - proc[0] distributes sequential wash object

  Input Parameters:
. comm - MPI communicator
. wash - wash context with all network data in proc[0]

  Output Parameter:
. wash - wash context with nedge, nvertex and edgelist distributed

  Note: The routine is used for testing parallel generation of dmnetwork, then redistribute.
*/
PetscErrorCode WashNetworkDistribute(MPI_Comm comm,Wash wash)
{
  PetscMPIInt rank,size,tag=0;
  PetscInt    i,e,v,numEdges,numVertices,nedges,*eowners=NULL,estart,eend,*vtype=NULL,nvertices;
  PetscInt    *edgelist = wash->edgelist,*nvtx=NULL,*vtxDone=NULL;

  MPI_Comm_size(comm,&size);
  if (size == 1) return(0);

  MPI_Comm_rank(comm,&rank);
  numEdges    = wash->nedge;
  numVertices = wash->nvertex;

  /* (1) all processes get the global and local number of edges */
  MPI_Bcast(&numEdges,1,MPIU_INT,0,comm);
  nedges = numEdges/size; /* local nedges */
  if (!rank) {
    nedges += numEdges - size*(numEdges/size); /* proc[0] takes the remainder */
  }
  wash->Nedge = numEdges;
  wash->nedge = nedges;
  /* PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges); */

  PetscCalloc3(size+1,&eowners,size,&nvtx,numVertices,&vtxDone);
  MPI_Allgather(&nedges,1,MPIU_INT,eowners+1,1,MPIU_INT,comm);
  eowners[0] = 0;
  for (i=2; i<=size; i++) {
    eowners[i] += eowners[i-1];
  }
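  /* eowners[] now holds a prefix sum: rank r owns the edge range [eowners[r], eowners[r+1]).
     Worked example (illustration only): numEdges=10 on size=3 ranks gives local nedges of
     4,3,3 (proc[0] absorbs the remainder), so eowners = {0,4,7,10}. */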
  estart = eowners[rank];
  eend   = eowners[rank+1];
  /* PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend); */
  /* (2) distribute row block edgelist to all processors */
  if (!rank) {
    vtype = wash->vtype;
    for (i=1; i<size; i++) {
      /* proc[0] sends edgelist to proc[i] */
      MPI_Send(edgelist+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm);

      /* proc[0] sends vtype to proc[i] */
      MPI_Send(vtype+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm);
    }
  } else {
    MPI_Status status;
    PetscMalloc1(2*(eend-estart),&vtype);
    PetscMalloc1(2*(eend-estart),&edgelist);

    MPI_Recv(edgelist,2*(eend-estart),MPIU_INT,0,tag,comm,&status);
    MPI_Recv(vtype,2*(eend-estart),MPIU_INT,0,tag,comm,&status);
  }
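  /* Reusing the same tag for both messages is safe: MPI guarantees point-to-point messages
     between a given sender/receiver pair are non-overtaking, so the first receive always
     matches the edgelist send and the second matches the vtype send. */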
  wash->edgelist = edgelist;

  /* (3) all processes get the global and local number of vertices, without ghost vertices */
  if (!rank) {
    for (i=0; i<size; i++) {
      for (e=eowners[i]; e<eowners[i+1]; e++) {
        v = edgelist[2*e];
        if (!vtxDone[v]) {
          nvtx[i]++; vtxDone[v] = 1;
        }
        v = edgelist[2*e+1];
        if (!vtxDone[v]) {
          nvtx[i]++; vtxDone[v] = 1;
        }
      }
    }
  }
  MPI_Bcast(&numVertices,1,MPIU_INT,0,comm);
  MPI_Scatter(nvtx,1,MPIU_INT,&nvertices,1,MPIU_INT,0,comm);
  PetscFree3(eowners,nvtx,vtxDone);

  wash->Nvertex = numVertices;
  wash->nvertex = nvertices;
  wash->vtype   = vtype;
  return(0);
}
PetscErrorCode WASHIFunction(TS ts,PetscReal t,Vec X,Vec Xdot,Vec F,void* ctx)
{
  Wash              wash=(Wash)ctx;
  DM                networkdm;
  Vec               localX,localXdot,localF,localXold;
  const PetscInt    *cone;
  PetscInt          vfrom,vto,offsetfrom,offsetto,varoffset;
  PetscInt          v,vStart,vEnd,e,eStart,eEnd;
  PetscInt          nend,type;
  PetscBool         ghost;
  PetscScalar       *farr,*juncf,*pipef;
  PetscReal         dt;
  Pipe              pipe;
  PipeField         *pipex,*pipexdot,*juncx;
  Junction          junction;
  DMDALocalInfo     info;
  const PetscScalar *xarr,*xdotarr,*xoldarr;

  localX    = wash->localX;
  localXdot = wash->localXdot;

  TSGetSolution(ts,&localXold);
  TSGetDM(ts,&networkdm);
  TSGetTimeStep(ts,&dt);
  DMGetLocalVector(networkdm,&localF);

  /* Set F and localF to zero */
  VecSet(F,0.0);
  VecSet(localF,0.0);

  /* Update ghost values of localX and localXdot */
  DMGlobalToLocalBegin(networkdm,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(networkdm,X,INSERT_VALUES,localX);

  DMGlobalToLocalBegin(networkdm,Xdot,INSERT_VALUES,localXdot);
  DMGlobalToLocalEnd(networkdm,Xdot,INSERT_VALUES,localXdot);

  VecGetArrayRead(localX,&xarr);
  VecGetArrayRead(localXdot,&xdotarr);
  VecGetArrayRead(localXold,&xoldarr);
  VecGetArray(localF,&farr);
  /* junction->type == JUNCTION:
       juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout)
     junction->type == RESERVOIR (upstream):
       juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout)
     junction->type == VALVE (downstream):
       juncf[0] = -qJ + sum(qin); juncf[1] = qJ
  */
  /* Vertex/junction initialization */
  DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);
  for (v=vStart; v<vEnd; v++) {
    DMNetworkIsGhostVertex(networkdm,v,&ghost);
    if (ghost) continue;

    DMNetworkGetComponent(networkdm,v,0,&type,(void**)&junction);
    DMNetworkGetVariableOffset(networkdm,v,&varoffset);
    juncx = (PipeField*)(xarr + varoffset);
    juncf = (PetscScalar*)(farr + varoffset);

    juncf[0] = -juncx[0].q;
    juncf[1] = juncx[0].q;

    if (junction->type == RESERVOIR) { /* upstream reservoir */
      juncf[0] = juncx[0].h - wash->H0;
    }
  }
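  /* At this point juncf holds only the vertex-local terms; the sum(qin)/sum(qout)
     contributions are accumulated below while looping over the connected edges. */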
  /* Edge/pipe */
  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  for (e=eStart; e<eEnd; e++) {
    DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe);
    DMNetworkGetVariableOffset(networkdm,e,&varoffset);
    pipex    = (PipeField*)(xarr + varoffset);
    pipexdot = (PipeField*)(xdotarr + varoffset);
    pipef    = (PetscScalar*)(farr + varoffset);

    /* Copy some data into the pipe structure; note that some of these assignments
       may be redundant -- revisit if they show up in profiling */
    pipe->dt   = dt;
    pipe->xold = (PipeField*)(xoldarr + varoffset);

    /* Evaluate F over this edge/pipe: pipef[1], ..., pipef[2*nend] */
    DMDAGetLocalInfo(pipe->da,&info);
    PipeIFunctionLocal_Lax(&info,t,pipex,pipexdot,pipef,pipe);

    /* Get boundary values from connected vertices */
    DMNetworkGetConnectedVertices(networkdm,e,&cone);
    vfrom = cone[0]; /* local ordering */
    vto   = cone[1];
    DMNetworkGetVariableOffset(networkdm,vfrom,&offsetfrom);
    DMNetworkGetVariableOffset(networkdm,vto,&offsetto);

    /* Evaluate upstream boundary */
    DMNetworkGetComponent(networkdm,vfrom,0,&type,(void**)&junction);
    if (junction->type != JUNCTION && junction->type != RESERVOIR) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
    juncx = (PipeField*)(xarr + offsetfrom);
    juncf = (PetscScalar*)(farr + offsetfrom);

    pipef[0]  = pipex[0].h - juncx[0].h;
    juncf[1] -= pipex[0].q;

    /* Evaluate downstream boundary */
    DMNetworkGetComponent(networkdm,vto,0,&type,(void**)&junction);
    if (junction->type != JUNCTION && junction->type != VALVE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
    juncx = (PipeField*)(xarr + offsetto);
    juncf = (PetscScalar*)(farr + offsetto);
    nend  = pipe->nnodes - 1;

    pipef[2*nend + 1] = pipex[nend].h - juncx[0].h;
    juncf[0]         += pipex[nend].q;
  }
  VecRestoreArrayRead(localX,&xarr);
  VecRestoreArrayRead(localXdot,&xdotarr);
  VecRestoreArrayRead(localXold,&xoldarr);
  VecRestoreArray(localF,&farr);
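  /* ADD_VALUES sums the local residual contributions, so flows from pipes on different
     ranks that share a (ghosted) junction are accumulated correctly into the global F. */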
  DMLocalToGlobalBegin(networkdm,localF,ADD_VALUES,F);
  DMLocalToGlobalEnd(networkdm,localF,ADD_VALUES,F);
  DMRestoreLocalVector(networkdm,&localF);
  /*
   PetscPrintf(PETSC_COMM_WORLD,"F:\n");
   VecView(F,PETSC_VIEWER_STDOUT_WORLD);
  */
  return(0);
}
PetscErrorCode WASHSetInitialSolution(DM networkdm,Vec X,Wash wash)
{
  PetscInt          k,nx,vkey,vfrom,vto,offsetfrom,offsetto;
  PetscInt          type,varoffset;
  PetscInt          e,eStart,eEnd;
  Vec               localX;
  PetscScalar       *xarr;
  Pipe              pipe;
  Junction          junction;
  const PetscInt    *cone;
  const PetscScalar *xarray;

  VecSet(X,0.0);
  DMGetLocalVector(networkdm,&localX);
  VecGetArray(localX,&xarr);
  /* Edge */
  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  for (e=eStart; e<eEnd; e++) {
    DMNetworkGetVariableOffset(networkdm,e,&varoffset);
    DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe);

    /* set initial values for this pipe */
    PipeComputeSteadyState(pipe,wash->Q0,wash->H0);
    VecGetSize(pipe->x,&nx);

    VecGetArrayRead(pipe->x,&xarray);
    /* copy pipe->x to xarray */
    for (k=0; k<nx; k++) {
      (xarr+varoffset)[k] = xarray[k];
    }

    /* set boundary values into vfrom and vto */
    DMNetworkGetConnectedVertices(networkdm,e,&cone);
    vfrom = cone[0]; /* local ordering */
    vto   = cone[1];
    DMNetworkGetVariableOffset(networkdm,vfrom,&offsetfrom);
    DMNetworkGetVariableOffset(networkdm,vto,&offsetto);

    /* if vfrom is a head vertex: */
    DMNetworkGetComponent(networkdm,vfrom,0,&vkey,(void**)&junction);
    if (junction->type == RESERVOIR) {
      (xarr+offsetfrom)[1] = wash->H0; /* 1st H */
    }

    /* if vto is an end vertex: */
    DMNetworkGetComponent(networkdm,vto,0,&vkey,(void**)&junction);
    if (junction->type == VALVE) {
      (xarr+offsetto)[0] = wash->QL; /* last Q */
    }
    VecRestoreArrayRead(pipe->x,&xarray);
  }

  VecRestoreArray(localX,&xarr);
  DMLocalToGlobalBegin(networkdm,localX,ADD_VALUES,X);
  DMLocalToGlobalEnd(networkdm,localX,ADD_VALUES,X);
  DMRestoreLocalVector(networkdm,&localX);

#if 0
  PetscInt N;
  VecGetSize(X,&N);
  PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N);
  VecView(X,PETSC_VIEWER_STDOUT_WORLD);
#endif
  return(0);
}
PetscErrorCode TSDMNetworkMonitor(TS ts,PetscInt step,PetscReal t,Vec x,void *context)
{
  DMNetworkMonitor monitor = (DMNetworkMonitor)context;

  DMNetworkMonitorView(monitor,x);
  return(0);
}
PetscErrorCode PipesView(Vec X,DM networkdm,Wash wash)
{
  Pipe        pipe;
  PetscInt    key,Start,End;
  PetscMPIInt rank;
  PetscInt    nx,nnodes,nidx,*idx1,*idx2,*idx1_h,*idx2_h,idx_start,i,k,k1,xstart,j1;
  Vec         Xq,Xh,localX;
  IS          is1_q,is2_q,is1_h,is2_h;
  VecScatter  ctx_q,ctx_h;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /* get the local and global total number of pipe nodes */
  nidx = wash->nnodes_loc;
  MPIU_Allreduce(&nidx,&nx,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);

  VecCreate(PETSC_COMM_WORLD,&Xq);
  if (rank == 0) { /* all entries of Xq are stored on proc[0] */
    VecSetSizes(Xq,nx,PETSC_DECIDE);
  } else {
    VecSetSizes(Xq,0,PETSC_DECIDE);
  }
  VecSetFromOptions(Xq);
  VecSet(Xq,0.0);
  VecDuplicate(Xq,&Xh);

  DMGetLocalVector(networkdm,&localX);
  /* set idx1 and idx2 */
  PetscCalloc4(nidx,&idx1,nidx,&idx2,nidx,&idx1_h,nidx,&idx2_h);

  DMNetworkGetEdgeRange(networkdm,&Start,&End);

  VecGetOwnershipRange(X,&xstart,NULL);
  k1 = 0;
  j1 = 0;
  for (i = Start; i < End; i++) {
    DMNetworkGetComponent(networkdm,i,0,&key,(void**)&pipe);
    nnodes    = pipe->nnodes;
    idx_start = pipe->id*nnodes;
    for (k=0; k<nnodes; k++) {
      idx1[k1] = xstart + j1*2*nnodes + 2*k;
      idx2[k1] = idx_start + k;

      idx1_h[k1] = xstart + j1*2*nnodes + 2*k + 1;
      idx2_h[k1] = idx_start + k;
      k1++;
    }
    j1++;
  }
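  /* Each edge block of X interlaces (q,h) pairs per node, so within a pipe block q sits at
     even offsets and h at odd offsets; idx1/idx1_h index into X while idx2/idx2_h place the
     values into the serial Xq/Xh ordered by pipe id. Note this indexing assumes every pipe
     has the same nnodes, since idx_start = pipe->id*nnodes. */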
  ISCreateGeneral(PETSC_COMM_SELF,nidx,idx1,PETSC_COPY_VALUES,&is1_q);
  ISCreateGeneral(PETSC_COMM_SELF,nidx,idx2,PETSC_COPY_VALUES,&is2_q);
  VecScatterCreate(X,is1_q,Xq,is2_q,&ctx_q);
  VecScatterBegin(ctx_q,X,Xq,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(ctx_q,X,Xq,INSERT_VALUES,SCATTER_FORWARD);

  ISCreateGeneral(PETSC_COMM_SELF,nidx,idx1_h,PETSC_COPY_VALUES,&is1_h);
  ISCreateGeneral(PETSC_COMM_SELF,nidx,idx2_h,PETSC_COPY_VALUES,&is2_h);
  VecScatterCreate(X,is1_h,Xh,is2_h,&ctx_h);
  VecScatterBegin(ctx_h,X,Xh,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(ctx_h,X,Xh,INSERT_VALUES,SCATTER_FORWARD);

  PetscPrintf(PETSC_COMM_WORLD,"Xq: \n");
  VecView(Xq,PETSC_VIEWER_STDOUT_WORLD);
  PetscPrintf(PETSC_COMM_WORLD,"Xh: \n");
  VecView(Xh,PETSC_VIEWER_STDOUT_WORLD);

  VecScatterDestroy(&ctx_q);
  PetscFree4(idx1,idx2,idx1_h,idx2_h);
  ISDestroy(&is1_q);
  ISDestroy(&is2_q);

  VecScatterDestroy(&ctx_h);
  ISDestroy(&is1_h);
  ISDestroy(&is2_h);

  VecDestroy(&Xq);
  VecDestroy(&Xh);
  DMRestoreLocalVector(networkdm,&localX);
  return(0);
}
PetscErrorCode WashNetworkCleanUp(Wash wash)
{
  PetscMPIInt rank;

  MPI_Comm_rank(wash->comm,&rank);
  PetscFree(wash->edgelist);
  PetscFree(wash->vtype);
  if (!rank) {
    PetscFree2(wash->junction,wash->pipe);
  }
  return(0);
}
PetscErrorCode WashNetworkCreate(MPI_Comm comm,PetscInt pipesCase,Wash *wash_ptr)
{
  PetscInt    npipes;
  PetscMPIInt rank;
  Wash        wash=NULL;
  PetscInt    i,numVertices,numEdges,*vtype;
  PetscInt    *edgelist;
  Junction    junctions=NULL;
  Pipe        pipes=NULL;
  PetscBool   washdist=PETSC_TRUE;

  MPI_Comm_rank(comm,&rank);

  PetscCalloc1(1,&wash);
  wash->comm = comm;
  *wash_ptr  = wash;
  wash->Q0   = 0.477432; /* RESERVOIR */
  wash->H0   = 150.0;
  wash->HL   = 143.488;  /* VALVE */
  wash->QL   = 0.0;
  wash->nnodes_loc = 0;

  numVertices = 0;
  numEdges    = 0;
  edgelist    = NULL;
  /* proc[0] creates a sequential wash and edgelist */
  PetscPrintf(PETSC_COMM_WORLD,"Setup pipesCase %D\n",pipesCase);

  /* Set global number of pipes, edges, and junctions */
  /*-------------------------------------------------*/
  switch (pipesCase) {
  case 0:
    /* pipeCase 0:
       ======================================================
       (RESERVOIR) v0 --E0--> v1 --E1--> v2 --E2--> v3 (VALVE)
       ====================================================== */
    npipes = 3;
    PetscOptionsGetInt(NULL,NULL,"-npipes",&npipes,NULL);
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets the entire network, then distributes */
    numVertices = 0;
    numEdges    = 0;
    edgelist    = NULL;
    if (!rank) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCalloc1(2*numEdges,&edgelist);
      for (i=0; i<numEdges; i++) {
        edgelist[2*i] = i; edgelist[2*i+1] = i+1;
      }

      /* Add network components */
      /*------------------------*/
      PetscCalloc2(numVertices,&junctions,numEdges,&pipes);

      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type             = RESERVOIR;
      junctions[numVertices-1].type = VALVE;
    }
    break;
  case 1:
    /* pipeCase 1:
       =============================
                   v2 (VALVE)
                   ^
                   |
                   E2
                   |
       v0 --E0--> v3 --E1--> v1
       (RESERVOIR)           (VALVE)
       ============================= */
    npipes = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets the entire network, then distributes */
    if (!rank) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCalloc1(2*numEdges,&edgelist);
      edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
      edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
      edgelist[4] = 3; edgelist[5] = 2;  /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCalloc2(numVertices,&junctions,numEdges,&pipes);
      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = VALVE;
    }
    break;
  case 2:
    /* pipeCase 2:
       =============================
       (RESERVOIR) v2
                    |
                    E2
                    |
                    v
       v0 --E0--> v3 --E1--> v1
       (RESERVOIR)           (VALVE)
       ============================= */

    /* Set application parameters -- to be used in function evaluations */
    npipes = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets the entire network, then distributes */
    if (!rank) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCalloc1(2*numEdges,&edgelist);
      edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
      edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
      edgelist[4] = 2; edgelist[5] = 3;  /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCalloc2(numVertices,&junctions,numEdges,&pipes);
      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = RESERVOIR;
    }
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"not done yet");
  }
  /* set edge global id */
  for (i=0; i<numEdges; i++) pipes[i].id = i;

  if (!rank) { /* set vtype for proc[0] */
    PetscInt v;
    PetscMalloc1(2*numEdges,&vtype);
    for (i=0; i<2*numEdges; i++) {
      v        = edgelist[i];
      vtype[i] = junctions[v].type;
    }
    wash->vtype = vtype;
  }

  *wash_ptr      = wash;
  wash->nedge    = numEdges;
  wash->nvertex  = numVertices;
  wash->edgelist = edgelist;
  wash->junction = junctions;
  wash->pipe     = pipes;

  /* Distribute edgelist to other processors */
  PetscOptionsGetBool(NULL,NULL,"-wash_distribute",&washdist,NULL);
  if (washdist) {
    /*
     PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n");
    */
    WashNetworkDistribute(comm,wash);
  }
  return(0);
}
/* ------------------------------------------------------- */
int main(int argc,char **argv)
{
  PetscErrorCode    ierr;
  Wash              wash;
  Junction          junctions,junction;
  Pipe              pipe,pipes;
  PetscInt          KeyPipe,KeyJunction;
  PetscInt          *edgelist = NULL,*edgelists[1],*vtype = NULL;
  PetscInt          i,e,v,eStart,eEnd,vStart,vEnd,key;
  PetscInt          vkey,type;
  const PetscInt    *cone;
  DM                networkdm;
  PetscMPIInt       size,rank;
  PetscReal         ftime;
  Vec               X;
  TS                ts;
  PetscInt          steps=1;
  TSConvergedReason reason;
  PetscBool         viewpipes,monipipes=PETSC_FALSE,userJac=PETSC_TRUE,viewdm=PETSC_FALSE,viewX=PETSC_FALSE;
  PetscBool         test=PETSC_FALSE;
  PetscInt          pipesCase=0;
  DMNetworkMonitor  monitor;
  MPI_Comm          comm;

  PetscInt          nedges,nvertices; /* local number of edges and vertices */
  PetscInt          nnodes = 6,nv,ne;
  const PetscInt    *vtx,*edge;

  ierr = PetscInitialize(&argc,&argv,"pOption",help);if (ierr) return ierr;
  /* Read runtime options */
  PetscOptionsGetInt(NULL,NULL,"-case",&pipesCase,NULL);
  PetscOptionsGetBool(NULL,NULL,"-user_Jac",&userJac,NULL);
  PetscOptionsGetBool(NULL,NULL,"-pipe_monitor",&monipipes,NULL);
  PetscOptionsGetBool(NULL,NULL,"-viewdm",&viewdm,NULL);
  PetscOptionsGetBool(NULL,NULL,"-viewX",&viewX,NULL);
  PetscOptionsGetInt(NULL,NULL,"-npipenodes",&nnodes,NULL);
  PetscOptionsGetBool(NULL,NULL,"-test",&test,NULL);

  /* Create networkdm */
  /*------------------*/
  DMNetworkCreate(PETSC_COMM_WORLD,&networkdm);
  PetscObjectGetComm((PetscObject)networkdm,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (size == 1 && monipipes) {
    DMNetworkMonitorCreate(networkdm,&monitor);
  }
  /* Register the components in the network */
  DMNetworkRegisterComponent(networkdm,"junctionstruct",sizeof(struct _p_Junction),&KeyJunction);
  DMNetworkRegisterComponent(networkdm,"pipestruct",sizeof(struct _p_Pipe),&KeyPipe);

  /* Create a distributed wash network (user-specific) */
  WashNetworkCreate(comm,pipesCase,&wash);
  nedges    = wash->nedge;
  nvertices = wash->nvertex; /* local number of vertices, excluding ghosts */
  edgelist  = wash->edgelist;
  vtype     = wash->vtype;
  junctions = wash->junction;
  pipes     = wash->pipe;

  /* Set up the network layout */
  DMNetworkSetSizes(networkdm,1,&nvertices,&nedges,0,NULL);

  /* Add local edge connectivity */
  edgelists[0] = edgelist;
  DMNetworkSetEdgeList(networkdm,edgelists,NULL);
  DMNetworkLayoutSetUp(networkdm);

  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);
  /* PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd); */

  /* Test DMNetworkGetSubnetworkInfo() */
  if (test) {
    DMNetworkGetSubnetworkInfo(networkdm,0,&nv,&ne,&vtx,&edge);
    if (ne != eEnd - eStart || nv != vEnd - vStart) SETERRQ2(PetscObjectComm((PetscObject)networkdm),PETSC_ERR_ARG_WRONG,"ne %D or nv %D is incorrect",ne,nv);
  }
  if (rank) { /* junctions[] and pipes[] for proc[0] were allocated in WashNetworkCreate() */
    /* vEnd - vStart = nvertices + the number of ghost vertices! */
    PetscCalloc2(vEnd - vStart,&junctions,nedges,&pipes);
  }

  /* Add the Pipe component to all local edges */
  for (e = eStart; e < eEnd; e++) {
    if (test) {
      if (e != edge[e]) SETERRQ2(PetscObjectComm((PetscObject)networkdm),PETSC_ERR_ARG_WRONG,"e %D != edge %D from DMNetworkGetSubnetworkInfo()",e,edge[e]);
    }

    pipes[e-eStart].nnodes = nnodes;
    DMNetworkAddComponent(networkdm,e,KeyPipe,&pipes[e-eStart]);

    /* Add the number of variables to each edge */
    DMNetworkAddNumVariables(networkdm,e,2*pipes[e-eStart].nnodes);

    if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
      pipes[e-eStart].length = 600.0;
      DMNetworkMonitorAdd(monitor,"Pipe Q",e,pipes[e-eStart].nnodes,0,2,0.0,pipes[e-eStart].length,-0.8,0.8,PETSC_TRUE);
      DMNetworkMonitorAdd(monitor,"Pipe H",e,pipes[e-eStart].nnodes,1,2,0.0,pipes[e-eStart].length,-400.0,800.0,PETSC_TRUE);
    }
  }
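  /* Each edge thus carries 2*nnodes unknowns, stored as interlaced (q,h) pairs per pipe
     node; PipesView() relies on exactly this layout when it scatters q and h into
     separate vectors. */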
  /* Add the Junction component to all local vertices, including ghost vertices! */
  for (v = vStart; v < vEnd; v++) {
    if (test) {
      if (v != vtx[v-vStart]) SETERRQ2(PetscObjectComm((PetscObject)networkdm),PETSC_ERR_ARG_WRONG,"v %D != vtx %D from DMNetworkGetSubnetworkInfo()",v,vtx[v-vStart]);
    }

    DMNetworkAddComponent(networkdm,v,KeyJunction,&junctions[v-vStart]);

    /* Add the number of variables to each vertex */
    DMNetworkAddNumVariables(networkdm,v,2);
  }
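  /* The two vertex unknowns are the junction flow and head (q_J, h_J), in that order,
     matching the PipeField layout that WASHIFunction() assumes when it casts the vertex
     block of the solution array to PipeField. */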
  if (size > 1) { /* must be called before DMSetUp(); other partitioners do not work here yet -- they crash proc[0] */
    DM               plexdm;
    PetscPartitioner part;
    DMNetworkGetPlex(networkdm,&plexdm);
    DMPlexGetPartitioner(plexdm,&part);
    PetscPartitionerSetType(part,PETSCPARTITIONERSIMPLE);
    PetscOptionsSetValue(NULL,"-dm_plex_csr_via_mat","true"); /* for parmetis */
  }

  /* Set up the DM for use */
  DMSetUp(networkdm);
  if (viewdm) {
    PetscPrintf(PETSC_COMM_WORLD,"\nOriginal networkdm, DMView:\n");
    DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD);
  }
  /* Set user physical parameters on the components */
  for (e = eStart; e < eEnd; e++) {
    DMNetworkGetConnectedVertices(networkdm,e,&cone);
    /* vfrom */
    DMNetworkGetComponent(networkdm,cone[0],0,&vkey,(void**)&junction);
    junction->type = (VertexType)vtype[2*e];

    /* vto */
    DMNetworkGetComponent(networkdm,cone[1],0,&vkey,(void**)&junction);
    junction->type = (VertexType)vtype[2*e+1];
  }

  WashNetworkCleanUp(wash);

  /* Network partitioning and distribution of data */
  DMNetworkDistribute(&networkdm,0);
  if (viewdm) {
    PetscPrintf(PETSC_COMM_WORLD,"\nAfter DMNetworkDistribute, DMView:\n");
    DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD);
  }
  /* create vectors */
  DMCreateGlobalVector(networkdm,&X);
  DMCreateLocalVector(networkdm,&wash->localX);
  DMCreateLocalVector(networkdm,&wash->localXdot);

  /* PipeSetUp -- each process only sets up its own pipes */
  /*------------------------------------------------------*/
  DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);

  userJac = PETSC_TRUE;
  DMNetworkHasJacobian(networkdm,userJac,userJac);
  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  for (e=eStart; e<eEnd; e++) { /* each edge has only one component, a pipe */
    DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe);

    wash->nnodes_loc += pipe->nnodes; /* local total number of nodes, used later by PipesView() */
    PipeSetParameters(pipe,
                      600.0,   /* length   */
                      0.5,     /* diameter */
                      1200.0,  /* a        */
                      0.018);  /* friction */
    PipeSetUp(pipe);

    if (userJac) {
      /* Create Jacobian matrix structures for a Pipe */
      Mat *J;
      PipeCreateJacobian(pipe,NULL,&J);
      DMNetworkEdgeSetMatrix(networkdm,e,J);
    }
  }
  if (userJac) {
    DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);
    for (v=vStart; v<vEnd; v++) {
      Mat *J;
      JunctionCreateJacobian(networkdm,v,NULL,&J);
      DMNetworkVertexSetMatrix(networkdm,v,J);

      DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction);
      junction->jacobian = J;
    }
  }

  /* Test DMNetworkGetSubnetworkInfo() */
  if (test) {
    DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
    DMNetworkGetSubnetworkInfo(networkdm,0,&nv,&ne,&vtx,&edge);
    if (ne != eEnd - eStart || nv != vEnd - vStart) SETERRQ2(PetscObjectComm((PetscObject)networkdm),PETSC_ERR_ARG_WRONG,"ne %D or nv %D is incorrect",ne,nv);

    for (e = eStart; e < eEnd; e++) {
      if (e != edge[e]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"e %D != edge %D from DMNetworkGetSubnetworkInfo()",e,edge[e]);
    }
    for (v = vStart; v < vEnd; v++) {
      if (v != vtx[v-vStart]) SETERRQ2(PetscObjectComm((PetscObject)networkdm),PETSC_ERR_ARG_WRONG,"v %D != vtx %D from DMNetworkGetSubnetworkInfo()",v,vtx[v-vStart]);
    }
  }
  /* Set up the solver */
  /*--------------------------------------------------------*/
  TSCreate(PETSC_COMM_WORLD,&ts);

  TSSetDM(ts,(DM)networkdm);
  TSSetIFunction(ts,NULL,WASHIFunction,wash);

  TSSetMaxSteps(ts,steps);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetTimeStep(ts,0.1);
  TSSetType(ts,TSBEULER);
  if (size == 1 && monipipes) {
    TSMonitorSet(ts,TSDMNetworkMonitor,monitor,NULL);
  }
  TSSetFromOptions(ts);

  WASHSetInitialSolution(networkdm,X,wash);

  TSSolve(ts,X);

  TSGetSolveTime(ts,&ftime);
  TSGetStepNumber(ts,&steps);
  TSGetConvergedReason(ts,&reason);
  PetscPrintf(PETSC_COMM_WORLD,"%s at time %g after %D steps\n",TSConvergedReasons[reason],(double)ftime,steps);
  if (viewX) {
    VecView(X,PETSC_VIEWER_STDOUT_WORLD);
  }
  viewpipes = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-Jac_view",&viewpipes,NULL);
  if (viewpipes) {
    SNES snes;
    Mat  Jac;
    TSGetSNES(ts,&snes);
    SNESGetJacobian(snes,&Jac,NULL,NULL,NULL);
    MatView(Jac,PETSC_VIEWER_DRAW_WORLD);
  }

  /* View solution q and h */
  /* --------------------- */
  viewpipes = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-pipe_view",&viewpipes,NULL);
  if (viewpipes) {
    PipesView(X,networkdm,wash);
  }
  /* Free spaces */
  /* ----------- */
  TSDestroy(&ts);
  VecDestroy(&X);
  VecDestroy(&wash->localX);
  VecDestroy(&wash->localXdot);

  /* Destroy objects from each pipe that were created in PipeSetUp() */
  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  for (i = eStart; i < eEnd; i++) {
    DMNetworkGetComponent(networkdm,i,0,&key,(void**)&pipe);
    PipeDestroy(&pipe);
  }
  if (userJac) {
    for (v=vStart; v<vEnd; v++) {
      DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction);
      JunctionDestroyJacobian(networkdm,v,junction);
    }
  }

  if (size == 1 && monipipes) {
    DMNetworkMonitorDestroy(&monitor);
  }
  DMDestroy(&networkdm);
  PetscFree(wash);

  if (rank) {
    PetscFree2(junctions,pipes);
  }
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   build:
     depends: pipeInterface.c pipeImpls.c

   test:
     args: -ts_monitor -case 1 -ts_max_steps 1 -options_left no -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_1.out

   test:
     suffix: 2
     nsize: 2
     requires: mumps
     args: -ts_monitor -case 1 -ts_max_steps 1 -petscpartitioner_type simple -options_left no -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_2.out

   test:
     suffix: 3
     nsize: 2
     requires: mumps
     args: -ts_monitor -case 0 -ts_max_steps 1 -petscpartitioner_type simple -options_left no -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_3.out

   test:
     suffix: 4
     args: -ts_monitor -case 2 -ts_max_steps 1 -options_left no -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_4.out

   test:
     suffix: 5
     nsize: 3
     requires: mumps
     args: -ts_monitor -case 2 -ts_max_steps 10 -petscpartitioner_type simple -options_left no -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_5.out

   test:
     suffix: 6
     nsize: 2
     requires: mumps
     args: -ts_monitor -case 1 -ts_max_steps 1 -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_6.out

   test:
     suffix: 7
     nsize: 2
     requires: mumps
     args: -ts_monitor -case 2 -ts_max_steps 1 -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX -test
     localrunfiles: pOption
     output_file: output/pipes1_7.out

   test:
     suffix: 8
     nsize: 2
     requires: mumps parmetis
     args: -ts_monitor -case 2 -ts_max_steps 1 -petscpartitioner_type parmetis -options_left no -wash_distribute 1
     localrunfiles: pOption
     output_file: output/pipes1_8.out

TEST*/