Actual source code: ex8f.F
petsc-3.10.2 2018-10-09
!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approximate solution, right-hand side, exact solution
!     A        - matrix that defines the linear system
!

      Mat            A
      Vec            x,b,u
      PC             pc
      PetscInt       n,dim,istart,iend
      PetscInt       i,j,jj,ii,one,zero
      PetscErrorCode ierr
      PetscScalar    v
      external       MyResidual
      PetscScalar    pfive
      KSP            ksp

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      pfive = .5
      n     = 6
      dim   = n*n
      one   = 1
      zero  = 0
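
!  Integer arguments to PETSc routines must be of type PetscInt, so the
!  named values one and zero are passed below rather than literal
!  constants (whose integer kind may not match).
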
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,istart,iend,ierr)

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
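!
!  The loop below assembles the standard 5-point finite-difference
!  Laplacian on an n x n grid: -1.0 for each existing north/south/
!  east/west neighbour of global row ii, and 4.0 on the diagonal.
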
      do 10, ii=istart,iend-1
        v = -1.0
        i = ii/n
        j = ii - i*n
        if (i.gt.0) then
          jj = ii - n
          call MatSetValues(A,one,ii,one,jj,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          jj = ii + n
          call MatSetValues(A,one,ii,one,jj,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          jj = ii - 1
          call MatSetValues(A,one,ii,one,jj,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          jj = ii + 1
          call MatSetValues(A,one,ii,one,jj,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,ii,one,ii,v,ADD_VALUES,ierr)
 10   continue
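
!  Each entry above is set exactly once, so using ADD_VALUES on the
!  freshly created (empty) matrix is equivalent to INSERT_VALUES here.
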
!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit by
!  placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form one vector from scratch and then duplicate it as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set the exact solution; then compute the right-hand-side vector.

      call VecSet(u,pfive,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create the linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      call KSPGetPC(ksp,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
!     Passing PETSC_NULL_VEC for the communicator array is bogus, but we
!     do not currently have a PETSC_NULL_MPI_COMM to pass instead.
      call PCMGSetLevels(pc,one,PETSC_NULL_VEC,ierr)

      call PCMGSetResidual(pc,zero,MyResidual,A,ierr)
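
!  PCMGSetResidual() registers MyResidual as the routine used to compute
!  the residual r = b - A*x on the given level (level zero, the only
!  level here).  MyResidual below is only a stub: this test checks the
!  setup call, not a solve.
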
!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
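
!  This test exercises only the setup path; a complete solver would
!  continue from here with, for example,
!     call KSPSetFromOptions(ksp,ierr)
!     call KSPSolve(ksp,b,x,ierr)
!  before cleaning up.
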
      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end

      subroutine MyResidual(A,b,x,r,ierr)
      use petscksp
      implicit none
      Mat            A
      Vec            b,x,r
      PetscErrorCode ierr
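      PetscScalar    mone

!     A minimal sketch of what a real residual evaluation would do,
!     namely r = b - A*x; this test registers the routine but never
!     runs a solve, so the calls below are not actually exercised.
      mone = -1.0
      call MatMult(A,x,r,ierr)
      call VecAYPX(r,mone,b,ierr)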
      return
      end

!/*TEST
!
!   test:
!     TODO: Need to develop comparison test
!
!TEST*/