Actual source code: ex6f.F90

!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to retain data in
!  common blocks between successive subroutine calls.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

!  Variables:
!
!  A       - matrix that defines linear system
!  A2      - matrix used to construct the preconditioner
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
      Vec     x,u,b
      Mat     A,A2
      KSP     ksp
      PetscInt i,j,II,JJ,m,n
      PetscInt Istart,Iend
      PetscInt nsteps,one
      PetscErrorCode ierr
      PetscBool  flg
      PetscScalar  v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m      = 3
      n      = 3
      nsteps = 2
      one    = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nsteps',nsteps,flg,ierr)
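
!  The grid dimensions and the number of solves can be set at runtime;
!  for example: mpiexec -n 2 ./ex6f -m 4 -n 4 -nsteps 3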

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)
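
!  Note: for large problems one would normally preallocate the matrix
!  nonzeros (e.g., with MatXAIJSetPreallocation()) before inserting
!  values; this example keeps the setup minimal.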

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
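
!  Each process owns the contiguous block of rows Istart to Iend-1;
!  Iend is one greater than the last locally owned row, so the loop
!  below runs only to Iend-1.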

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      do 10, II=Istart,Iend-1
        v = -1.0
!       Map the global row index II to grid coordinates (i,j)
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue
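
!  The values inserted above (-1 for each of the four grid neighbors,
!  4 on the diagonal) form the standard 5-point finite difference
!  Laplacian on the m x n grid, with global row II = i*n + j.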

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
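
!  VecDuplicate() replicates only the type and parallel layout of the
!  original vector; it does not copy its values.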

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)
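
!  For instance, the test at the end of this file runs with
!  -pc_type jacobi -ksp_monitor_short, which prints the residual
!  norm at each iteration of every solve.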

!  Solve several linear systems in succession

      do 100 i=1,nsteps
         call solve1(ksp,A,x,b,u,i,nsteps,A2,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,A2,ierr)
      use petscksp
      implicit none

!
!   solve1 - This routine is used for repeated linear system solves.
!   We update the linear system matrix each time, but retain the same
!   preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      PetscScalar  v,val
      PetscInt II,Istart,Iend
      PetscInt count,nsteps,one
      PetscErrorCode ierr
      Mat     A
      KSP     ksp
      Vec     x,b,u

! Use common block to retain the viewing flag and rank between
! successive subroutine calls; the matrix A2 itself is retained
! through the argument list.
      Mat              A2
      PetscMPIInt      rank
      PetscBool        pflag
      common /my_data/ pflag,rank

      one = 1
! First time through: Create a copy of the matrix to serve as the
! preconditioning matrix for all of the linear solves.
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = .false.
        call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-mat_view',pflag,ierr)
        if (pflag) then
          if (rank .eq. 0) write(6,100)
          call PetscFlush(6)
        endif
! MatConvert() with MAT_INITIAL_MATRIX creates A2 as a copy of A;
! A2 is never altered afterwards, so the preconditioner stays fixed.
        call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)
! All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif

! Alter the matrix A a bit (add 2 to each diagonal entry)
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag) then
        if (rank .eq. 0) write(6,110)
        call PetscFlush(6)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
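
! The label is written between MatAssemblyBegin() and MatAssemblyEnd()
! so that it appears before the -mat_view output that is produced when
! assembly completes.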

! Set the exact solution; compute the right-hand-side vector
      val = 1.0*real(count)
      call VecSet(u,val,ierr)
      call MatMult(A,u,b,ierr)
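
! Since b is computed as A*u, u is the exact solution of the system
! A x = b that is solved below.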

! Set operators, keeping the identical preconditioner matrix for
! all linear solves.  This approach is often effective when the
! linear systems do not change very much between successive steps.
      call KSPSetReusePreconditioner(ksp,PETSC_TRUE,ierr)
      call KSPSetOperators(ksp,A,A2,ierr)
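
! Because reuse is enabled above, the preconditioner built from A2 on
! the first solve is kept for every subsequent solve, even though the
! operator A changes at each step.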

! Solve linear system
      call KSPSolve(ksp,b,x,ierr)
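
! One could also inspect the solve at this point; a minimal sketch
! (not part of the original example):
!      PetscInt its
!      call KSPGetIterationNumber(ksp,its,ierr)
!      if (rank .eq. 0) print*,'iterations:',its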

! Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end

!/*TEST
!
!   test:
!      args: -pc_type jacobi -mat_view -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/