Actual source code: ex6f.F
!
!    "$Id: ex6f.F,v 1.36 2001/08/07 03:04:00 balay Exp $";
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
!  The following include statements are required for KSP Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
      program main
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!  Variables:
!
!     A       - matrix that defines linear system
!     ksp     - KSP context
!     x, b, u - approx solution, RHS, exact solution vectors
!
      Vec         x,u,b
      Mat         A
      KSP         ksp
      integer     i,j,II,JJ,ierr,m,n
      integer     Istart,Iend,flg,nsteps
      PetscScalar v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m      = 3
      n      = 3
      nsteps = 2
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps,    &
     &                        flg,ierr)
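
!  (For illustration only: a typical invocation, whose exact syntax
!  depends on the local MPI installation, might look like
!      mpirun -np 2 ex6f -m 4 -n 4 -nsteps 3
!  using the -m, -n, and -nsteps options defined above.)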

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,   &
     &               m*n,A,ierr)
      call MatSetFromOptions(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
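
!  (As an illustrative example: with m=n=3 and two processes, PETSc's
!  default layout would assign rows 0-4 to rank 0 and rows 5-8 to
!  rank 1; the exact split is chosen by PETSc and should not be
!  assumed by the application.)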

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
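
!  (The loop above builds the standard 5-point finite-difference
!  Laplacian on an m x n grid: global row II = i*n + j couples to its
!  north/south neighbors (II-n, II+n) and east/west neighbors (II-1,
!  II+1) with value -1, and to itself with value 4.)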

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
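
!  (VecDuplicate() copies the layout and type of its argument but not
!  its values, so b and x here are uninitialized workspace.)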

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)
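
!  (For example, one might run with options such as
!      -ksp_type gmres -pc_type jacobi -ksp_monitor
!  these are standard PETSc options; any KSP/PC type may be
!  substituted.)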

!  Solve several linear systems in succession

      do 100 i=1,nsteps
        call solve1(ksp,A,x,b,u,i,nsteps,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,ierr)

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!
!   solve1 - This routine is used for repeated linear system solves.
!   We update the linear system matrix each time, but retain the same
!   preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      PetscScalar v,val
      integer     II,ierr,Istart,Iend,count,nsteps
      Mat         A
      KSP         ksp
      Vec         x,b,u

!  Use common block to retain matrix between successive subroutine calls
      Mat              A2
      integer          rank,pflag
      common /my_data/ A2,pflag,rank
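
!  (Fortran 77 has no module variables; since PETSc objects are opaque
!  handles, they can be stored in a common block like any other
!  variable to persist between calls.)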

!  First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = 0
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',      &
     &                           pflag,ierr)
        if (pflag .ne. 0) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,A2,ierr)

!  All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif
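
!  (By default KSP zeroes the initial guess before each solve;
!  KSPSetInitialGuessNonzero() tells it to start from the current
!  contents of x, i.e. the previous step's solution.)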

!  Alter the matrix A a bit

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag .ne. 0) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set the exact solution; compute the right-hand-side vector

      val = 1.0*count
      call VecSet(val,u,ierr)
      call MatMult(A,u,b,ierr)

!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.

      call KSPSetOperators(ksp,A,A2,SAME_PRECONDITIONER,ierr)
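
!  (SAME_PRECONDITIONER reuses the existing preconditioner; if the
!  preconditioner should instead be rebuilt from the new matrix, the
!  flags SAME_NONZERO_PATTERN or DIFFERENT_NONZERO_PATTERN can be
!  passed here instead.)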

!  Solve linear system

      call KSPSetRhs(ksp,b,ierr)
      call KSPSetSolution(ksp,x,ierr)
      call KSPSolve(ksp,ierr)
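
!  (If desired, convergence can be inspected afterwards, e.g. via
!  KSPGetIterationNumber(ksp,its,ierr); that check is optional and is
!  not used in this example.)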

!  Destroy the preconditioner matrix on the last time through

      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end