Actual source code: ex11f.F
!
!  "$Id: ex11f.F,v 1.31 2001/08/07 03:04:00 balay Exp $";
!
!  Description: Solves a complex linear system in parallel with KSP (Fortran code).
!
!/*T
!  Concepts: KSP^solving a Helmholtz equation
!  Concepts: complex numbers
!  Processors: n
!T*/
!
!  The model problem:
!     Solve the Helmholtz equation on the unit square (0,1) x (0,1):
!          -delta u - sigma1*u + i*sigma2*u = f,
!     where delta is the Laplace operator,
!     with Dirichlet boundary conditions on all sides,
!     discretized with the 2-D, five-point finite difference stencil.
!
!  Compiling the code:
!     This code uses the complex numbers version of PETSc, so one of the
!     following values of BOPT must be used for compiling the PETSc
!     libraries and this example:
!        BOPT=g_complex   - debugging version
!        BOPT=O_complex   - optimized version
!        BOPT=Opg_complex - profiling version
!
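!     For example, with a standard PETSc 2.x example makefile (an
!     assumption; the makefile itself is not part of this file), the
!     example might be built with:
!        make BOPT=g_complex ex11f
!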
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                         Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  The following include statements are required for KSP Fortran programs:
!     petsc.h    - base PETSc routines
!     petscvec.h - vectors
!     petscmat.h - matrices
!     petscpc.h  - preconditioners
!     petscksp.h - Krylov subspace methods
!  Include the following to use PETSc random numbers:
!     petscsys.h - system routines
!  Additional include statements may be needed if using other PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscsys.h"
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver context
!     x, b, u - approx solution, right-hand side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of error in solution
!     rctx    - random number context
!
      KSP              ksp
      Mat              A
      Vec              x,b,u
      PetscRandom      rctx
      double precision norm,h2,sigma1
      PetscScalar      none,sigma2,v,pfive
      PetscTruth       flg
      integer          dim,its,ierr,n,rank
      integer          Istart,Iend,i,j,II,JJ
      logical          use_random
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
#if !defined(PETSC_USE_COMPLEX)
      write(6,*) "This example requires complex numbers."
      goto 200
#endif
      none   = -1.0
      n      = 6
      sigma1 = 100.0
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-sigma1',sigma1,   &
     &                         flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      dim = n*n
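!     (With the default n = 6, the grid has 6x6 interior nodes and the
!      global system has dim = 36 unknowns.)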
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,dim,    &
     &               dim,A,ierr)
      call MatSetFromOptions(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
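!     (For example, with dim = 36 on two processes, the default
!      partitioning typically gives rows 0..17 to process 0 and
!      rows 18..35 to process 1.)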
!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-norandom',        &
     &                         flg,ierr)
      if (flg .eq. PETSC_TRUE) then
        use_random = .false.
        sigma2 = 10.0*PETSC_i
      else
        use_random = .true.
        call PetscRandomCreate(PETSC_COMM_WORLD,                        &
     &                         RANDOM_DEFAULT_IMAGINARY,rctx,ierr)
      endif
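!     (Run with -norandom to use the fixed shift sigma2 = 10i instead
!      of drawing a fresh random imaginary shift for each diagonal
!      entry inside the assembly loop below.)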
      h2 = 1.0/((n+1)*(n+1))
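!     Here h = 1/(n+1) is the mesh width, so h2 = h*h.  The stencil
!     entries set below (-1 off the diagonal, 4 on it) are the standard
!     5-point Laplacian already multiplied through by h2, which is why
!     the zeroth-order sigma terms are scaled by h2 as well.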
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (use_random) call PetscRandomGetValue(rctx,sigma2,ierr)
        v = 4.0 - sigma1*h2 + sigma2*h2
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
      if (use_random) call PetscRandomDestroy(rctx,ierr)
!  Assemble matrix, using the 2-step process:
!      MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
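!     (For instance, purely local work that does not touch A, such as
!      setting up vector entries, could be placed here to overlap with
!      the communication started by MatAssemblyBegin().)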
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.
      if (use_random) then
        call PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,         &
     &                         rctx,ierr)
        call VecSetRandom(rctx,u,ierr)
      else
        pfive = 0.5
        call VecSet(pfive,u,ierr)
      endif
      call MatMult(A,u,b,ierr)
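!     (Since b is computed as A*u, the vector u is by construction the
!      exact solution of Ax = b; this is what makes the error check at
!      the end of the program possible.)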
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
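!     (DIFFERENT_NONZERO_PATTERN tells the solver not to assume the
!      preconditioner matrix retains the same nonzero structure between
!      successive solves; for a single solve, as here, any of the
!      MatStructure flags would work.)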
!  Set runtime options, e.g.,
!     -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
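!  For example, the program might be launched as follows (mpirun being
!  a typical MPI launcher of this era; details depend on the local MPI
!  installation):
!     mpirun -np 4 ex11f -n 30 -ksp_type gmres -ksp_monitor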
      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSetRhs(ksp,b,ierr)
      call KSPSetSolution(ksp,x,ierr)
      call KSPSolve(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
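!     (In this version of PETSc, VecAXPY(alpha,x,y,ierr) computes
!      y = y + alpha*x, so with none = -1.0 the call below overwrites
!      x with x - u, the error vector.)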
      call VecAXPY(none,u,x,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',e10.4,', iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      if (use_random) call PetscRandomDestroy(rctx,ierr)
      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

 200  continue
      call PetscFinalize(ierr)
      end