Actual source code: ex11f.F90
!
!  Description: Solves a complex linear system in parallel with KSP (Fortran code).
!
!/*T
!  Concepts: KSP^solving a Helmholtz equation
!  Concepts: complex numbers
!  Processors: n
!T*/

!
!  The model problem:
!     Solve the Helmholtz equation on the unit square: (0,1) x (0,1)
!          -delta u - sigma1*u + i*sigma2*u = f,
!          where delta = the Laplace operator
!     Dirichlet boundary conditions on all sides
!     Use the 2-D, five-point finite difference stencil.
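!
!     Discretization note: with mesh width h = 1/(n+1) and the equation
!     scaled by h^2, each interior grid point (i,j) contributes the row
!          4*u(i,j) - u(i-1,j) - u(i+1,j) - u(i,j-1) - u(i,j+1)
!                   + (-sigma1 + i*sigma2)*h^2*u(i,j)  =  h^2*f(i,j)
!     which is exactly what the assembly loop below inserts
!     (off-diagonal entries -1, diagonal 4 - sigma1*h2 + sigma2*h2).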
!
!  Compiling the code:
!     This code uses the complex numbers version of PETSc, so PETSc must
!     be configured with the option --with-scalar-type=complex.
!
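!  Example run (assuming a standard MPI launcher named mpiexec; the
!  options mirror the test at the bottom of this file):
!     mpiexec -n 2 ./ex11f -n 6 -norandom -pc_type none -ksp_monitor_short
!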
! -----------------------------------------------------------------------

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Variable declarations
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number context
!

      KSP              ksp
      Mat              A
      Vec              x,b,u
      PetscRandom      rctx
      PetscReal        norm,h2,sigma1
      PetscScalar      none,sigma2,v,pfive,czero
      PetscScalar      cone
      PetscInt         dim,its,n,Istart
      PetscInt         Iend,i,j,II,JJ,one
      PetscErrorCode   ierr
      PetscMPIInt      rank
      PetscBool        flg
      logical          use_random

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif

      none   = -1.0
      n      = 6
      sigma1 = 100.0
      czero  = 0.0
      cone   = PETSC_i
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-sigma1',sigma1,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
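
!     dim is the global size of the linear system: one unknown for each
!     of the n*n interior grid points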
      dim = n*n

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
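
!     Note: Iend is one more than the global index of the last locally
!     owned row, so this process owns rows Istart through Iend-1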

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-norandom',flg,ierr)
      if (flg) then
        use_random = .false.
        sigma2 = 10.0*PETSC_i
      else
        use_random = .true.
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call PetscRandomSetInterval(rctx,czero,cone,ierr)
      endif
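
!     The random interval (czero, cone) = (0, i) keeps the sampled values
!     on the segment from 0 to i in the complex plane, so the random
!     sigma2 used below is a purely imaginary perturbation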
      h2 = 1.0/real((n+1)*(n+1))

      one = 1
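
!     Assemble the matrix row by row; global row II corresponds to grid
!     point (i,j) with i = II/n (integer division) and j = II - i*n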
      do II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (use_random) call PetscRandomGetValue(rctx,sigma2,ierr)
        v = 4.0 - sigma1*h2 + sigma2*h2
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
      enddo
      if (use_random) call PetscRandomDestroy(rctx,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
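
!     Since the entries were contributed with ADD_VALUES, any duplicate
!     contributions to the same matrix location are summed on assembly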

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      if (use_random) then
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call VecSetRandom(u,rctx,ierr)
      else
        pfive = 0.5
        call VecSet(u,pfive,ierr)
      endif
      call MatMult(A,u,b,ierr)
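
!     Because b = A*u is computed from the known exact solution u, the
!     error in the computed solution x can be measured after the solve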

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>

      call KSPSetFromOptions(ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
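!     (VecAXPY computes x <- none*u + x; with none = -1.0 this replaces
!     x by the error x - u, whose 2-norm is then reported)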

      call VecAXPY(x,none,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',e11.4,',iterations ',i5)
 110  format('Norm of error < 1.e-12,iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      if (use_random) call PetscRandomDestroy(rctx,ierr)
      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end

!
!/*TEST
!
!   build:
!      requires: complex
!
!   test:
!      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex11f_1.out
!
!TEST*/