Actual source code: ex1.c
static char help[] = "Solves a tridiagonal linear system with KSP.\n\n";
/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscpc.h     - preconditioners
     petscis.h     - index sets
     petscviewer.h - viewers

  Note: The corresponding parallel example is ex23.c
*/
#include <petscksp.h>
int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PC          pc;      /* preconditioner context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, n = 10, col[3], its;
  PetscMPIInt size;
  PetscScalar value[3];

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This is a uniprocessor example only!");

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors. Note that we form 1 vector from scratch and
     then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_SELF, &x));
  PetscCall(PetscObjectSetName((PetscObject)x, "Solution"));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, n));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecDuplicate(x, &b));
  PetscCall(VecDuplicate(x, &u));
  /*
     Create matrix. When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  PetscCall(MatCreate(PETSC_COMM_SELF, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
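
  /*
     Preallocation sketch (illustrative only, not used by this small example):
     for this tridiagonal matrix stored as SeqAIJ one could reserve 3 nonzeros
     per row before inserting values; matrix types that do not use this routine
     simply ignore the call.

       PetscCall(MatSeqAIJSetPreallocation(A, 3, NULL));
  */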
  /*
     Assemble matrix
  */
  value[0] = -1.0;
  value[1] = 2.0;
  value[2] = -1.0;
  /* interior rows: [-1 2 -1] */
  for (i = 1; i < n - 1; i++) {
    col[0] = i - 1;
    col[1] = i;
    col[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, col, value, INSERT_VALUES));
  }
  /* last row: [-1 2] */
  i      = n - 1;
  col[0] = n - 2;
  col[1] = n - 1;
  PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  /* first row: [2 -1] */
  i        = 0;
  col[0]   = 0;
  col[1]   = 1;
  value[0] = 2.0;
  value[1] = -1.0;
  PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  /*
     Set exact solution; then compute right-hand-side vector.
  */
  PetscCall(VecSet(u, 1.0));
  PetscCall(MatMult(A, u, b));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
              Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the matrix that defines the preconditioner.
  */
  PetscCall(KSPSetOperators(ksp, A, A));
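
  /*
     More general form (sketch; the matrix P below is hypothetical and is not
     built in this example): the preconditioner may be constructed from a
     different matrix than the one that defines the linear system, e.g.

       PetscCall(KSPSetOperators(ksp, A, P));
  */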
  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following three statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions().
  */
  if (!PCMPIServerActive) { /* cannot directly set KSP/PC options when using the MPI linear solver server */
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCJACOBI));
    PetscCall(KSPSetTolerances(ksp, 1.e-5, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
  }
  /*
     Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
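
  /*
     For example, assuming the usual tutorial build that produces an executable
     named ex1, one could run:

       ./ex1 -n 100 -ksp_type gmres -pc_type jacobi -ksp_rtol 1e-8 -ksp_monitor
  */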
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check the solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Norm of error %g, Iterations %" PetscInt_FMT "\n", (double)norm, its));
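
  /*
     Optional extra diagnostic (sketch, not part of this example): one could also
     report why the iteration stopped, e.g.

       KSPConvergedReason reason;
       PetscCall(KSPGetConvergedReason(ksp, &reason));
       PetscCall(PetscPrintf(PETSC_COMM_SELF, "Converged reason: %s\n", KSPConvergedReasons[reason]));
  */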
  /* check that KSP automatically propagates the new nonzero values in the matrix to the solver for the next solve */
  PetscCall(MatShift(A, 2.0));
  PetscCall(KSPSolve(ksp, b, x));
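
  /*
     Related option (sketch of an assumption about intent, not used here): if
     rebuilding the preconditioner for the shifted matrix were not desired, one
     could request reuse before the second solve:

       PetscCall(KSPSetReusePreconditioner(ksp, PETSC_TRUE));
  */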
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  /* test if prefixes properly propagate to PCMPI objects */
  if (PCMPIServerActive) {
    PetscCall(KSPCreate(PETSC_COMM_SELF, &ksp));
    PetscCall(KSPSetOptionsPrefix(ksp, "prefix_test_"));
    PetscCall(MatSetOptionsPrefix(A, "prefix_test_"));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPDestroy(&ksp));
  }
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/*TEST

   test:
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2_aijcusparse
      requires: cuda
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda
      args: -ksp_view

   test:
      suffix: 3
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 3_aijcusparse
      requires: cuda
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda -ksp_view

   test:
      suffix: aijcusparse
      requires: cuda
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda -ksp_view
      output_file: output/ex1_1_aijcusparse.out

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_1
      nsize: 3
      filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory"
      # use the MPI Linear Solver Server
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view
      # controls for the use of PCMPI on a particular system
      args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view -mpi_linear_solver_server_mat_view
      # the usual options for the linear solver (in this case using the server)
      args: -ksp_monitor -ksp_converged_reason -mat_view -ksp_view -ksp_type cg -pc_type none
      # the options for the prefixed objects
      args: -prefix_test_mpi_linear_solver_server_mat_view -prefix_test_ksp_monitor -prefix_test_mpi_linear_solver_server_minimum_count_per_rank 5

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_1_shared_memory_false
      nsize: 3
      filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN
      # use the MPI Linear Solver Server
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
      # controls for the use of PCMPI on a particular system
      args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view -mpi_linear_solver_server_mat_view
      # the usual options for the linear solver (in this case using the server)
      args: -ksp_monitor -ksp_converged_reason -mat_view -ksp_view -ksp_type cg -pc_type none
      # the options for the prefixed objects
      args: -prefix_test_mpi_linear_solver_server_mat_view -prefix_test_ksp_monitor -prefix_test_mpi_linear_solver_server_minimum_count_per_rank 5

   test:
      requires: !__float128
      suffix: minit
      args: -ksp_monitor -pc_type none -ksp_min_it 8

TEST*/