Actual source code: ex26.c
petsc-3.15.0 2021-03-30
static char help[] = "Solves Laplacian with multigrid. Tests block API for PCMG\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
/* Modified from ~src/ksp/tests/ex19.c. Used for testing ML 6.2 interface.

   This problem is modeled by the partial differential equation

           -Laplacian u = g,  0 < x,y < 1,

   with boundary conditions

            u = 0  for  x = 0, x = 1, y = 0, y = 1.

   A finite difference approximation with the usual 5-point stencil
   is used to discretize the boundary value problem to obtain a linear
   system of equations.

   Usage: ./ex26 -ksp_monitor_short -pc_type ml
          -mg_coarse_ksp_max_it 10
          -mg_levels_1_ksp_max_it 10 -mg_levels_2_ksp_max_it 10
          -mg_fine_ksp_max_it 10
*/
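
/*
   For reference: the interior matrix rows assembled by FormJacobian_Grid()
   below are the usual 5-point stencil scaled by hx*hy. With hx = 1/(mx-1)
   and hy = 1/(my-1), the row for an interior node (i,j) reads

      2*(hy/hx + hx/hy)*u(i,j) - (hy/hx)*(u(i-1,j) + u(i+1,j))
                               - (hx/hy)*(u(i,j-1) + u(i,j+1)),

   while boundary rows contain only a (scaled) diagonal entry.
*/
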
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application context */
typedef struct {
  PetscInt mx,my;          /* number of grid points in x and y directions */
  Vec      localX,localF;  /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;          /* global vectors */
  Mat      J;              /* Jacobian on grid */
  Mat      A,P,R;
  KSP      ksp;
} GridCtx;
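
/* Note: GridCtx is carried over from ex19.c; this example only touches mx, my,
   da, x, b, localX and localF, and leaves the remaining fields unused. */
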
static PetscErrorCode FormJacobian_Grid(GridCtx*,Mat);

int main(int argc,char **argv)
{
  PetscInt       i,its,Nx = PETSC_DECIDE,Ny = PETSC_DECIDE,nlocal,nrhs = 1;
  PetscScalar    one = 1.0;
  Mat            A,B,X;
  GridCtx        fine_ctx;
  KSP            ksp;
  PetscBool      Brand = PETSC_FALSE,flg;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /* set up discretization matrix for fine grid */
  fine_ctx.mx = 9;
  fine_ctx.my = 9;
  PetscOptionsGetInt(NULL,NULL,"-mx",&fine_ctx.mx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-my",&fine_ctx.my,NULL);
  PetscOptionsGetInt(NULL,NULL,"-nrhs",&nrhs,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ny",&Ny,NULL);
  PetscOptionsGetBool(NULL,NULL,"-rand",&Brand,NULL);
  PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",fine_ctx.mx,fine_ctx.my);

  /* Set up distributed array for fine grid */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,fine_ctx.mx,fine_ctx.my,Nx,Ny,1,1,NULL,NULL,&fine_ctx.da);
  DMSetFromOptions(fine_ctx.da);
  DMSetUp(fine_ctx.da);
  DMCreateGlobalVector(fine_ctx.da,&fine_ctx.x);
  VecDuplicate(fine_ctx.x,&fine_ctx.b);
  VecGetLocalSize(fine_ctx.x,&nlocal);
  DMCreateLocalVector(fine_ctx.da,&fine_ctx.localX);
  VecDuplicate(fine_ctx.localX,&fine_ctx.localF);
  DMCreateMatrix(fine_ctx.da,&A);
  FormJacobian_Grid(&fine_ctx,A);

  /* create linear solver */
  KSPCreate(PETSC_COMM_WORLD,&ksp);
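  /* Attach the DMDA so that DM-aware preconditioners (e.g. PCMG with Galerkin
     coarsening) can build grid hierarchies and interpolation from it, but keep
     it inactive so the operator and right-hand side set explicitly below are
     not recomputed from the DM. */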
  KSPSetDM(ksp,fine_ctx.da);
  KSPSetDMActive(ksp,PETSC_FALSE);

  /* set values for rhs vector */
  VecSet(fine_ctx.b,one);

  /* set options, then solve system */
  KSPSetFromOptions(ksp); /* calls PCSetFromOptions_ML if 'pc_type=ml' */
  KSPSetOperators(ksp,A,A);
  KSPSolve(ksp,fine_ctx.b,fine_ctx.x);
  VecViewFromOptions(fine_ctx.x,NULL,"-debug");
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);

  /* test multiple right-hand sides */
  MatCreateDense(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,fine_ctx.mx*fine_ctx.my,nrhs,NULL,&B);
  MatSetOptionsPrefix(B,"rhs_");
  MatSetFromOptions(B);
  MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);
  if (Brand) {
    MatSetRandom(B,NULL);
  } else {
    PetscScalar *b;

    MatDenseGetArrayWrite(B,&b);
    for (i=0; i<nlocal*nrhs; i++) b[i] = 1.0;
    MatDenseRestoreArrayWrite(B,&b);
    MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  }
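
  /* KSPMatSolve() applies the same solver to every column of B, writing the
     solutions into the corresponding columns of X; -ksp_matsolve_batch_size
     (exercised in the tests below) processes the columns in smaller batches. */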
  KSPMatSolve(ksp,B,X);
  MatViewFromOptions(X,NULL,"-debug");

  PetscObjectTypeCompare((PetscObject)ksp,KSPPREONLY,&flg);
  if ((flg || nrhs == 1) && !Brand) {
    PetscInt          n;
    const PetscScalar *xx,*XX;

    /* with a single right-hand side (or KSPPREONLY) every column of X should
       match the vector solution computed above, since each column of B equals
       the right-hand side used in KSPSolve() */
    VecGetArrayRead(fine_ctx.x,&xx);
    MatDenseGetArrayRead(X,&XX);
    for (n=0; n<nrhs; n++) {
      for (i=0; i<nlocal; i++) {
        if (PetscAbsScalar(xx[i] - XX[nlocal*n + i]) > PETSC_SMALL) {
          PetscPrintf(PETSC_COMM_SELF,"[%d] Error local solve %D, entry %D -> %g + i %g != %g + i %g\n",PetscGlobalRank,n,i,(double)PetscRealPart(xx[i]),(double)PetscImaginaryPart(xx[i]),(double)PetscRealPart(XX[nlocal*n + i]),(double)PetscImaginaryPart(XX[nlocal*n + i]));
        }
      }
    }
    MatDenseRestoreArrayRead(X,&XX);
    VecRestoreArrayRead(fine_ctx.x,&xx);
  }

  /* free data structures */
  VecDestroy(&fine_ctx.x);
  VecDestroy(&fine_ctx.b);
  DMDestroy(&fine_ctx.da);
  VecDestroy(&fine_ctx.localX);
  VecDestroy(&fine_ctx.localF);
  MatDestroy(&A);
  MatDestroy(&B);
  MatDestroy(&X);
  KSPDestroy(&ksp);

  ierr = PetscFinalize();
  return ierr;
}

PetscErrorCode FormJacobian_Grid(GridCtx *grid,Mat jac)
{
  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
  PetscInt               grow;
  const PetscInt         *ltog;
  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
  ISLocalToGlobalMapping ltogm;

  mx    = grid->mx; my = grid->my;
  hx    = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  hxdhy = hx/hy; hydhx = hy/hx;

  /* Get ghost points */
  DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
  DMGetLocalToGlobalMapping(grid->da,&ltogm);
  ISLocalToGlobalMappingGetIndices(ltogm,&ltog);

  /* Evaluate Jacobian of function */
  for (j=ys; j<ys+ym; j++) {
    row = (j - Ys)*Xm + xs - Xs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        /* interior node: 5-point stencil scaled by hx*hy */
        v[0] = -hxdhy; col[0] = ltog[row - Xm];
        v[1] = -hydhx; col[1] = ltog[row - 1];
        v[2] = two*(hydhx + hxdhy); col[2] = grow;
        v[3] = -hydhx; col[3] = ltog[row + 1];
        v[4] = -hxdhy; col[4] = ltog[row + Xm];
        MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        /* boundary node on an edge: diagonal entry only */
        value = .5*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      } else {
        /* boundary node at a corner: diagonal entry only */
        value = .25*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      }
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return 0;
}

/*TEST

   test:
      args: -ksp_monitor_short

   test:
      suffix: 2
      args: -ksp_monitor_short
      nsize: 3

   test:
      suffix: ml_1
      args: -ksp_monitor_short -pc_type ml -mat_no_inode
      nsize: 3
      requires: ml

   test:
      suffix: ml_2
      args: -ksp_monitor_short -pc_type ml -mat_no_inode -ksp_max_it 3
      nsize: 3
      requires: ml

   test:
      suffix: ml_3
      args: -ksp_monitor_short -pc_type ml -mat_no_inode -pc_mg_type ADDITIVE -ksp_max_it 7
      nsize: 1
      requires: ml

   test:
      suffix: cycles
      nsize: {{1 2}}
      args: -ksp_view_final_residual -pc_type mg -mx 5 -my 5 -pc_mg_levels 3 -pc_mg_galerkin -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 1

   test:
      suffix: matcycles
      nsize: {{1 2}}
      args: -ksp_view_final_residual -ksp_type preonly -pc_type mg -mx 5 -my 5 -pc_mg_levels 3 -pc_mg_galerkin -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}

   test:
      requires: ml
      suffix: matcycles_ml
      nsize: {{1 2}}
      args: -ksp_view_final_residual -ksp_type preonly -pc_type ml -mx 5 -my 5 -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}

   test:
      requires: hpddm
      suffix: matcycles_hpddm_mg
      nsize: {{1 2}}
      args: -ksp_view_final_residual -ksp_type hpddm -pc_type mg -pc_mg_levels 3 -pc_mg_galerkin -mx 5 -my 5 -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}

   test:
      requires: hpddm
      nsize: {{1 2}}
      suffix: matcycles_hpddm_ilu
      args: -ksp_view_final_residual -ksp_type hpddm -pc_type redundant -redundant_pc_type ilu -mx 5 -my 5 -ksp_monitor -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}

TEST*/