Actual source code: ex96.c
static char help[] = "Tests sequential and parallel DMGetMatrix(), MatMatMult() and MatPtAP()\n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";
/*
  This test is modified from ~src/ksp/examples/tests/ex19.c.
  Example of usage: mpiexec -n 3 ex96 -Mx 10 -My 10 -Mz 10
*/
#include <petscdmda.h>
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
/* User-defined application contexts */
typedef struct {
  PetscInt mx,my,mz;      /* number of grid points in the x, y and z directions */
  Vec      localX,localF; /* local vectors with ghost regions */
  DM       da;
  Vec      x,b,r;         /* global vectors */
  Mat      J;             /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii;            /* interpolation from coarse to fine */
} AppCtx;
#define COARSE_LEVEL 0
#define FINE_LEVEL   1

/*
  ratio - ratio of grid lines between the fine and coarse grids (set with the -ratio option).
*/
int main(int argc,char **argv)
{
  AppCtx      user;
  PetscInt    Npx=PETSC_DECIDE,Npy=PETSC_DECIDE,Npz=PETSC_DECIDE;
  PetscMPIInt size,rank;
  PetscInt    m,n,M,N,i,nrows,*ia,*ja;
  PetscScalar one = 1.0;
  PetscReal   fill=2.0;
  Mat         A,A_tmp,P,C,C1,C2;
  PetscScalar *array,none = -1.0,alpha;
  Vec         x,v1,v2,v3,v4;
  PetscReal   norm,norm_tmp,norm_tmp1,tol=1.e-12;
  PetscRandom rdm;
  PetscBool   Test_MatMatMult=PETSC_TRUE,Test_MatPtAP=PETSC_TRUE,Test_3D=PETSC_FALSE,flg;
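  /* 'fill' is the expected fill ratio nnz(C)/(nnz(A)+nnz(P)) passed to MatMatMult() and
     MatPtAP() as a preallocation hint; 'tol' bounds the relative error accepted in the
     checks below */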
  PetscInitialize(&argc,&argv,PETSC_NULL,help);
  PetscOptionsGetReal(PETSC_NULL,"-tol",&tol,PETSC_NULL);
  user.ratio     = 2;
  user.coarse.mx = 2; user.coarse.my = 2; user.coarse.mz = 0;
  PetscOptionsGetInt(PETSC_NULL,"-Mx",&user.coarse.mx,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-My",&user.coarse.my,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Mz",&user.coarse.mz,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-ratio",&user.ratio,PETSC_NULL);
  if (user.coarse.mz) Test_3D = PETSC_TRUE;
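  /* The fine grid refines each coarse grid cell 'ratio' times: m coarse grid points in a
     direction give ratio*(m-1)+1 fine grid points, so the two grids share grid lines */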
  user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
  user.fine.my = user.ratio*(user.coarse.my-1)+1;
  user.fine.mz = user.ratio*(user.coarse.mz-1)+1;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetInt(PETSC_NULL,"-Npx",&Npx,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Npy",&Npy,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Npz",&Npz,PETSC_NULL);
  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,
                 user.fine.my,Npx,Npy,1,1,PETSC_NULL,PETSC_NULL,&user.fine.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                 user.fine.mx,user.fine.my,user.fine.mz,Npx,Npy,Npz,
                 1,1,PETSC_NULL,PETSC_NULL,PETSC_NULL,&user.fine.da);
  }
  /* Test DMGetMatrix() */
  /*------------------------------------------------------------*/
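  /* Both matrices are preallocated with the nonzero pattern of the DMDA stencil and zero
     entries, so after converting the BAIJ matrix to AIJ the two should compare as equal */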
  DMGetMatrix(user.fine.da,MATAIJ,&A);
  DMGetMatrix(user.fine.da,MATBAIJ,&C);

  MatConvert(C,MATAIJ,MAT_INITIAL_MATRIX,&A_tmp); /* does not work for MPISBAIJ matrices! */
  MatEqual(A,A_tmp,&flg);
  if (!flg) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMETYPE,"A != C");
  }
  MatDestroy(&C);
  MatDestroy(&A_tmp);

  /*------------------------------------------------------------*/
  MatGetLocalSize(A,&m,&n);
  MatGetSize(A,&M,&N);
  /* set all stored entries of A to one */
  if (size == 1) {
    MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatGetArray(A,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatRestoreArray(A,&array);
    }
    MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  } else {
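    /* An MPIAIJ matrix stores two sequential AIJ blocks per process: aij->A holds the
       locally owned columns (the "diagonal" block) and aij->B the remaining columns
       (the "off-diagonal" block), so both value arrays are set directly here */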
    Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data;
    Mat_SeqAIJ *a   = (Mat_SeqAIJ*)(aij->A)->data, *b = (Mat_SeqAIJ*)(aij->B)->data;
    /* A_part */
    for (i=0; i<a->i[m]; i++) a->a[i] = one;
    /* B_part */
    for (i=0; i<b->i[m]; i++) b->a[i] = one;
  }
  /* MatView(A, PETSC_VIEWER_STDOUT_WORLD); */
  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,
                 user.coarse.my,Npx,Npy,1,1,PETSC_NULL,PETSC_NULL,&user.coarse.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                 user.coarse.mx,user.coarse.my,user.coarse.mz,Npx,Npy,Npz,
                 1,1,PETSC_NULL,PETSC_NULL,PETSC_NULL,&user.coarse.da);
  }
  /* Create interpolation between the levels */
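  /* P maps coarse-grid vectors to the fine grid; its transpose acts as the restriction
     operator, which is what MatPtAP() uses below to form the coarse-grid operator */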
  DMGetInterpolation(user.coarse.da,user.fine.da,&P,PETSC_NULL);

  MatGetLocalSize(P,&m,&n);
  MatGetSize(P,&M,&N);
  /* Create vectors v1 and v2 that are compatible with A */
  VecCreate(PETSC_COMM_WORLD,&v1);
  MatGetLocalSize(A,&m,PETSC_NULL);
  VecSetSizes(v1,m,PETSC_DECIDE);
  VecSetFromOptions(v1);
  VecDuplicate(v1,&v2);
  PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
  PetscRandomSetFromOptions(rdm);
  /* Test MatMatMult(): C = A*P */
  /*----------------------------*/
  if (Test_MatMatMult) {
    MatDuplicate(A,MAT_COPY_VALUES,&A_tmp);
    MatMatMult(A_tmp,P,MAT_INITIAL_MATRIX,fill,&C);

    /* Test MAT_REUSE_MATRIX - reuse symbolic C */
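    /* MatScale() changes only the numerical values of A_tmp, not its nonzero pattern, so
       the symbolic product computed above is reused and only the numeric phase repeats */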
    alpha = 1.0;
    for (i=0; i<2; i++) {
      alpha -= 0.1;
      MatScale(A_tmp,alpha);
      MatMatMult(A_tmp,P,MAT_REUSE_MATRIX,fill,&C);
    }

    /* Test MatDuplicate() */
    /*----------------------------*/
    MatDuplicate(C,MAT_COPY_VALUES,&C1);
    MatDuplicate(C1,MAT_COPY_VALUES,&C2);
    MatDestroy(&C1);
    MatDestroy(&C2);
    /* Create vector x that is compatible with P */
    VecCreate(PETSC_COMM_WORLD,&x);
    MatGetLocalSize(P,PETSC_NULL,&n);
    VecSetSizes(x,n,PETSC_DECIDE);
    VecSetFromOptions(x);
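    /* Check the product against a matrix-free evaluation: for random x, compare C*x with
       A*(P*x) and record the largest relative difference in the 1-norm */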
    norm = 0.0;
    for (i=0; i<10; i++) {
      VecSetRandom(x,rdm);
      MatMult(P,x,v1);
      MatMult(A_tmp,v1,v2); /* v2 = A*P*x */
      MatMult(C,x,v1);      /* v1 = C*x   */
      VecAXPY(v1,none,v2);
      VecNorm(v1,NORM_1,&norm_tmp);
      VecNorm(v2,NORM_1,&norm_tmp1);
      norm_tmp /= norm_tmp1;
      if (norm_tmp > norm) norm = norm_tmp;
    }
    if (norm >= tol && !rank) {
      PetscPrintf(PETSC_COMM_SELF,"Error: MatMatMult(), |v1 - v2|/|v2|: %G\n",norm);
    }

    VecDestroy(&x);
    MatDestroy(&C);
    MatDestroy(&A_tmp);
  }
  /* Test P^T * A * P - MatPtAP() */
  /*------------------------------*/
  if (Test_MatPtAP) {
    MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&C);
    MatGetLocalSize(C,&m,&n);

    /* Test MAT_REUSE_MATRIX - reuse symbolic C */
    alpha = 1.0;
    for (i=0; i<1; i++) {
      alpha -= 0.1;
      MatScale(A,alpha);
      MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&C);
    }
    /* Test MatDuplicate() */
    /*----------------------------*/
    MatDuplicate(C,MAT_COPY_VALUES,&C1);
    MatDuplicate(C1,MAT_COPY_VALUES,&C2);
    MatDestroy(&C1);
    MatDestroy(&C2);

    /* Create vector x that is compatible with P */
    VecCreate(PETSC_COMM_WORLD,&x);
    MatGetLocalSize(P,&m,&n);
    VecSetSizes(x,n,PETSC_DECIDE);
    VecSetFromOptions(x);

    VecCreate(PETSC_COMM_WORLD,&v3);
    VecSetSizes(v3,n,PETSC_DECIDE);
    VecSetFromOptions(v3);
    VecDuplicate(v3,&v4);
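    /* Check the triple product against a matrix-free evaluation: for random x, compare
       C*x with P^T*(A*(P*x)) and record the largest relative difference in the 1-norm */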
    norm = 0.0;
    for (i=0; i<10; i++) {
      VecSetRandom(x,rdm);
      MatMult(P,x,v1);
      MatMult(A,v1,v2);          /* v2 = A*P*x     */
      MatMultTranspose(P,v2,v3); /* v3 = P^T*A*P*x */
      MatMult(C,x,v4);           /* v4 = C*x       */
      VecAXPY(v4,none,v3);
      VecNorm(v4,NORM_1,&norm_tmp);
      VecNorm(v3,NORM_1,&norm_tmp1);
      norm_tmp /= norm_tmp1;
      if (norm_tmp > norm) norm = norm_tmp;
    }
    if (norm >= tol && !rank) {
      PetscPrintf(PETSC_COMM_SELF,"Error: MatPtAP(), |v3 - v4|/|v3|: %G\n",norm);
    }

    MatDestroy(&C);
    VecDestroy(&v3);
    VecDestroy(&v4);
    VecDestroy(&x);
  }
  /* Clean up */
  MatDestroy(&A);
  PetscRandomDestroy(&rdm);
  VecDestroy(&v1);
  VecDestroy(&v2);
  DMDestroy(&user.fine.da);
  DMDestroy(&user.coarse.da);
  MatDestroy(&P);
  PetscFinalize();
  return 0;
}