Actual source code: ex41.c (petsc-3.6.4, 2016-04-12)
static char help[] = "Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\
  -ratio <r>, where <r> = ratio of fine to coarse grid lines (default 2)\n\n";

/*
  Example of usage: mpiexec -n 3 ./ex41 -Mx 10 -My 10 -Mz 10
*/
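/*
  Note: -Mz defaults to 0, in which case the 2D code path is taken, e.g.
      mpiexec -n 3 ./ex41 -Mx 10 -My 10
*/
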
#include <petscdm.h>
#include <petscdmda.h>
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
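
/*
  The two private headers above expose the internal layouts of the SeqAIJ and
  MPIAIJ matrix types (Mat_SeqAIJ, Mat_MPIAIJ); they are needed because the
  parallel branch below writes directly into the raw CSR value arrays.
*/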
/* User-defined application contexts */
typedef struct {
  PetscInt mx,my,mz;      /* number of grid points in the x, y and z directions */
  Vec      localX,localF; /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;         /* global vectors */
  Mat      J;             /* Jacobian on grid */
} GridCtx;
typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii;            /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1

/*
  ratio - ratio of grid lines between the fine and coarse grids.
*/

int main(int argc,char **argv)
{
  AppCtx      user;
  PetscMPIInt size,rank;
  PetscInt    m,n,M,N,i,nrows;
  PetscScalar one = 1.0;
  PetscReal   fill = 2.0;
  Mat         A,P,R,C,PtAP;
  PetscScalar *array;
  PetscRandom rdm;
  PetscBool   Test_3D = PETSC_FALSE,flg;

  PetscInitialize(&argc,&argv,NULL,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /* Get size of fine grids and coarse grids */
  user.ratio     = 2;
  user.coarse.mx = 2; user.coarse.my = 2; user.coarse.mz = 0;

  PetscOptionsGetInt(NULL,"-Mx",&user.coarse.mx,NULL);
  PetscOptionsGetInt(NULL,"-My",&user.coarse.my,NULL);
  PetscOptionsGetInt(NULL,"-Mz",&user.coarse.mz,NULL);
  PetscOptionsGetInt(NULL,"-ratio",&user.ratio,NULL);
  if (user.coarse.mz) Test_3D = PETSC_TRUE;

  user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
  user.fine.my = user.ratio*(user.coarse.my-1)+1;
  user.fine.mz = user.ratio*(user.coarse.mz-1)+1;
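
  /*
     Example of the sizing above: with -ratio 2 and a 10x10 coarse grid, the
     fine grid is 2*(10-1)+1 = 19 points in each direction, so every grid line
     of the coarse grid coincides with every other grid line of the fine grid.
  */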
  if (!rank) {
    if (!Test_3D) {
      PetscPrintf(PETSC_COMM_SELF,"coarse grids: %d %d; fine grids: %d %d\n",user.coarse.mx,user.coarse.my,user.fine.mx,user.fine.my);
    } else {
      PetscPrintf(PETSC_COMM_SELF,"coarse grids: %d %d %d; fine grids: %d %d %d\n",user.coarse.mx,user.coarse.my,user.coarse.mz,user.fine.mx,user.fine.my,user.fine.mz);
    }
  }

  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,
                 user.fine.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.fine.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                 user.fine.mx,user.fine.my,user.fine.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                 1,1,NULL,NULL,NULL,&user.fine.da);
  }

  /* Create and set A at fine grids */
  DMSetMatType(user.fine.da,MATAIJ);
  DMCreateMatrix(user.fine.da,&A);
  MatGetLocalSize(A,&m,&n);
  MatGetSize(A,&M,&N);

  /* Set every stored entry of A to one (could instead be filled with random values) */
  PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
  PetscRandomSetFromOptions(rdm);
  if (size == 1) {
    const PetscInt *ia,*ja;
    MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(A,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(A,&array);
    }
    MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  } else {
    Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data;
    /* aij->A holds the local diagonal block, aij->B the off-diagonal block */
    Mat_SeqAIJ *a = (Mat_SeqAIJ*)(aij->A)->data,*b = (Mat_SeqAIJ*)(aij->B)->data;
    /* A_part */
    for (i=0; i<a->i[m]; i++) a->a[i] = one;
    /* B_part */
    for (i=0; i<b->i[m]; i++) b->a[i] = one;
  }
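
  /*
     A simpler alternative sketch (assuming MatSetRandom() supports AIJ
     matrices in this PETSc version): fill A from the random context created
     above instead of poking the internal CSR arrays:

         MatSetRandom(A,rdm);

     The direct-array code above is kept because it exercises the internal
     SeqAIJ/MPIAIJ layouts whose headers this test includes.
  */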
  /* if (!rank) printf("A:\n"); */
  /* MatView(A, PETSC_VIEWER_STDOUT_WORLD); */

  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,
                 user.coarse.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.coarse.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                 user.coarse.mx,user.coarse.my,user.coarse.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                 1,1,NULL,NULL,NULL,&user.coarse.da);
  }

  /* Create interpolation between the fine and coarse grids */
  DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL);
  /* if (!rank) printf("P:\n"); */
  /* MatView(P, PETSC_VIEWER_STDOUT_WORLD); */
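
  /*
     P maps coarse-grid vectors to fine-grid vectors, so it has one column per
     coarse grid point and one row per fine grid point; the final NULL skips
     the optional scaling vector that DMCreateInterpolation() can also return.
  */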
  /* Get R = P^T */
  MatTranspose(P,MAT_INITIAL_MATRIX,&R);
  /* if (!rank) printf("R:\n"); */
  /* MatView(R, PETSC_VIEWER_STDOUT_WORLD); */

  /* C = R*A*P */
  MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,fill,&C);
  MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,fill,&C);
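
  /*
     The first call (MAT_INITIAL_MATRIX) builds the sparsity pattern of C and
     computes its values; the second call (MAT_REUSE_MATRIX) recomputes the
     values reusing that pattern, the cheap path taken when A changes but the
     grids do not. fill is an estimate of nnz(C) relative to the operands,
     used only for preallocation during the symbolic phase.
  */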
  /* Test C == PtAP, where MatPtAP() computes P^T*A*P directly; since R = P^T the two results should agree */
  MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&PtAP);
  MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&PtAP);
  MatEqual(C,PtAP,&flg);
  if (!flg) printf("RAP != PtAP\n");
  MatDestroy(&PtAP);

  /* Clean up */
  MatDestroy(&A);
  PetscRandomDestroy(&rdm);
  DMDestroy(&user.fine.da);
  DMDestroy(&user.coarse.da);
  MatDestroy(&P);
  MatDestroy(&R);
  MatDestroy(&C);
  PetscFinalize();
  return 0;
}