Actual source code: matelem.cxx
petsc-3.13.2 2020-06-02
1: #include <../src/mat/impls/elemental/matelemimpl.h>
3: /*
4: The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
5: is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
6: */
7: static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
9: /*@C
10: PetscElementalInitializePackage - Initialize Elemental package
12: Logically Collective
14: Level: developer
16: .seealso: MATELEMENTAL, PetscElementalFinalizePackage()
17: @*/
18: PetscErrorCode PetscElementalInitializePackage(void)
19: {
23: if (El::Initialized()) return(0);
24: El::Initialize(); /* called by the 1st call of MatCreate_Elemental */
25: PetscRegisterFinalize(PetscElementalFinalizePackage);
26: return(0);
27: }
29: /*@C
30: PetscElementalFinalizePackage - Finalize Elemental package
32: Logically Collective
34: Level: developer
36: .seealso: MATELEMENTAL, PetscElementalInitializePackage()
37: @*/
38: PetscErrorCode PetscElementalFinalizePackage(void)
39: {
41: El::Finalize(); /* called by PetscFinalize() */
42: return(0);
43: }
45: static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
46: {
48: Mat_Elemental *a = (Mat_Elemental*)A->data;
49: PetscBool iascii;
52: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
53: if (iascii) {
54: PetscViewerFormat format;
55: PetscViewerGetFormat(viewer,&format);
56: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
57: /* print the Elemental run-time parameters */
58: PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
59: PetscViewerASCIIPrintf(viewer," allocated entries=%d\n",(*a->emat).AllocatedMemory());
60: PetscViewerASCIIPrintf(viewer," grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
66: } else if (format == PETSC_VIEWER_DEFAULT) {
67: PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
68: El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
69: PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
70: if (A->factortype == MAT_FACTOR_NONE){
71: Mat Adense;
72: MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
73: MatView(Adense,viewer);
74: MatDestroy(&Adense);
75: }
76: } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Unsupported viewer format");
77: } else {
78: /* convert to dense format and call MatView() */
79: Mat Adense;
80: MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
81: MatView(Adense,viewer);
82: MatDestroy(&Adense);
83: }
84: return(0);
85: }
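/* ---------------------------------------------------------------------
   Illustrative usage sketch (an editor addition, not part of matelem.cxx):
   MatView with the PETSC_VIEWER_ASCII_INFO format reaches the branch above
   that prints the Elemental run parameters; the default ASCII format goes
   through a MATDENSE copy. Assumes PETSc was configured with
   --download-elemental.
   --------------------------------------------------------------------- */
#if 0
#include <petscmat.h>
int main(int argc,char **argv)
{
  Mat            A;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,4,4);CHKERRQ(ierr);
  ierr = MatSetType(A,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);  /* prints grid height/width etc. */
  ierr = PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
#endif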
87: static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
88: {
89: Mat_Elemental *a = (Mat_Elemental*)A->data;
92: info->block_size = 1.0;
94: if (flag == MAT_LOCAL) {
95: info->nz_allocated = (*a->emat).AllocatedMemory(); /* locally allocated */
96: info->nz_used = info->nz_allocated;
97: } else if (flag == MAT_GLOBAL_MAX) {
98: /* not implemented; see MatGetInfo_MPIAIJ() for how a global info->nz_allocated could be computed with MPIU_Allreduce() */
101: } else if (flag == MAT_GLOBAL_SUM) {
103: info->nz_allocated = (*a->emat).AllocatedMemory(); /* locally allocated */
104: info->nz_used = info->nz_allocated; /* assume Elemental does accurate allocation */
107: }
109: info->nz_unneeded = 0.0;
110: info->assemblies = A->num_ass;
111: info->mallocs = 0;
112: info->memory = ((PetscObject)A)->mem;
113: info->fill_ratio_given = 0; /* determined by Elemental */
114: info->fill_ratio_needed = 0;
115: info->factor_mallocs = 0;
116: return(0);
117: }
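/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of matelem.cxx): the
   allocation recorded above is reachable through the public MatGetInfo()
   interface. Fragment assumes an assembled MATELEMENTAL matrix A.
   --------------------------------------------------------------------- */
#if 0
  MatInfo        info;
  PetscErrorCode ierr;

  ierr = MatGetInfo(A,MAT_LOCAL,&info);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_SELF,"locally allocated entries: %g\n",(double)info.nz_allocated);CHKERRQ(ierr);
#endif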
119: PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
120: {
121: Mat_Elemental *a = (Mat_Elemental*)A->data;
124: switch (op) {
125: case MAT_NEW_NONZERO_LOCATIONS:
126: case MAT_NEW_NONZERO_LOCATION_ERR:
127: case MAT_NEW_NONZERO_ALLOCATION_ERR:
128: case MAT_SYMMETRIC:
129: case MAT_SORTED_FULL:
130: case MAT_HERMITIAN:
131: break;
132: case MAT_ROW_ORIENTED:
133: a->roworiented = flg;
134: break;
135: default:
136: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
137: }
138: return(0);
139: }
141: static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
142: {
143: Mat_Elemental *a = (Mat_Elemental*)A->data;
144: PetscInt i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;
147: // TODO: Initialize matrix to all zeros?
149: // Count the number of queues from this process
150: if (a->roworiented) {
151: for (i=0; i<nr; i++) {
152: if (rows[i] < 0) continue;
153: P2RO(A,0,rows[i],&rrank,&ridx);
154: RO2E(A,0,rrank,ridx,&erow);
155: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
156: for (j=0; j<nc; j++) {
157: if (cols[j] < 0) continue;
158: P2RO(A,1,cols[j],&crank,&cidx);
159: RO2E(A,1,crank,cidx,&ecol);
160: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
161: if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
162: /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
163: if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
164: ++numQueues;
165: continue;
166: }
167: /* printf("Locally updating (%d,%d)\n",erow,ecol); */
168: switch (imode) {
169: case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
170: case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
171: default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
172: }
173: }
174: }
176: /* printf("numQueues=%d\n",numQueues); */
177: a->emat->Reserve( numQueues );
178: for (i=0; i<nr; i++) {
179: if (rows[i] < 0) continue;
180: P2RO(A,0,rows[i],&rrank,&ridx);
181: RO2E(A,0,rrank,ridx,&erow);
182: for (j=0; j<nc; j++) {
183: if (cols[j] < 0) continue;
184: P2RO(A,1,cols[j],&crank,&cidx);
185: RO2E(A,1,crank,cidx,&ecol);
186: if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
187: /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
188: a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
189: }
190: }
191: }
192: } else { /* column-oriented */
193: for (j=0; j<nc; j++) {
194: if (cols[j] < 0) continue;
195: P2RO(A,1,cols[j],&crank,&cidx);
196: RO2E(A,1,crank,cidx,&ecol);
197: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
198: for (i=0; i<nr; i++) {
199: if (rows[i] < 0) continue;
200: P2RO(A,0,rows[i],&rrank,&ridx);
201: RO2E(A,0,rrank,ridx,&erow);
202: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
203: if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
204: /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
205: if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
206: ++numQueues;
207: continue;
208: }
209: /* printf("Locally updating (%d,%d)\n",erow,ecol); */
210: switch (imode) {
211: case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
212: case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
213: default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
214: }
215: }
216: }
218: /* printf("numQueues=%d\n",numQueues); */
219: a->emat->Reserve( numQueues );
220: for (j=0; j<nc; j++) {
221: if (cols[j] < 0) continue;
222: P2RO(A,1,cols[j],&crank,&cidx);
223: RO2E(A,1,crank,cidx,&ecol);
225: for (i=0; i<nr; i++) {
226: if (rows[i] < 0) continue;
227: P2RO(A,0,rows[i],&rrank,&ridx);
228: RO2E(A,0,rrank,ridx,&erow);
229: if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
230: /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
231: a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
232: }
233: }
234: }
235: }
236: return(0);
237: }
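/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of matelem.cxx):
   because off-process entries are queued with QueueUpdate() and combined
   at assembly, only ADD_VALUES may target entries owned by another
   process, as enforced above. Fragment assumes a set-up N x N
   MATELEMENTAL matrix A; note that contributions from all ranks sum.
   --------------------------------------------------------------------- */
#if 0
  PetscInt       i,N;
  PetscScalar    v = 1.0;
  PetscErrorCode ierr;

  ierr = MatGetSize(A,&N,NULL);CHKERRQ(ierr);
  for (i=0; i<N; i++) {                       /* any rank may add to any global entry */
    ierr = MatSetValues(A,1,&i,1,&i,&v,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); /* flushes the queues (ProcessQueues) */
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
#endif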
239: static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
240: {
241: Mat_Elemental *a = (Mat_Elemental*)A->data;
242: PetscErrorCode ierr;
243: const PetscElemScalar *x;
244: PetscElemScalar *y;
245: PetscElemScalar one = 1,zero = 0;
248: VecGetArrayRead(X,(const PetscScalar **)&x);
249: VecGetArray(Y,(PetscScalar **)&y);
250: { /* scope xe/ye so their destructors run before the vector arrays are restored */
251: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
252: xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
253: ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
254: El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
255: }
256: VecRestoreArrayRead(X,(const PetscScalar **)&x);
257: VecRestoreArray(Y,(PetscScalar **)&y);
258: return(0);
259: }
261: static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
262: {
263: Mat_Elemental *a = (Mat_Elemental*)A->data;
264: PetscErrorCode ierr;
265: const PetscElemScalar *x;
266: PetscElemScalar *y;
267: PetscElemScalar one = 1,zero = 0;
270: VecGetArrayRead(X,(const PetscScalar **)&x);
271: VecGetArray(Y,(PetscScalar **)&y);
272: { /* scope xe/ye so their destructors run before the vector arrays are restored */
273: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
274: xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
275: ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
276: El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
277: }
278: VecRestoreArrayRead(X,(const PetscScalar **)&x);
279: VecRestoreArray(Y,(PetscScalar **)&y);
280: return(0);
281: }
283: static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
284: {
285: Mat_Elemental *a = (Mat_Elemental*)A->data;
286: PetscErrorCode ierr;
287: const PetscElemScalar *x;
288: PetscElemScalar *z;
289: PetscElemScalar one = 1;
292: if (Y != Z) {VecCopy(Y,Z);}
293: VecGetArrayRead(X,(const PetscScalar **)&x);
294: VecGetArray(Z,(PetscScalar **)&z);
295: { /* scope xe/ze so their destructors run before the vector arrays are restored */
296: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
297: xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
298: ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
299: El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
300: }
301: VecRestoreArrayRead(X,(const PetscScalar **)&x);
302: VecRestoreArray(Z,(PetscScalar **)&z);
303: return(0);
304: }
306: static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
307: {
308: Mat_Elemental *a = (Mat_Elemental*)A->data;
309: PetscErrorCode ierr;
310: const PetscElemScalar *x;
311: PetscElemScalar *z;
312: PetscElemScalar one = 1;
315: if (Y != Z) {VecCopy(Y,Z);}
316: VecGetArrayRead(X,(const PetscScalar **)&x);
317: VecGetArray(Z,(PetscScalar **)&z);
318: { /* scope xe/ze so their destructors run before the vector arrays are restored */
319: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
320: xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
321: ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
322: El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
323: }
324: VecRestoreArrayRead(X,(const PetscScalar **)&x);
325: VecRestoreArray(Z,(PetscScalar **)&z);
326: return(0);
327: }
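/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of matelem.cxx): the
   four kernels above attach the PETSc vector arrays to [VC,STAR]
   Elemental vectors and call El::Gemv, so MATELEMENTAL matrices act on
   ordinary PETSc Vecs without redistribution. Fragment assumes an
   assembled MATELEMENTAL matrix A.
   --------------------------------------------------------------------- */
#if 0
  Vec            x,y;
  PetscErrorCode ierr;

  ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = MatMult(A,x,y);CHKERRQ(ierr);                /* y = A*x       */
  ierr = MatMultTransposeAdd(A,y,x,x);CHKERRQ(ierr);  /* x = A^T*y + x */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
#endif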
329: PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
330: {
331: Mat_Elemental *a = (Mat_Elemental*)A->data;
332: Mat_Elemental *b = (Mat_Elemental*)B->data;
333: Mat_Elemental *c = (Mat_Elemental*)C->data;
334: PetscElemScalar one = 1,zero = 0;
338: El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
340: C->assembled = PETSC_TRUE;
341: return(0);
342: }
344: PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
345: {
349: MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
350: MatSetType(Ce,MATELEMENTAL);
351: MatSetUp(Ce);
352: Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
353: return(0);
354: }
356: static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
357: {
358: Mat_Elemental *a = (Mat_Elemental*)A->data;
359: Mat_Elemental *b = (Mat_Elemental*)B->data;
360: Mat_Elemental *c = (Mat_Elemental*)C->data;
361: PetscElemScalar one = 1,zero = 0;
365: El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
367: C->assembled = PETSC_TRUE;
368: return(0);
369: }
371: static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
372: {
376: MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
377: MatSetType(C,MATELEMENTAL);
378: MatSetUp(C);
379: return(0);
380: }
382: /* --------------------------------------- */
383: static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
384: {
386: C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
387: C->ops->productsymbolic = MatProductSymbolic_AB;
388: return(0);
389: }
391: static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
392: {
394: C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
395: C->ops->productsymbolic = MatProductSymbolic_ABt;
396: return(0);
397: }
399: PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
400: {
402: Mat_Product *product = C->product;
405: switch (product->type) {
406: case MATPRODUCT_AB:
407: MatProductSetFromOptions_Elemental_AB(C);
408: break;
409: case MATPRODUCT_ABt:
410: MatProductSetFromOptions_Elemental_ABt(C);
411: break;
412: default: SETERRQ1(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"MatProduct type %s is not supported for Elemental and Elemental matrices",MatProductTypes[product->type]);
413: }
414: return(0);
416: /* --------------------------------------- */
418: static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
419: {
420: PetscInt i,nrows,ncols,nD,rrank,ridx,crank,cidx;
421: Mat_Elemental *a = (Mat_Elemental*)A->data;
422: PetscErrorCode ierr;
423: PetscElemScalar v;
424: MPI_Comm comm;
427: PetscObjectGetComm((PetscObject)A,&comm);
428: MatGetSize(A,&nrows,&ncols);
429: nD = nrows>ncols ? ncols : nrows;
430: for (i=0; i<nD; i++) {
431: PetscInt erow,ecol;
432: P2RO(A,0,i,&rrank,&ridx);
433: RO2E(A,0,rrank,ridx,&erow);
434: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
435: P2RO(A,1,i,&crank,&cidx);
436: RO2E(A,1,crank,cidx,&ecol);
437: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
438: v = a->emat->Get(erow,ecol);
439: VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
440: }
441: VecAssemblyBegin(D);
442: VecAssemblyEnd(D);
443: return(0);
444: }
446: static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
447: {
448: Mat_Elemental *x = (Mat_Elemental*)X->data;
449: const PetscElemScalar *d;
450: PetscErrorCode ierr;
453: if (R) {
454: VecGetArrayRead(R,(const PetscScalar **)&d);
455: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
456: de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
457: El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
458: VecRestoreArrayRead(R,(const PetscScalar **)&d);
459: }
460: if (L) {
461: VecGetArrayRead(L,(const PetscScalar **)&d);
462: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
463: de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
464: El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
465: VecRestoreArrayRead(L,(const PetscScalar **)&d);
466: }
467: return(0);
468: }
470: static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
471: {
473: *missing = PETSC_FALSE;
474: return(0);
475: }
477: static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
478: {
479: Mat_Elemental *x = (Mat_Elemental*)X->data;
482: El::Scale((PetscElemScalar)a,*x->emat);
483: return(0);
484: }
486: /*
487: MatAXPY - Computes Y = a*X + Y.
488: */
489: static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
490: {
491: Mat_Elemental *x = (Mat_Elemental*)X->data;
492: Mat_Elemental *y = (Mat_Elemental*)Y->data;
496: El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
497: PetscObjectStateIncrease((PetscObject)Y);
498: return(0);
499: }
501: static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
502: {
503: Mat_Elemental *a=(Mat_Elemental*)A->data;
504: Mat_Elemental *b=(Mat_Elemental*)B->data;
508: El::Copy(*a->emat,*b->emat);
509: PetscObjectStateIncrease((PetscObject)B);
510: return(0);
511: }
513: static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
514: {
515: Mat Be;
516: MPI_Comm comm;
517: Mat_Elemental *a=(Mat_Elemental*)A->data;
521: PetscObjectGetComm((PetscObject)A,&comm);
522: MatCreate(comm,&Be);
523: MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
524: MatSetType(Be,MATELEMENTAL);
525: MatSetUp(Be);
526: *B = Be;
527: if (op == MAT_COPY_VALUES) {
528: Mat_Elemental *b=(Mat_Elemental*)Be->data;
529: El::Copy(*a->emat,*b->emat);
530: }
531: Be->assembled = PETSC_TRUE;
532: return(0);
533: }
535: static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
536: {
537: Mat Be = *B;
539: MPI_Comm comm;
540: Mat_Elemental *a = (Mat_Elemental*)A->data, *b;
543: PetscObjectGetComm((PetscObject)A,&comm);
544: /* Only out-of-place supported */
545: if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
546: if (reuse == MAT_INITIAL_MATRIX) {
547: MatCreate(comm,&Be);
548: MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
549: MatSetType(Be,MATELEMENTAL);
550: MatSetUp(Be);
551: *B = Be;
552: }
553: b = (Mat_Elemental*)Be->data;
554: El::Transpose(*a->emat,*b->emat);
555: Be->assembled = PETSC_TRUE;
556: return(0);
557: }
559: static PetscErrorCode MatConjugate_Elemental(Mat A)
560: {
561: Mat_Elemental *a = (Mat_Elemental*)A->data;
564: El::Conjugate(*a->emat);
565: return(0);
566: }
568: static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
569: {
570: Mat Be = *B;
572: MPI_Comm comm;
573: Mat_Elemental *a = (Mat_Elemental*)A->data, *b;
576: PetscObjectGetComm((PetscObject)A,&comm);
577: if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
578: if (reuse == MAT_INITIAL_MATRIX){
579: MatCreate(comm,&Be);
580: MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
581: MatSetType(Be,MATELEMENTAL);
582: MatSetUp(Be);
583: *B = Be;
584: }
585: b = (Mat_Elemental*)Be->data;
586: El::Adjoint(*a->emat,*b->emat);
587: Be->assembled = PETSC_TRUE;
588: return(0);
589: }
591: static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
592: {
593: Mat_Elemental *a = (Mat_Elemental*)A->data;
594: PetscErrorCode ierr;
595: PetscElemScalar *x;
596: PetscInt pivoting = a->pivoting;
599: VecCopy(B,X);
600: VecGetArray(X,(PetscScalar **)&x);
602: El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
603: xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
604: El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
605: switch (A->factortype) {
606: case MAT_FACTOR_LU:
607: if (pivoting == 0) {
608: El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
609: } else if (pivoting == 1) {
610: El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
611: } else { /* pivoting == 2 */
612: El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
613: }
614: break;
615: case MAT_FACTOR_CHOLESKY:
616: El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
617: break;
618: default:
619: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
620: break;
621: }
622: El::Copy(xer,xe);
624: VecRestoreArray(X,(PetscScalar **)&x);
625: return(0);
626: }
628: static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
629: {
630: PetscErrorCode ierr;
633: MatSolve_Elemental(A,B,X);
634: VecAXPY(X,1,Y);
635: return(0);
636: }
638: static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
639: {
640: Mat_Elemental *a=(Mat_Elemental*)A->data;
641: Mat_Elemental *b=(Mat_Elemental*)B->data;
642: Mat_Elemental *x=(Mat_Elemental*)X->data;
643: PetscInt pivoting = a->pivoting;
646: El::Copy(*b->emat,*x->emat);
647: switch (A->factortype) {
648: case MAT_FACTOR_LU:
649: if (pivoting == 0) {
650: El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
651: } else if (pivoting == 1) {
652: El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
653: } else {
654: El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
655: }
656: break;
657: case MAT_FACTOR_CHOLESKY:
658: El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
659: break;
660: default:
661: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
662: break;
663: }
664: return(0);
665: }
667: static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
668: {
669: Mat_Elemental *a = (Mat_Elemental*)A->data;
671: PetscInt pivoting = a->pivoting;
674: if (pivoting == 0) {
675: El::LU(*a->emat);
676: } else if (pivoting == 1) {
677: El::LU(*a->emat,*a->P);
678: } else {
679: El::LU(*a->emat,*a->P,*a->Q);
680: }
681: A->factortype = MAT_FACTOR_LU;
682: A->assembled = PETSC_TRUE;
684: PetscFree(A->solvertype);
685: PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
686: return(0);
687: }
689: static PetscErrorCode MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
690: {
694: MatCopy(A,F,SAME_NONZERO_PATTERN);
695: MatLUFactor_Elemental(F,0,0,info);
696: return(0);
697: }
699: static PetscErrorCode MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
700: {
702: /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing more to do here. */
703: return(0);
704: }
706: static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
707: {
708: Mat_Elemental *a = (Mat_Elemental*)A->data;
709: El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
713: El::Cholesky(El::UPPER,*a->emat);
714: A->factortype = MAT_FACTOR_CHOLESKY;
715: A->assembled = PETSC_TRUE;
717: PetscFree(A->solvertype);
718: PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
719: return(0);
720: }
722: static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
723: {
727: MatCopy(A,F,SAME_NONZERO_PATTERN);
728: MatCholeskyFactor_Elemental(F,0,info);
729: return(0);
730: }
732: static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
733: {
735: /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing more to do here. */
736: return(0);
737: }
739: PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
740: {
742: *type = MATSOLVERELEMENTAL;
743: return(0);
744: }
746: static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
747: {
748: Mat B;
752: /* Create the factorization matrix */
753: MatCreate(PetscObjectComm((PetscObject)A),&B);
754: MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
755: MatSetType(B,MATELEMENTAL);
756: MatSetUp(B);
757: B->factortype = ftype;
758: PetscFree(B->solvertype);
759: PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);
761: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
762: *F = B;
763: return(0);
764: }
766: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
767: {
771: MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL, MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
772: MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL, MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
773: return(0);
776: static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
777: {
778: Mat_Elemental *a=(Mat_Elemental*)A->data;
781: switch (type){
782: case NORM_1:
783: *nrm = El::OneNorm(*a->emat);
784: break;
785: case NORM_FROBENIUS:
786: *nrm = El::FrobeniusNorm(*a->emat);
787: break;
788: case NORM_INFINITY:
789: *nrm = El::InfinityNorm(*a->emat);
790: break;
791: default:
792: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
793: }
794: return(0);
795: }
797: static PetscErrorCode MatZeroEntries_Elemental(Mat A)
798: {
799: Mat_Elemental *a=(Mat_Elemental*)A->data;
802: El::Zero(*a->emat);
803: return(0);
804: }
806: static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
807: {
808: Mat_Elemental *a = (Mat_Elemental*)A->data;
810: PetscInt i,m,shift,stride,*idx;
813: if (rows) {
814: m = a->emat->LocalHeight();
815: shift = a->emat->ColShift();
816: stride = a->emat->ColStride();
817: PetscMalloc1(m,&idx);
818: for (i=0; i<m; i++) {
819: PetscInt rank,offset;
820: E2RO(A,0,shift+i*stride,&rank,&offset);
821: RO2P(A,0,rank,offset,&idx[i]);
822: }
823: ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
824: }
825: if (cols) {
826: m = a->emat->LocalWidth();
827: shift = a->emat->RowShift();
828: stride = a->emat->RowStride();
829: PetscMalloc1(m,&idx);
830: for (i=0; i<m; i++) {
831: PetscInt rank,offset;
832: E2RO(A,1,shift+i*stride,&rank,&offset);
833: RO2P(A,1,rank,offset,&idx[i]);
834: }
835: ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
836: }
837: return(0);
838: }
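/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of matelem.cxx):
   MatGetOwnershipIS() (composed as "MatGetOwnershipIS_C" in
   MatCreate_Elemental below) reports which global rows and columns this
   process stores under Elemental's cyclic distribution. Fragment assumes
   an assembled MATELEMENTAL matrix A.
   --------------------------------------------------------------------- */
#if 0
  IS             isrows,iscols;
  PetscErrorCode ierr;

  ierr = MatGetOwnershipIS(A,&isrows,&iscols);CHKERRQ(ierr);
  ierr = ISView(isrows,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = ISDestroy(&isrows);CHKERRQ(ierr);
  ierr = ISDestroy(&iscols);CHKERRQ(ierr);
#endif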
840: static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
841: {
842: Mat Bmpi;
843: Mat_Elemental *a = (Mat_Elemental*)A->data;
844: MPI_Comm comm;
845: PetscErrorCode ierr;
846: IS isrows,iscols;
847: PetscInt rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
848: const PetscInt *rows,*cols;
849: PetscElemScalar v;
850: const El::Grid &grid = a->emat->Grid();
853: PetscObjectGetComm((PetscObject)A,&comm);
855: if (reuse == MAT_REUSE_MATRIX) {
856: Bmpi = *B;
857: } else {
858: MatCreate(comm,&Bmpi);
859: MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
860: MatSetType(Bmpi,MATDENSE);
861: MatSetUp(Bmpi);
862: }
864: /* Get local entries of A */
865: MatGetOwnershipIS(A,&isrows,&iscols);
866: ISGetLocalSize(isrows,&nrows);
867: ISGetIndices(isrows,&rows);
868: ISGetLocalSize(iscols,&ncols);
869: ISGetIndices(iscols,&cols);
871: if (a->roworiented) {
872: for (i=0; i<nrows; i++) {
873: P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
874: RO2E(A,0,rrank,ridx,&erow);
875: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
876: for (j=0; j<ncols; j++) {
877: P2RO(A,1,cols[j],&crank,&cidx);
878: RO2E(A,1,crank,cidx,&ecol);
879: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
881: elrow = erow / grid.MCSize(); /* Elemental local row index */
882: elcol = ecol / grid.MRSize(); /* Elemental local column index */
883: v = a->emat->GetLocal(elrow,elcol);
884: MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
885: }
886: }
887: } else { /* column-oriented */
888: for (j=0; j<ncols; j++) {
889: P2RO(A,1,cols[j],&crank,&cidx);
890: RO2E(A,1,crank,cidx,&ecol);
891: if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
892: for (i=0; i<nrows; i++) {
893: P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
894: RO2E(A,0,rrank,ridx,&erow);
895: if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
897: elrow = erow / grid.MCSize(); /* Elemental local row index */
898: elcol = ecol / grid.MRSize(); /* Elemental local column index */
899: v = a->emat->GetLocal(elrow,elcol);
900: MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
901: }
902: }
903: }
904: MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
905: MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
906: if (reuse == MAT_INPLACE_MATRIX) {
907: MatHeaderReplace(A,&Bmpi);
908: } else {
909: *B = Bmpi;
910: }
911: ISDestroy(&isrows);
912: ISDestroy(&iscols);
913: return(0);
914: }
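/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of matelem.cxx): the
   dense conversion above is what MatView_Elemental and MatLoad_Elemental
   rely on; the AIJ/SBAIJ converters below follow the same
   create/set-values/assemble pattern in the other direction. Fragment
   assumes an assembled MATELEMENTAL matrix A.
   --------------------------------------------------------------------- */
#if 0
  Mat            Adense;
  PetscErrorCode ierr;

  ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
  ierr = MatView(Adense,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = MatDestroy(&Adense);CHKERRQ(ierr);
#endif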
916: PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
917: {
918: Mat mat_elemental;
919: PetscErrorCode ierr;
920: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols;
921: const PetscInt *cols;
922: const PetscScalar *vals;
925: if (reuse == MAT_REUSE_MATRIX) {
926: mat_elemental = *newmat;
927: MatZeroEntries(mat_elemental);
928: } else {
929: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
930: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
931: MatSetType(mat_elemental,MATELEMENTAL);
932: MatSetUp(mat_elemental);
933: }
934: for (row=0; row<M; row++) {
935: MatGetRow(A,row,&ncols,&cols,&vals);
936: /* the PETSc-Elemental interface queues off-process entries as additive updates, so only ADD_VALUES is allowed */
937: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
938: MatRestoreRow(A,row,&ncols,&cols,&vals);
939: }
940: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
941: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
943: if (reuse == MAT_INPLACE_MATRIX) {
944: MatHeaderReplace(A,&mat_elemental);
945: } else {
946: *newmat = mat_elemental;
947: }
948: return(0);
949: }
951: PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
952: {
953: Mat mat_elemental;
954: PetscErrorCode ierr;
955: PetscInt row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
956: const PetscInt *cols;
957: const PetscScalar *vals;
960: if (reuse == MAT_REUSE_MATRIX) {
961: mat_elemental = *newmat;
962: MatZeroEntries(mat_elemental);
963: } else {
964: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
965: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
966: MatSetType(mat_elemental,MATELEMENTAL);
967: MatSetUp(mat_elemental);
968: }
969: for (row=rstart; row<rend; row++) {
970: MatGetRow(A,row,&ncols,&cols,&vals);
971: for (j=0; j<ncols; j++) {
972: /* the PETSc-Elemental interface queues off-process entries as additive updates, so only ADD_VALUES is allowed */
973: MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
974: }
975: MatRestoreRow(A,row,&ncols,&cols,&vals);
976: }
977: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
978: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
980: if (reuse == MAT_INPLACE_MATRIX) {
981: MatHeaderReplace(A,&mat_elemental);
982: } else {
983: *newmat = mat_elemental;
984: }
985: return(0);
986: }
988: PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
989: {
990: Mat mat_elemental;
991: PetscErrorCode ierr;
992: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols,j;
993: const PetscInt *cols;
994: const PetscScalar *vals;
997: if (reuse == MAT_REUSE_MATRIX) {
998: mat_elemental = *newmat;
999: MatZeroEntries(mat_elemental);
1000: } else {
1001: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1002: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1003: MatSetType(mat_elemental,MATELEMENTAL);
1004: MatSetUp(mat_elemental);
1005: }
1006: MatGetRowUpperTriangular(A);
1007: for (row=0; row<M; row++) {
1008: MatGetRow(A,row,&ncols,&cols,&vals);
1009: /* the PETSc-Elemental interface queues off-process entries as additive updates, so only ADD_VALUES is allowed */
1010: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1011: for (j=0; j<ncols; j++) { /* lower triangular part */
1012: PetscScalar v;
1013: if (cols[j] == row) continue;
1014: v = A->hermitian ? PetscConj(vals[j]) : vals[j];
1015: MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
1016: }
1017: MatRestoreRow(A,row,&ncols,&cols,&vals);
1018: }
1019: MatRestoreRowUpperTriangular(A);
1020: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1021: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1023: if (reuse == MAT_INPLACE_MATRIX) {
1024: MatHeaderReplace(A,&mat_elemental);
1025: } else {
1026: *newmat = mat_elemental;
1027: }
1028: return(0);
1029: }
1031: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1032: {
1033: Mat mat_elemental;
1034: PetscErrorCode ierr;
1035: PetscInt M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1036: const PetscInt *cols;
1037: const PetscScalar *vals;
1040: if (reuse == MAT_REUSE_MATRIX) {
1041: mat_elemental = *newmat;
1042: MatZeroEntries(mat_elemental);
1043: } else {
1044: MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);
1045: MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
1046: MatSetType(mat_elemental,MATELEMENTAL);
1047: MatSetUp(mat_elemental);
1048: }
1049: MatGetRowUpperTriangular(A);
1050: for (row=rstart; row<rend; row++) {
1051: MatGetRow(A,row,&ncols,&cols,&vals);
1052: /* the PETSc-Elemental interface queues off-process entries as additive updates, so only ADD_VALUES is allowed */
1053: MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
1054: for (j=0; j<ncols; j++) { /* lower triangular part */
1055: PetscScalar v;
1056: if (cols[j] == row) continue;
1057: v = A->hermitian ? PetscConj(vals[j]) : vals[j];
1058: MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
1059: }
1060: MatRestoreRow(A,row,&ncols,&cols,&vals);
1061: }
1062: MatRestoreRowUpperTriangular(A);
1063: MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);
1064: MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);
1066: if (reuse == MAT_INPLACE_MATRIX) {
1067: MatHeaderReplace(A,&mat_elemental);
1068: } else {
1069: *newmat = mat_elemental;
1070: }
1071: return(0);
1072: }
1074: static PetscErrorCode MatDestroy_Elemental(Mat A)
1075: {
1076: Mat_Elemental *a = (Mat_Elemental*)A->data;
1077: PetscErrorCode ierr;
1078: Mat_Elemental_Grid *commgrid;
1079: PetscBool flg;
1080: MPI_Comm icomm;
1083: delete a->emat;
1084: delete a->P;
1085: delete a->Q;
1087: El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1088: PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1089: MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1090: if (--commgrid->grid_refct == 0) {
1091: delete commgrid->grid;
1092: PetscFree(commgrid);
1093: MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
1094: }
1095: PetscCommDestroy(&icomm);
1096: PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
1097: PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
1098: PetscFree(A->data);
1099: return(0);
1100: }
1102: PetscErrorCode MatSetUp_Elemental(Mat A)
1103: {
1104: Mat_Elemental *a = (Mat_Elemental*)A->data;
1106: MPI_Comm comm;
1107: PetscMPIInt rsize,csize;
1108: PetscInt n;
1111: PetscLayoutSetUp(A->rmap);
1112: PetscLayoutSetUp(A->cmap);
1114: /* Check whether the local row and column sizes are equally distributed.
1115: Jed: Elemental uses "element" cyclic ordering so the sizes need to match that
1116: exactly. The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
1117: PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
1118: PetscObjectGetComm((PetscObject)A,&comm);
1119: n = PETSC_DECIDE;
1120: PetscSplitOwnership(comm,&n,&A->rmap->N);
1121: if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);
1123: n = PETSC_DECIDE;
1124: PetscSplitOwnership(comm,&n,&A->cmap->N);
1125: if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);
1127: a->emat->Resize(A->rmap->N,A->cmap->N);
1128: El::Zero(*a->emat);
1130: MPI_Comm_size(A->rmap->comm,&rsize);
1131: MPI_Comm_size(A->cmap->comm,&csize);
1132: if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1133: a->commsize = rsize;
1134: a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1135: a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1136: a->m[0] = A->rmap->N / rsize + (a->mr[0] != rsize);
1137: a->m[1] = A->cmap->N / csize + (a->mr[1] != csize);
1138: return(0);
1139: }
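/* ---------------------------------------------------------------------
   Illustrative note (an editor addition, not part of matelem.cxx): the
   "equally distributed" requirement above mirrors PetscSplitOwnership():
   e.g. N = 10 rows on 4 ranks must be split 3,3,2,2. Letting PETSc choose
   the local sizes always satisfies it:
   --------------------------------------------------------------------- */
#if 0
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,10,10);CHKERRQ(ierr); /* 3,3,2,2 local rows on 4 ranks */
#endif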
1141: PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1142: {
1143: Mat_Elemental *a = (Mat_Elemental*)A->data;
1146: /* printf("Calling ProcessQueues\n"); */
1147: a->emat->ProcessQueues();
1148: /* printf("Finished ProcessQueues\n"); */
1149: return(0);
1150: }
1152: PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
1153: {
1155: /* Currently does nothing */
1156: return(0);
1157: }
1159: PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1160: {
1162: Mat Adense,Ae;
1163: MPI_Comm comm;
1166: PetscObjectGetComm((PetscObject)newMat,&comm);
1167: MatCreate(comm,&Adense);
1168: MatSetType(Adense,MATDENSE);
1169: MatLoad(Adense,viewer);
1170: MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);
1171: MatDestroy(&Adense);
1172: MatHeaderReplace(newMat,&Ae);
1173: return(0);
1174: }
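/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of matelem.cxx):
   MatLoad above reads through a temporary MATDENSE matrix, so any PETSc
   binary file loadable as a dense matrix works. The file name "A.dat" is
   a placeholder.
   --------------------------------------------------------------------- */
#if 0
  Mat            A;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"A.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetType(A,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatLoad(A,viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
#endif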
1176: /* -------------------------------------------------------------------*/
1177: static struct _MatOps MatOps_Values = {
1178: MatSetValues_Elemental,
1179: 0,
1180: 0,
1181: MatMult_Elemental,
1182: /* 4*/ MatMultAdd_Elemental,
1183: MatMultTranspose_Elemental,
1184: MatMultTransposeAdd_Elemental,
1185: MatSolve_Elemental,
1186: MatSolveAdd_Elemental,
1187: 0,
1188: /*10*/ 0,
1189: MatLUFactor_Elemental,
1190: MatCholeskyFactor_Elemental,
1191: 0,
1192: MatTranspose_Elemental,
1193: /*15*/ MatGetInfo_Elemental,
1194: 0,
1195: MatGetDiagonal_Elemental,
1196: MatDiagonalScale_Elemental,
1197: MatNorm_Elemental,
1198: /*20*/ MatAssemblyBegin_Elemental,
1199: MatAssemblyEnd_Elemental,
1200: MatSetOption_Elemental,
1201: MatZeroEntries_Elemental,
1202: /*24*/ 0,
1203: MatLUFactorSymbolic_Elemental,
1204: MatLUFactorNumeric_Elemental,
1205: MatCholeskyFactorSymbolic_Elemental,
1206: MatCholeskyFactorNumeric_Elemental,
1207: /*29*/ MatSetUp_Elemental,
1208: 0,
1209: 0,
1210: 0,
1211: 0,
1212: /*34*/ MatDuplicate_Elemental,
1213: 0,
1214: 0,
1215: 0,
1216: 0,
1217: /*39*/ MatAXPY_Elemental,
1218: 0,
1219: 0,
1220: 0,
1221: MatCopy_Elemental,
1222: /*44*/ 0,
1223: MatScale_Elemental,
1224: MatShift_Basic,
1225: 0,
1226: 0,
1227: /*49*/ 0,
1228: 0,
1229: 0,
1230: 0,
1231: 0,
1232: /*54*/ 0,
1233: 0,
1234: 0,
1235: 0,
1236: 0,
1237: /*59*/ 0,
1238: MatDestroy_Elemental,
1239: MatView_Elemental,
1240: 0,
1241: 0,
1242: /*64*/ 0,
1243: 0,
1244: 0,
1245: 0,
1246: 0,
1247: /*69*/ 0,
1248: 0,
1249: MatConvert_Elemental_Dense,
1250: 0,
1251: 0,
1252: /*74*/ 0,
1253: 0,
1254: 0,
1255: 0,
1256: 0,
1257: /*79*/ 0,
1258: 0,
1259: 0,
1260: 0,
1261: MatLoad_Elemental,
1262: /*84*/ 0,
1263: 0,
1264: 0,
1265: 0,
1266: 0,
1267: /*89*/ 0,
1268: 0,
1269: MatMatMultNumeric_Elemental,
1270: 0,
1271: 0,
1272: /*94*/ 0,
1273: 0,
1274: 0,
1275: MatMatTransposeMultNumeric_Elemental,
1276: 0,
1277: /*99*/ MatProductSetFromOptions_Elemental,
1278: 0,
1279: 0,
1280: MatConjugate_Elemental,
1281: 0,
1282: /*104*/0,
1283: 0,
1284: 0,
1285: 0,
1286: 0,
1287: /*109*/MatMatSolve_Elemental,
1288: 0,
1289: 0,
1290: 0,
1291: MatMissingDiagonal_Elemental,
1292: /*114*/0,
1293: 0,
1294: 0,
1295: 0,
1296: 0,
1297: /*119*/0,
1298: MatHermitianTranspose_Elemental,
1299: 0,
1300: 0,
1301: 0,
1302: /*124*/0,
1303: 0,
1304: 0,
1305: 0,
1306: 0,
1307: /*129*/0,
1308: 0,
1309: 0,
1310: 0,
1311: 0,
1312: /*134*/0,
1313: 0,
1314: 0,
1315: 0,
1316: 0,
1317: 0,
1318: /*140*/0,
1319: 0,
1320: 0,
1321: 0,
1322: 0,
1323: /*145*/0,
1324: 0,
1325: 0
1326: };
1328: /*MC
1329: MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1331: Use ./configure --download-elemental to build PETSc with Elemental support
1333: Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver
1335: Options Database Keys:
1336: + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1337: - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1339: Level: beginner
1341: .seealso: MATDENSE
1342: M*/
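/* ---------------------------------------------------------------------
   Illustrative sketch (an editor addition, not part of the manual page
   above): selecting the type and grid shape from the options database,
   e.g. run with -mat_type elemental -mat_elemental_grid_height 2.
   --------------------------------------------------------------------- */
#if 0
  Mat            A;
  PetscErrorCode ierr;

  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,8,8);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr); /* honors -mat_type elemental */
  ierr = MatSetUp(A);CHKERRQ(ierr);
#endif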
1344: PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1345: {
1346: Mat_Elemental *a;
1347: PetscErrorCode ierr;
1348: PetscBool flg,flg1;
1349: Mat_Elemental_Grid *commgrid;
1350: MPI_Comm icomm;
1351: PetscInt optv1;
1354: PetscElementalInitializePackage();
1355: PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
1356: A->insertmode = NOT_SET_VALUES;
1358: PetscNewLog(A,&a);
1359: A->data = (void*)a;
1361: /* Set up the elemental matrix */
1362: El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1364: /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1365: if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1366: MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
1367: }
1368: PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
1369: MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
1370: if (!flg) {
1371: PetscNewLog(A,&commgrid);
1373: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
1374: /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
1375: PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
1376: if (flg1) {
1377: if (El::mpi::Size(cxxcomm) % optv1 != 0) {
1378: SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1379: }
1380: commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1381: } else {
1382: commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1383: /* printf("new commgrid->grid = %p\n",commgrid->grid); -- memory leak revealed by valgrind? */
1384: }
1385: commgrid->grid_refct = 1;
1386: MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);
1388: a->pivoting = 1;
1389: PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);
1391: PetscOptionsEnd();
1392: } else {
1393: commgrid->grid_refct++;
1394: }
1395: PetscCommDestroy(&icomm);
1396: a->grid = commgrid->grid;
1397: a->emat = new El::DistMatrix<PetscElemScalar>(*a->grid);
1398: a->roworiented = PETSC_TRUE;
1400: PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
1401: PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
1402: return(0);
1403: }