Actual source code: mhypre.c
petsc-3.12.0 2019-09-29
2: /*
3: Creates hypre ijmatrix from PETSc matrix
4: */
6: #include <petscpkg_version.h>
7: #include <petsc/private/petschypre.h>
8: #include <petscmathypre.h>
9: #include <petsc/private/matimpl.h>
10: #include <../src/mat/impls/hypre/mhypre.h>
11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
12: #include <../src/vec/vec/impls/hypre/vhyp.h>
13: #include <HYPRE.h>
14: #include <HYPRE_utilities.h>
15: #include <_hypre_parcsr_ls.h>
16: #include <_hypre_sstruct_ls.h>
18: PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
20: static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
21: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
22: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
23: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat,HYPRE_IJMatrix);
24: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat,HYPRE_Complex,Vec,HYPRE_Complex,Vec,PetscBool);
25: static PetscErrorCode hypre_array_destroy(void*);
26: PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode ins);
28: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
29: {
31: PetscInt i,n_d,n_o;
32: const PetscInt *ia_d,*ia_o;
33: PetscBool done_d=PETSC_FALSE,done_o=PETSC_FALSE;
34: HYPRE_Int *nnz_d=NULL,*nnz_o=NULL;
37: if (A_d) { /* determine number of nonzero entries in local diagonal part */
38: MatGetRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,&n_d,&ia_d,NULL,&done_d);
39: if (done_d) {
40: PetscMalloc1(n_d,&nnz_d);
41: for (i=0; i<n_d; i++) {
42: nnz_d[i] = ia_d[i+1] - ia_d[i];
43: }
44: }
45: MatRestoreRowIJ(A_d,0,PETSC_FALSE,PETSC_FALSE,NULL,&ia_d,NULL,&done_d);
46: }
47: if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
48: MatGetRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
49: if (done_o) {
50: PetscMalloc1(n_o,&nnz_o);
51: for (i=0; i<n_o; i++) {
52: nnz_o[i] = ia_o[i+1] - ia_o[i];
53: }
54: }
55: MatRestoreRowIJ(A_o,0,PETSC_FALSE,PETSC_FALSE,&n_o,&ia_o,NULL,&done_o);
56: }
57: if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
58: if (!done_o) { /* only diagonal part */
59: PetscMalloc1(n_d,&nnz_o);
60: for (i=0; i<n_d; i++) {
61: nnz_o[i] = 0;
62: }
63: }
64: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
65: { /* If we don't do this, the columns of the matrix will be all zeros! */
66: hypre_AuxParCSRMatrix *aux_matrix;
67: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
68: hypre_AuxParCSRMatrixDestroy(aux_matrix);
69: hypre_IJMatrixTranslator(ij) = NULL;
70: PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
71: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
72: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
73: }
74: #else
75: PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(ij,nnz_d,nnz_o));
76: #endif
77: PetscFree(nnz_d);
78: PetscFree(nnz_o);
79: }
80: return(0);
81: }
83: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
84: {
86: PetscInt rstart,rend,cstart,cend;
89: PetscLayoutSetUp(A->rmap);
90: PetscLayoutSetUp(A->cmap);
91: rstart = A->rmap->rstart;
92: rend = A->rmap->rend;
93: cstart = A->cmap->rstart;
94: cend = A->cmap->rend;
95: PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
96: PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
97: {
98: PetscBool same;
99: Mat A_d,A_o;
100: const PetscInt *colmap;
101: PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&same);
102: if (same) {
103: MatMPIAIJGetSeqAIJ(A,&A_d,&A_o,&colmap);
104: MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
105: return(0);
106: }
107: PetscObjectBaseTypeCompare((PetscObject)A,MATMPIBAIJ,&same);
108: if (same) {
109: MatMPIBAIJGetSeqBAIJ(A,&A_d,&A_o,&colmap);
110: MatHYPRE_IJMatrixPreallocate(A_d,A_o,hA->ij);
111: return(0);
112: }
113: PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&same);
114: if (same) {
115: MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
116: return(0);
117: }
118: PetscObjectBaseTypeCompare((PetscObject)A,MATSEQBAIJ,&same);
119: if (same) {
120: MatHYPRE_IJMatrixPreallocate(A,NULL,hA->ij);
121: return(0);
122: }
123: }
124: return(0);
125: }
127: static PetscErrorCode MatHYPRE_IJMatrixCopy(Mat A, HYPRE_IJMatrix ij)
128: {
129: PetscErrorCode ierr;
130: PetscInt i,rstart,rend,ncols,nr,nc;
131: const PetscScalar *values;
132: const PetscInt *cols;
133: PetscBool flg;
136: PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&flg);
137: MatGetSize(A,&nr,&nc);
138: if (flg && nr == nc) {
139: MatHYPRE_IJMatrixFastCopy_MPIAIJ(A,ij);
140: return(0);
141: }
142: PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&flg);
143: if (flg) {
144: MatHYPRE_IJMatrixFastCopy_SeqAIJ(A,ij);
145: return(0);
146: }
148: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
149: MatGetOwnershipRange(A,&rstart,&rend);
150: for (i=rstart; i<rend; i++) {
151: MatGetRow(A,i,&ncols,&cols,&values);
152: if (ncols) {
153: HYPRE_Int nc = (HYPRE_Int)ncols;
155: if ((PetscInt)nc != ncols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",ncols,i);
156: PetscStackCallStandard(HYPRE_IJMatrixSetValues,(ij,1,&nc,(HYPRE_BigInt *)&i,(HYPRE_BigInt *)cols,(HYPRE_Complex *)values));
157: }
158: MatRestoreRow(A,i,&ncols,&cols,&values);
159: }
160: return(0);
161: }
163: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
164: {
165: PetscErrorCode ierr;
166: Mat_SeqAIJ *pdiag = (Mat_SeqAIJ*)A->data;
167: HYPRE_Int type;
168: hypre_ParCSRMatrix *par_matrix;
169: hypre_AuxParCSRMatrix *aux_matrix;
170: hypre_CSRMatrix *hdiag;
171: PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
174: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
175: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
176: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
177: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
178: hdiag = hypre_ParCSRMatrixDiag(par_matrix);
179: /*
180:     this is the hack part where we monkey directly with the hypre data structures
181: */
182: if (sameint) {
183: PetscArraycpy(hdiag->i,pdiag->i,A->rmap->n + 1);
184: PetscArraycpy(hdiag->j,pdiag->j,pdiag->nz);
185: } else {
186: PetscInt i;
188: for (i=0;i<A->rmap->n + 1;i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
189: for (i=0;i<pdiag->nz;i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
190: }
191: PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);
193: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
194: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
195: return(0);
196: }
198: static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
199: {
200: PetscErrorCode ierr;
201: Mat_MPIAIJ *pA = (Mat_MPIAIJ*)A->data;
202: Mat_SeqAIJ *pdiag,*poffd;
203: PetscInt i,*garray = pA->garray,*jj,cstart,*pjj;
204: HYPRE_Int *hjj,type;
205: hypre_ParCSRMatrix *par_matrix;
206: hypre_AuxParCSRMatrix *aux_matrix;
207: hypre_CSRMatrix *hdiag,*hoffd;
208: PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
211: pdiag = (Mat_SeqAIJ*) pA->A->data;
212: poffd = (Mat_SeqAIJ*) pA->B->data;
213: /* cstart is only valid for square MPIAIJ laid out in the usual way */
214: MatGetOwnershipRange(A,&cstart,NULL);
216: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(ij));
217: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(ij,&type));
218: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
219: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(ij,(void**)&par_matrix));
220: hdiag = hypre_ParCSRMatrixDiag(par_matrix);
221: hoffd = hypre_ParCSRMatrixOffd(par_matrix);
223: /*
224:     this is the hack part where we monkey directly with the hypre data structures
225: */
226: if (sameint) {
227: PetscArraycpy(hdiag->i,pdiag->i,pA->A->rmap->n + 1);
228: } else {
229: for (i=0; i<pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
230: }
231: /* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
232: hjj = hdiag->j;
233: pjj = pdiag->j;
234: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
235: for (i=0; i<pdiag->nz; i++) hjj[i] = pjj[i];
236: #else
237: for (i=0; i<pdiag->nz; i++) hjj[i] = cstart + pjj[i];
238: #endif
239: PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);
240: if (sameint) {
241: PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);
242: } else {
243: for (i=0; i<pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
244: }
246: /* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
247:    If we hacked hypre a bit more we might be able to avoid this step */
248: #if PETSC_PKG_HYPRE_VERSION_GE(2,16,0)
249: PetscStackCallStandard(hypre_CSRMatrixBigInitialize,(hoffd));
250: jj = (PetscInt*) hoffd->big_j;
251: #else
252: jj = (PetscInt*) hoffd->j;
253: #endif
254: pjj = poffd->j;
255: for (i=0; i<poffd->nz; i++) jj[i] = garray[pjj[i]];
257: PetscArraycpy(hoffd->data,poffd->a,poffd->nz);
259: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
260: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
261: return(0);
262: }
264: static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat* B)
265: {
266: Mat_HYPRE* mhA = (Mat_HYPRE*)(A->data);
267: Mat lA;
268: ISLocalToGlobalMapping rl2g,cl2g;
269: IS is;
270: hypre_ParCSRMatrix *hA;
271: hypre_CSRMatrix *hdiag,*hoffd;
272: MPI_Comm comm;
273: HYPRE_Complex *hdd,*hod,*aa;
274: PetscScalar *data;
275: HYPRE_BigInt *col_map_offd;
276: HYPRE_Int *hdi,*hdj,*hoi,*hoj;
277: PetscInt *ii,*jj,*iptr,*jptr;
278: PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo,M,N;
279: HYPRE_Int type;
280: PetscErrorCode ierr;
283: comm = PetscObjectComm((PetscObject)A);
284: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(mhA->ij,&type));
285: if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
286: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(mhA->ij,(void**)&hA));
287: M = hypre_ParCSRMatrixGlobalNumRows(hA);
288: N = hypre_ParCSRMatrixGlobalNumCols(hA);
289: str = hypre_ParCSRMatrixFirstRowIndex(hA);
290: stc = hypre_ParCSRMatrixFirstColDiag(hA);
291: hdiag = hypre_ParCSRMatrixDiag(hA);
292: hoffd = hypre_ParCSRMatrixOffd(hA);
293: dr = hypre_CSRMatrixNumRows(hdiag);
294: dc = hypre_CSRMatrixNumCols(hdiag);
295: nnz = hypre_CSRMatrixNumNonzeros(hdiag);
296: hdi = hypre_CSRMatrixI(hdiag);
297: hdj = hypre_CSRMatrixJ(hdiag);
298: hdd = hypre_CSRMatrixData(hdiag);
299: oc = hypre_CSRMatrixNumCols(hoffd);
300: nnz += hypre_CSRMatrixNumNonzeros(hoffd);
301: hoi = hypre_CSRMatrixI(hoffd);
302: hoj = hypre_CSRMatrixJ(hoffd);
303: hod = hypre_CSRMatrixData(hoffd);
304: if (reuse != MAT_REUSE_MATRIX) {
305: PetscInt *aux;
307: /* generate l2g maps for rows and cols */
308: ISCreateStride(comm,dr,str,1,&is);
309: ISLocalToGlobalMappingCreateIS(is,&rl2g);
310: ISDestroy(&is);
311: col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
312: PetscMalloc1(dc+oc,&aux);
313: for (i=0; i<dc; i++) aux[i] = i+stc;
314: for (i=0; i<oc; i++) aux[i+dc] = col_map_offd[i];
315: ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);
316: ISLocalToGlobalMappingCreateIS(is,&cl2g);
317: ISDestroy(&is);
318: /* create MATIS object */
319: MatCreate(comm,B);
320: MatSetSizes(*B,dr,dc,M,N);
321: MatSetType(*B,MATIS);
322: MatSetLocalToGlobalMapping(*B,rl2g,cl2g);
323: ISLocalToGlobalMappingDestroy(&rl2g);
324: ISLocalToGlobalMappingDestroy(&cl2g);
326: /* allocate CSR for local matrix */
327: PetscMalloc1(dr+1,&iptr);
328: PetscMalloc1(nnz,&jptr);
329: PetscMalloc1(nnz,&data);
330: } else {
331: PetscInt nr;
332: PetscBool done;
333: MatISGetLocalMat(*B,&lA);
334: MatGetRowIJ(lA,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&iptr,(const PetscInt**)&jptr,&done);
335: if (nr != dr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of rows in local mat! %D != %D",nr,dr);
336: if (iptr[nr] < nnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in local mat! reuse %D requested %D",iptr[nr],nnz);
337: MatSeqAIJGetArray(lA,&data);
338: }
339: /* merge local matrices */
340: ii = iptr;
341: jj = jptr;
342: aa = (HYPRE_Complex*)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
343: *ii = *(hdi++) + *(hoi++);
344: for (jd=0,jo=0,cum=0; *ii<nnz; cum++) {
345: PetscScalar *aold = (PetscScalar*)aa;
346: PetscInt *jold = jj,nc = jd+jo;
347: for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
348: for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
349: *(++ii) = *(hdi++) + *(hoi++);
350: PetscSortIntWithScalarArray(jd+jo-nc,jold,aold);
351: }
352: for (; cum<dr; cum++) *(++ii) = nnz;
353: if (reuse != MAT_REUSE_MATRIX) {
354: Mat_SeqAIJ* a;
356: MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,iptr,jptr,data,&lA);
357: MatISSetLocalMat(*B,lA);
358: /* hack SeqAIJ */
359: a = (Mat_SeqAIJ*)(lA->data);
360: a->free_a = PETSC_TRUE;
361: a->free_ij = PETSC_TRUE;
362: MatDestroy(&lA);
363: }
364: MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
365: MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
366: if (reuse == MAT_INPLACE_MATRIX) {
367: MatHeaderReplace(A,B);
368: }
369: return(0);
370: }
372: PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
373: {
374: Mat M = NULL;
375: Mat_HYPRE *hB;
376: MPI_Comm comm = PetscObjectComm((PetscObject)A);
380: if (reuse == MAT_REUSE_MATRIX) {
381: /* always destroy the old matrix and allocate new memory;
382: hopefully this does not churn the memory too much. The problem
383: is that we do not know whether it is possible to put the matrix back
384: into its initial state so that we can directly copy the values
385: the second time through. */
386: hB = (Mat_HYPRE*)((*B)->data);
387: PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hB->ij));
388: } else {
389: MatCreate(comm,&M);
390: MatSetType(M,MATHYPRE);
391: MatSetSizes(M,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
392: hB = (Mat_HYPRE*)(M->data);
393: if (reuse == MAT_INITIAL_MATRIX) *B = M;
394: }
395: MatHYPRE_CreateFromMat(A,hB);
396: MatHYPRE_IJMatrixCopy(A,hB->ij);
397: if (reuse == MAT_INPLACE_MATRIX) {
398: MatHeaderReplace(A,&M);
399: }
400: (*B)->preallocated = PETSC_TRUE;
401: MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);
402: MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);
403: return(0);
404: }
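/*
   Illustrative usage sketch (added for exposition, not part of the original source):
   a user converts an assembled AIJ matrix to MATHYPRE through the public MatConvert()
   interface, which dispatches to MatConvert_AIJ_HYPRE() above. Error checking is
   omitted for brevity and the helper name is hypothetical.
*/
static PetscErrorCode ExampleConvertAIJToHYPRE(Mat A)
{
  Mat B;

  /* the first conversion creates the MATHYPRE matrix */
  MatConvert(A,MATHYPRE,MAT_INITIAL_MATRIX,&B);
  /* after the values of A change, the conversion can reuse B */
  MatConvert(A,MATHYPRE,MAT_REUSE_MATRIX,&B);
  MatDestroy(&B);
  return(0);
}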
406: static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
407: {
408: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
409: hypre_ParCSRMatrix *parcsr;
410: hypre_CSRMatrix *hdiag,*hoffd;
411: MPI_Comm comm;
412: PetscScalar *da,*oa,*aptr;
413: PetscInt *dii,*djj,*oii,*ojj,*iptr;
414: PetscInt i,dnnz,onnz,m,n;
415: HYPRE_Int type;
416: PetscMPIInt size;
417: PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
418: PetscErrorCode ierr;
421: comm = PetscObjectComm((PetscObject)A);
422: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
423: if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
424: if (reuse == MAT_REUSE_MATRIX) {
425: PetscBool ismpiaij,isseqaij;
426: PetscObjectBaseTypeCompare((PetscObject)*B,MATMPIAIJ,&ismpiaij);
427: PetscObjectBaseTypeCompare((PetscObject)*B,MATSEQAIJ,&isseqaij);
428: if (!ismpiaij && !isseqaij) SETERRQ(comm,PETSC_ERR_SUP,"Only MATMPIAIJ or MATSEQAIJ are supported");
429: }
430: MPI_Comm_size(comm,&size);
432: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
433: hdiag = hypre_ParCSRMatrixDiag(parcsr);
434: hoffd = hypre_ParCSRMatrixOffd(parcsr);
435: m = hypre_CSRMatrixNumRows(hdiag);
436: n = hypre_CSRMatrixNumCols(hdiag);
437: dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
438: onnz = hypre_CSRMatrixNumNonzeros(hoffd);
439: if (reuse == MAT_INITIAL_MATRIX) {
440: PetscMalloc1(m+1,&dii);
441: PetscMalloc1(dnnz,&djj);
442: PetscMalloc1(dnnz,&da);
443: } else if (reuse == MAT_REUSE_MATRIX) {
444: PetscInt nr;
445: PetscBool done;
446: if (size > 1) {
447: Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
449: MatGetRowIJ(b->A,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
450: if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in diag part! %D != %D",nr,m);
451: if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in diag part! reuse %D hypre %D",dii[nr],dnnz);
452: MatSeqAIJGetArray(b->A,&da);
453: } else {
454: MatGetRowIJ(*B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&dii,(const PetscInt**)&djj,&done);
455: if (nr != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows! %D != %D",nr,m);
456: if (dii[nr] < dnnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros! reuse %D hypre %D",dii[nr],dnnz);
457: MatSeqAIJGetArray(*B,&da);
458: }
459: } else { /* MAT_INPLACE_MATRIX */
460: if (!sameint) {
461: PetscMalloc1(m+1,&dii);
462: PetscMalloc1(dnnz,&djj);
463: } else {
464: dii = (PetscInt*)hypre_CSRMatrixI(hdiag);
465: djj = (PetscInt*)hypre_CSRMatrixJ(hdiag);
466: }
467: da = (PetscScalar*)hypre_CSRMatrixData(hdiag);
468: }
470: if (!sameint) {
471: for (i=0;i<m+1;i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]);
472: for (i=0;i<dnnz;i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]);
473: } else {
474: PetscArraycpy(dii,hypre_CSRMatrixI(hdiag),m+1);
475: PetscArraycpy(djj,hypre_CSRMatrixJ(hdiag),dnnz);
476: }
477: PetscArraycpy(da,hypre_CSRMatrixData(hdiag),dnnz);
478: iptr = djj;
479: aptr = da;
480: for (i=0; i<m; i++) {
481: PetscInt nc = dii[i+1]-dii[i];
482: PetscSortIntWithScalarArray(nc,iptr,aptr);
483: iptr += nc;
484: aptr += nc;
485: }
486: if (size > 1) {
487: HYPRE_BigInt *coffd;
488: HYPRE_Int *offdj;
490: if (reuse == MAT_INITIAL_MATRIX) {
491: PetscMalloc1(m+1,&oii);
492: PetscMalloc1(onnz,&ojj);
493: PetscMalloc1(onnz,&oa);
494: } else if (reuse == MAT_REUSE_MATRIX) {
495: Mat_MPIAIJ *b = (Mat_MPIAIJ*)((*B)->data);
496: PetscInt nr,hr = hypre_CSRMatrixNumRows(hoffd);
497: PetscBool done;
499: MatGetRowIJ(b->B,0,PETSC_FALSE,PETSC_FALSE,&nr,(const PetscInt**)&oii,(const PetscInt**)&ojj,&done);
500: if (nr != hr) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of local rows in offdiag part! %D != %D",nr,hr);
501: if (oii[nr] < onnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot reuse mat: invalid number of nonzeros in offdiag part! reuse %D hypre %D",oii[nr],onnz);
502: MatSeqAIJGetArray(b->B,&oa);
503: } else { /* MAT_INPLACE_MATRIX */
504: if (!sameint) {
505: PetscMalloc1(m+1,&oii);
506: PetscMalloc1(onnz,&ojj);
507: } else {
508: oii = (PetscInt*)hypre_CSRMatrixI(hoffd);
509: ojj = (PetscInt*)hypre_CSRMatrixJ(hoffd);
510: }
511: oa = (PetscScalar*)hypre_CSRMatrixData(hoffd);
512: }
513: if (!sameint) {
514: for (i=0;i<m+1;i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]);
515: } else {
516: PetscArraycpy(oii,hypre_CSRMatrixI(hoffd),m+1);
517: }
518: offdj = hypre_CSRMatrixJ(hoffd);
519: coffd = hypre_ParCSRMatrixColMapOffd(parcsr);
520: for (i=0; i<onnz; i++) ojj[i] = coffd[offdj[i]];
521: PetscArraycpy(oa,hypre_CSRMatrixData(hoffd),onnz);
522: iptr = ojj;
523: aptr = oa;
524: for (i=0; i<m; i++) {
525: PetscInt nc = oii[i+1]-oii[i];
526: PetscSortIntWithScalarArray(nc,iptr,aptr);
527: iptr += nc;
528: aptr += nc;
529: }
530: if (reuse == MAT_INITIAL_MATRIX) {
531: Mat_MPIAIJ *b;
532: Mat_SeqAIJ *d,*o;
534: MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,B);
535: /* hack MPIAIJ */
536: b = (Mat_MPIAIJ*)((*B)->data);
537: d = (Mat_SeqAIJ*)b->A->data;
538: o = (Mat_SeqAIJ*)b->B->data;
539: d->free_a = PETSC_TRUE;
540: d->free_ij = PETSC_TRUE;
541: o->free_a = PETSC_TRUE;
542: o->free_ij = PETSC_TRUE;
543: } else if (reuse == MAT_INPLACE_MATRIX) {
544: Mat T;
546: MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,djj,da,oii,ojj,oa,&T);
547: if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
548: hypre_CSRMatrixI(hdiag) = NULL;
549: hypre_CSRMatrixJ(hdiag) = NULL;
550: hypre_CSRMatrixI(hoffd) = NULL;
551: hypre_CSRMatrixJ(hoffd) = NULL;
552: } else { /* Hack MPIAIJ -> free ij but not a */
553: Mat_MPIAIJ *b = (Mat_MPIAIJ*)(T->data);
554: Mat_SeqAIJ *d = (Mat_SeqAIJ*)(b->A->data);
555: Mat_SeqAIJ *o = (Mat_SeqAIJ*)(b->B->data);
557: d->free_ij = PETSC_TRUE;
558: o->free_ij = PETSC_TRUE;
559: }
560: hypre_CSRMatrixData(hdiag) = NULL;
561: hypre_CSRMatrixData(hoffd) = NULL;
562: MatHeaderReplace(A,&T);
563: }
564: } else {
565: oii = NULL;
566: ojj = NULL;
567: oa = NULL;
568: if (reuse == MAT_INITIAL_MATRIX) {
569: Mat_SeqAIJ* b;
571: MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,B);
572: /* hack SeqAIJ */
573: b = (Mat_SeqAIJ*)((*B)->data);
574: b->free_a = PETSC_TRUE;
575: b->free_ij = PETSC_TRUE;
576: } else if (reuse == MAT_INPLACE_MATRIX) {
577: Mat T;
579: MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,&T);
580: if (sameint) { /* ownership of CSR pointers is transferred to PETSc */
581: hypre_CSRMatrixI(hdiag) = NULL;
582: hypre_CSRMatrixJ(hdiag) = NULL;
583: } else { /* free ij but not a */
584: Mat_SeqAIJ* b = (Mat_SeqAIJ*)(T->data);
586: b->free_ij = PETSC_TRUE;
587: }
588: hypre_CSRMatrixData(hdiag) = NULL;
589: MatHeaderReplace(A,&T);
590: }
591: }
593: /* we have to use hypre_Tfree to free the HYPRE arrays
594:    that PETSc now owns */
595: if (reuse == MAT_INPLACE_MATRIX) {
596: PetscInt nh;
597: void *ptrs[6] = {da,oa,dii,djj,oii,ojj};
598: const char *names[6] = {"_hypre_csr_da",
599: "_hypre_csr_oa",
600: "_hypre_csr_dii",
601: "_hypre_csr_djj",
602: "_hypre_csr_oii",
603: "_hypre_csr_ojj"};
604: nh = sameint ? 6 : 2;
605: for (i=0; i<nh; i++) {
606: PetscContainer c;
608: PetscContainerCreate(comm,&c);
609: PetscContainerSetPointer(c,ptrs[i]);
610: PetscContainerSetUserDestroy(c,hypre_array_destroy);
611: PetscObjectCompose((PetscObject)(*B),names[i],(PetscObject)c);
612: PetscContainerDestroy(&c);
613: }
614: }
615: return(0);
616: }
618: static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
619: {
620: hypre_ParCSRMatrix *tA;
621: hypre_CSRMatrix *hdiag,*hoffd;
622: Mat_SeqAIJ *diag,*offd;
623: PetscInt *garray,i,noffd,dnnz,onnz,*row_starts,*col_starts;
624: MPI_Comm comm = PetscObjectComm((PetscObject)A);
625: PetscBool ismpiaij,isseqaij;
626: PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
627: PetscErrorCode ierr;
630: PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
631: PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
632: if (!ismpiaij && !isseqaij) SETERRQ1(comm,PETSC_ERR_SUP,"Unsupported type %s",((PetscObject)A)->type_name);
633: if (ismpiaij) {
634: Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A->data);
636: diag = (Mat_SeqAIJ*)a->A->data;
637: offd = (Mat_SeqAIJ*)a->B->data;
638: garray = a->garray;
639: noffd = a->B->cmap->N;
640: dnnz = diag->nz;
641: onnz = offd->nz;
642: } else {
643: diag = (Mat_SeqAIJ*)A->data;
644: offd = NULL;
645: garray = NULL;
646: noffd = 0;
647: dnnz = diag->nz;
648: onnz = 0;
649: }
651: /* create a temporary ParCSR */
652: if (HYPRE_AssumedPartitionCheck()) {
653: PetscMPIInt myid;
655: MPI_Comm_rank(comm,&myid);
656: row_starts = A->rmap->range + myid;
657: col_starts = A->cmap->range + myid;
658: } else {
659: row_starts = A->rmap->range;
660: col_starts = A->cmap->range;
661: }
662: tA = hypre_ParCSRMatrixCreate(comm,A->rmap->N,A->cmap->N,(HYPRE_BigInt*)row_starts,(HYPRE_BigInt*)col_starts,noffd,dnnz,onnz);
663: hypre_ParCSRMatrixSetRowStartsOwner(tA,0);
664: hypre_ParCSRMatrixSetColStartsOwner(tA,0);
666: /* set diagonal part */
667: hdiag = hypre_ParCSRMatrixDiag(tA);
668: if (sameint) { /* reuse CSR pointers */
669: hypre_CSRMatrixI(hdiag) = (HYPRE_Int*)diag->i;
670: hypre_CSRMatrixJ(hdiag) = (HYPRE_Int*)diag->j;
671: } else { /* malloc CSR pointers */
672: HYPRE_Int *hi,*hj;
674: PetscMalloc2(A->rmap->n+1,&hi,dnnz,&hj);
675: for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(diag->i[i]);
676: for (i = 0; i < dnnz; i++) hj[i] = (HYPRE_Int)(diag->j[i]);
677: hypre_CSRMatrixI(hdiag) = hi;
678: hypre_CSRMatrixJ(hdiag) = hj;
679: }
680: hypre_CSRMatrixData(hdiag) = (HYPRE_Complex*)diag->a;
681: hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
682: hypre_CSRMatrixSetRownnz(hdiag);
683: hypre_CSRMatrixSetDataOwner(hdiag,0);
685: /* set offdiagonal part */
686: hoffd = hypre_ParCSRMatrixOffd(tA);
687: if (offd) {
688: if (sameint) { /* reuse CSR pointers */
689: hypre_CSRMatrixI(hoffd) = (HYPRE_Int*)offd->i;
690: hypre_CSRMatrixJ(hoffd) = (HYPRE_Int*)offd->j;
691: } else { /* malloc CSR pointers */
692: HYPRE_Int *hi,*hj;
694: PetscMalloc2(A->rmap->n+1,&hi,onnz,&hj);
695: for (i = 0; i < A->rmap->n+1; i++) hi[i] = (HYPRE_Int)(offd->i[i]);
696: for (i = 0; i < onnz; i++) hj[i] = (HYPRE_Int)(offd->j[i]);
697: hypre_CSRMatrixI(hoffd) = hi;
698: hypre_CSRMatrixJ(hoffd) = hj;
699: }
700: hypre_CSRMatrixData(hoffd) = (HYPRE_Complex*)offd->a;
701: hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
702: hypre_CSRMatrixSetRownnz(hoffd);
703: hypre_CSRMatrixSetDataOwner(hoffd,0);
704: hypre_ParCSRMatrixSetNumNonzeros(tA);
705: hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt*)garray;
706: }
707: *hA = tA;
708: return(0);
709: }
711: static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
712: {
713: hypre_CSRMatrix *hdiag,*hoffd;
714: PetscBool sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
715: PetscErrorCode ierr;
718: hdiag = hypre_ParCSRMatrixDiag(*hA);
719: hoffd = hypre_ParCSRMatrixOffd(*hA);
720: /* free temporary memory allocated by PETSc */
721: if (!sameint) {
722: HYPRE_Int *hi,*hj;
724: hi = hypre_CSRMatrixI(hdiag);
725: hj = hypre_CSRMatrixJ(hdiag);
726: PetscFree2(hi,hj);
727: if (hoffd) {
728: hi = hypre_CSRMatrixI(hoffd);
729: hj = hypre_CSRMatrixJ(hoffd);
730: PetscFree2(hi,hj);
731: }
732: }
733: /* set pointers to NULL before destroying tA */
734: hypre_CSRMatrixI(hdiag) = NULL;
735: hypre_CSRMatrixJ(hdiag) = NULL;
736: hypre_CSRMatrixData(hdiag) = NULL;
737: hypre_CSRMatrixI(hoffd) = NULL;
738: hypre_CSRMatrixJ(hoffd) = NULL;
739: hypre_CSRMatrixData(hoffd) = NULL;
740: hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
741: hypre_ParCSRMatrixDestroy(*hA);
742: *hA = NULL;
743: return(0);
744: }
746: /* calls RAP from BoomerAMG:
747: the resulting ParCSR will not own the column and row starts
748: It looks like we don't need to have the diagonal entries
749: ordered first in the rows of the diagonal part
750: for boomerAMGBuildCoarseOperator to work */
751: static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA,hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
752: {
753: HYPRE_Int P_owns_col_starts,R_owns_row_starts;
756: P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
757: R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
758: PetscStackCallStandard(hypre_BoomerAMGBuildCoarseOperator,(hR,hA,hP,hRAP));
759: PetscStackCallStandard(hypre_ParCSRMatrixSetNumNonzeros,(*hRAP));
760: /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
761: hypre_ParCSRMatrixSetRowStartsOwner(*hRAP,0);
762: hypre_ParCSRMatrixSetColStartsOwner(*hRAP,0);
763: if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP,1);
764: if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR,1);
765: return(0);
766: }
768: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat P,Mat C)
769: {
770: Mat B;
771: hypre_ParCSRMatrix *hA,*hP,*hPtAP;
772: PetscErrorCode ierr;
775: MatAIJGetParCSR_Private(A,&hA);
776: MatAIJGetParCSR_Private(P,&hP);
777: MatHYPRE_ParCSR_RAP(hP,hA,hP,&hPtAP);
778: MatCreateFromParCSR(hPtAP,MATAIJ,PETSC_OWN_POINTER,&B);
779: MatHeaderMerge(C,&B);
780: MatAIJRestoreParCSR_Private(A,&hA);
781: MatAIJRestoreParCSR_Private(P,&hP);
782: return(0);
783: }
785: PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat P,PetscReal fill,Mat *C)
786: {
790: MatCreate(PetscObjectComm((PetscObject)A),C);
791: MatSetType(*C,MATAIJ);
792: (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
793: return(0);
794: }
796: static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A,Mat P,Mat C)
797: {
798: Mat B;
799: Mat_HYPRE *hP;
800: hypre_ParCSRMatrix *hA = NULL,*Pparcsr,*ptapparcsr;
801: HYPRE_Int type;
802: MPI_Comm comm = PetscObjectComm((PetscObject)A);
803: PetscBool ishypre;
804: PetscErrorCode ierr;
807: PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
808: if (!ishypre) SETERRQ1(comm,PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
809: hP = (Mat_HYPRE*)P->data;
810: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
811: if (type != HYPRE_PARCSR) SETERRQ(comm,PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
812: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
814: MatAIJGetParCSR_Private(A,&hA);
815: MatHYPRE_ParCSR_RAP(Pparcsr,hA,Pparcsr,&ptapparcsr);
816: MatAIJRestoreParCSR_Private(A,&hA);
818: /* create temporary matrix and merge to C */
819: MatCreateFromParCSR(ptapparcsr,((PetscObject)C)->type_name,PETSC_OWN_POINTER,&B);
820: MatHeaderMerge(C,&B);
821: return(0);
822: }
824: static PetscErrorCode MatPtAP_AIJ_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
825: {
829: if (scall == MAT_INITIAL_MATRIX) {
830: const char *deft = MATAIJ;
831: char type[256];
832: PetscBool flg;
834: PetscObjectOptionsBegin((PetscObject)A);
835: PetscOptionsFList("-matptap_hypre_outtype","Matrix type",NULL,MatList,deft,type,256,&flg);
836: PetscOptionsEnd();
837: PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
838: MatCreate(PetscObjectComm((PetscObject)A),C);
839: if (flg) {
840: MatSetType(*C,type);
841: } else {
842: MatSetType(*C,deft);
843: }
844: (*C)->ops->ptapnumeric = MatPtAPNumeric_AIJ_HYPRE;
845: PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
846: }
847: PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
848: (*(*C)->ops->ptapnumeric)(A,P,*C);
849: PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
850: return(0);
851: }
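/*
   Illustrative usage sketch (added for exposition, not part of the original source):
   computing C = P^T A P with an AIJ matrix A and a MATHYPRE matrix P through the
   public MatPtAP() interface, which ends up in MatPtAP_AIJ_HYPRE() above; the type
   of C can be selected at runtime with -matptap_hypre_outtype. Error checking is
   omitted and the helper name is hypothetical.
*/
static PetscErrorCode ExamplePtAPWithHypreP(Mat A, Mat Paij)
{
  Mat P,C;

  /* P must be of type MATHYPRE for this code path */
  MatConvert(Paij,MATHYPRE,MAT_INITIAL_MATRIX,&P);
  MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
  MatDestroy(&C);
  MatDestroy(&P);
  return(0);
}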
853: static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A,Mat P,Mat C)
854: {
855: Mat B;
856: hypre_ParCSRMatrix *Aparcsr,*Pparcsr,*ptapparcsr;
857: Mat_HYPRE *hA,*hP;
858: PetscBool ishypre;
859: HYPRE_Int type;
860: PetscErrorCode ierr;
863: PetscObjectTypeCompare((PetscObject)P,MATHYPRE,&ishypre);
864: if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)P),PETSC_ERR_USER,"P should be of type %s",MATHYPRE);
865: PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
866: if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
867: hA = (Mat_HYPRE*)A->data;
868: hP = (Mat_HYPRE*)P->data;
869: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
870: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
871: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hP->ij,&type));
872: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)P),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
873: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
874: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hP->ij,(void**)&Pparcsr));
875: MatHYPRE_ParCSR_RAP(Pparcsr,Aparcsr,Pparcsr,&ptapparcsr);
876: MatCreateFromParCSR(ptapparcsr,MATHYPRE,PETSC_OWN_POINTER,&B);
877: MatHeaderMerge(C,&B);
878: return(0);
879: }
881: static PetscErrorCode MatPtAP_HYPRE_HYPRE(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
882: {
886: if (scall == MAT_INITIAL_MATRIX) {
887: PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);
888: MatCreate(PetscObjectComm((PetscObject)A),C);
889: MatSetType(*C,MATHYPRE);
890: (*C)->ops->ptapnumeric = MatPtAPNumeric_HYPRE_HYPRE;
891: PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);
892: }
893: PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);
894: (*(*C)->ops->ptapnumeric)(A,P,*C);
895: PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);
896: return(0);
897: }
899: /* calls hypre_ParMatmul
900:    hypre_ParMatmul uses hypre_ParCSRMatrixCreate with the communicator of hA
901:    hypre_ParCSRMatrixCreate does not duplicate the communicator
902: It looks like we don't need to have the diagonal entries
903: ordered first in the rows of the diagonal part
904: for boomerAMGBuildCoarseOperator to work */
905: static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
906: {
908: PetscStackPush("hypre_ParMatmul");
909: *hAB = hypre_ParMatmul(hA,hB);
910: PetscStackPop;
911: return(0);
912: }
914: static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C)
915: {
916: Mat D;
917: hypre_ParCSRMatrix *hA,*hB,*hAB = NULL;
918: PetscErrorCode ierr;
921: MatAIJGetParCSR_Private(A,&hA);
922: MatAIJGetParCSR_Private(B,&hB);
923: MatHYPRE_ParCSR_MatMatMult(hA,hB,&hAB);
924: MatCreateFromParCSR(hAB,MATAIJ,PETSC_OWN_POINTER,&D);
925: MatHeaderMerge(C,&D);
926: MatAIJRestoreParCSR_Private(A,&hA);
927: MatAIJRestoreParCSR_Private(B,&hB);
928: return(0);
929: }
931: PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A,Mat B,PetscReal fill,Mat *C)
932: {
936: MatCreate(PetscObjectComm((PetscObject)A),C);
937: MatSetType(*C,MATAIJ);
938: (*C)->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
939: return(0);
940: }
942: static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A,Mat B,Mat C)
943: {
944: Mat D;
945: hypre_ParCSRMatrix *Aparcsr,*Bparcsr,*ABparcsr = NULL;
946: Mat_HYPRE *hA,*hB;
947: PetscBool ishypre;
948: HYPRE_Int type;
949: PetscErrorCode ierr;
952: PetscObjectTypeCompare((PetscObject)B,MATHYPRE,&ishypre);
953: if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_USER,"B should be of type %s",MATHYPRE);
954: PetscObjectTypeCompare((PetscObject)A,MATHYPRE,&ishypre);
955: if (!ishypre) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_USER,"A should be of type %s",MATHYPRE);
956: hA = (Mat_HYPRE*)A->data;
957: hB = (Mat_HYPRE*)B->data;
958: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
959: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
960: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hB->ij,&type));
961: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Only HYPRE_PARCSR is supported");
962: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&Aparcsr));
963: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hB->ij,(void**)&Bparcsr));
964: MatHYPRE_ParCSR_MatMatMult(Aparcsr,Bparcsr,&ABparcsr);
965: MatCreateFromParCSR(ABparcsr,MATHYPRE,PETSC_OWN_POINTER,&D);
966:   /* need to use HeaderReplace because HeaderMerge messes up the communicator */
967: MatHeaderReplace(C,&D);
968: C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
969: return(0);
970: }
972: static PetscErrorCode MatMatMult_HYPRE_HYPRE(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
973: {
977: if (scall == MAT_INITIAL_MATRIX) {
978: PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);
979: MatCreate(PetscObjectComm((PetscObject)A),C);
980: MatSetType(*C,MATHYPRE);
981: (*C)->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
982: PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);
983: }
984: PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);
985: (*(*C)->ops->matmultnumeric)(A,B,*C);
986: PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);
987: return(0);
988: }
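/*
   Illustrative usage sketch (added for exposition, not part of the original source):
   multiplying two MATHYPRE matrices through the public MatMatMult() interface, which
   ends up in MatMatMult_HYPRE_HYPRE() above and produces a MATHYPRE result via
   hypre_ParMatmul. Error checking is omitted and the helper name is hypothetical.
*/
static PetscErrorCode ExampleMatMatMultHYPRE(Mat A, Mat B)
{
  Mat C;

  /* A and B are assumed to be assembled matrices of type MATHYPRE */
  MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
  MatDestroy(&C);
  return(0);
}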
990: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,Mat D)
991: {
992: Mat E;
993: hypre_ParCSRMatrix *hA,*hB,*hC,*hABC;
994: PetscErrorCode ierr;
997: MatAIJGetParCSR_Private(A,&hA);
998: MatAIJGetParCSR_Private(B,&hB);
999: MatAIJGetParCSR_Private(C,&hC);
1000: MatHYPRE_ParCSR_RAP(hA,hB,hC,&hABC);
1001: MatCreateFromParCSR(hABC,MATAIJ,PETSC_OWN_POINTER,&E);
1002: MatHeaderMerge(D,&E);
1003: MatAIJRestoreParCSR_Private(A,&hA);
1004: MatAIJRestoreParCSR_Private(B,&hB);
1005: MatAIJRestoreParCSR_Private(C,&hC);
1006: return(0);
1007: }
1009: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A,Mat B,Mat C,PetscReal fill,Mat *D)
1010: {
1014: MatCreate(PetscObjectComm((PetscObject)A),D);
1015: MatSetType(*D,MATAIJ);
1016: return(0);
1017: }
1019: static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1020: {
1024: MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_TRUE);
1025: return(0);
1026: }
1028: static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1029: {
1033: MatHYPRE_MultKernel_Private(A,1.0,x,0.0,y,PETSC_FALSE);
1034: return(0);
1035: }
1037: static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1038: {
1042: if (y != z) {
1043: VecCopy(y,z);
1044: }
1045: MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_FALSE);
1046: return(0);
1047: }
1049: static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1050: {
1054: if (y != z) {
1055: VecCopy(y,z);
1056: }
1057: MatHYPRE_MultKernel_Private(A,1.0,x,1.0,z,PETSC_TRUE);
1058: return(0);
1059: }
1061: /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
1062: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1063: {
1064: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1065: hypre_ParCSRMatrix *parcsr;
1066: hypre_ParVector *hx,*hy;
1067: HYPRE_Complex *ax,*ay,*sax,*say;
1068: PetscErrorCode ierr;
1071: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)&parcsr));
1072: PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->x,(void**)&hx));
1073: PetscStackCallStandard(HYPRE_IJVectorGetObject,(hA->b,(void**)&hy));
1074: VecGetArrayRead(x,(const PetscScalar**)&ax);
1075: VecGetArray(y,(PetscScalar**)&ay);
1076: if (trans) {
1077: VecHYPRE_ParVectorReplacePointer(hA->x,ay,say);
1078: VecHYPRE_ParVectorReplacePointer(hA->b,ax,sax);
1079: hypre_ParCSRMatrixMatvecT(a,parcsr,hy,b,hx);
1080: VecHYPRE_ParVectorReplacePointer(hA->x,say,ay);
1081: VecHYPRE_ParVectorReplacePointer(hA->b,sax,ax);
1082: } else {
1083: VecHYPRE_ParVectorReplacePointer(hA->x,ax,sax);
1084: VecHYPRE_ParVectorReplacePointer(hA->b,ay,say);
1085: hypre_ParCSRMatrixMatvec(a,parcsr,hx,b,hy);
1086: VecHYPRE_ParVectorReplacePointer(hA->x,sax,ax);
1087: VecHYPRE_ParVectorReplacePointer(hA->b,say,ay);
1088: }
1089: VecRestoreArrayRead(x,(const PetscScalar**)&ax);
1090: VecRestoreArray(y,(PetscScalar**)&ay);
1091: return(0);
1092: }
1094: static PetscErrorCode MatDestroy_HYPRE(Mat A)
1095: {
1096: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1100: if (hA->x) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->x));
1101: if (hA->b) PetscStackCallStandard(HYPRE_IJVectorDestroy,(hA->b));
1102: if (hA->ij) {
1103: if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
1104: PetscStackCallStandard(HYPRE_IJMatrixDestroy,(hA->ij));
1105: }
1106: if (hA->comm) { MPI_Comm_free(&hA->comm); }
1108: MatStashDestroy_Private(&A->stash);
1110: PetscFree(hA->array);
1112: PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_aij_C",NULL);
1113: PetscObjectComposeFunction((PetscObject)A,"MatConvert_hypre_is_C",NULL);
1114: PetscObjectComposeFunction((PetscObject)A,"MatPtAP_seqaij_hypre_C",NULL);
1115: PetscObjectComposeFunction((PetscObject)A,"MatPtAP_mpiaij_hypre_C",NULL);
1116: PetscObjectComposeFunction((PetscObject)A,"MatHYPRESetPreallocation_C",NULL);
1117: PetscObjectComposeFunction((PetscObject)A,"MatHYPREGetParCSR_C",NULL);
1118: PetscObjectComposeFunction((PetscObject)A,"MatPtAP_is_hypre_C",NULL);
1119: PetscFree(A->data);
1120: return(0);
1121: }
1123: static PetscErrorCode MatSetUp_HYPRE(Mat A)
1124: {
1128: MatHYPRESetPreallocation(A,PETSC_DEFAULT,NULL,PETSC_DEFAULT,NULL);
1129: return(0);
1130: }
1132: static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1133: {
1134: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1135: Vec x,b;
1136: PetscMPIInt n;
1137: PetscInt i,j,rstart,ncols,flg;
1138: PetscInt *row,*col;
1139: PetscScalar *val;
1140: PetscErrorCode ierr;
1143: if (mode == MAT_FLUSH_ASSEMBLY) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");
1145: if (!A->nooffprocentries) {
1146: while (1) {
1147: MatStashScatterGetMesg_Private(&A->stash,&n,&row,&col,&val,&flg);
1148: if (!flg) break;
1150: for (i=0; i<n; ) {
1151: /* Now identify the consecutive vals belonging to the same row */
1152: for (j=i,rstart=row[j]; j<n; j++) {
1153: if (row[j] != rstart) break;
1154: }
1155: if (j < n) ncols = j-i;
1156: else ncols = n-i;
1157: /* Now assemble all these values with a single function call */
1158: MatSetValues_HYPRE(A,1,row+i,ncols,col+i,val+i,A->insertmode);
1160: i = j;
1161: }
1162: }
1163: MatStashScatterEnd_Private(&A->stash);
1164: }
1166: PetscStackCallStandard(HYPRE_IJMatrixAssemble,(hA->ij));
1167:   /* The assembly routine destroys the aux_matrix; we recreate it here by calling HYPRE_IJMatrixInitialize */
1168: {
1169: hypre_AuxParCSRMatrix *aux_matrix;
1171: /* call destroy just to make sure we do not leak anything */
1172: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1173: PetscStackCallStandard(hypre_AuxParCSRMatrixDestroy,(aux_matrix));
1174: hypre_IJMatrixTranslator(hA->ij) = NULL;
1176: /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1177: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1178: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1179: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1180: PetscStackCallStandard(hypre_AuxParCSRMatrixInitialize,(aux_matrix));
1181: }
1182: if (hA->x) return(0);
1183: PetscLayoutSetUp(A->rmap);
1184: PetscLayoutSetUp(A->cmap);
1185: VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->cmap->n,A->cmap->N,NULL,&x);
1186: VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,A->rmap->n,A->rmap->N,NULL,&b);
1187: VecHYPRE_IJVectorCreate(x,&hA->x);
1188: VecHYPRE_IJVectorCreate(b,&hA->b);
1189: VecDestroy(&x);
1190: VecDestroy(&b);
1191: return(0);
1192: }
1194: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1195: {
1196: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1197: PetscErrorCode ierr;
1200: if (!hA->available) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Temporary space is in use");
1202: if (hA->size >= size) {
1203: *array = hA->array;
1204: } else {
1205: PetscFree(hA->array);
1206: hA->size = size;
1207: PetscMalloc(hA->size,&hA->array);
1208: *array = hA->array;
1209: }
1211: hA->available = PETSC_FALSE;
1212: return(0);
1213: }
1215: static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1216: {
1217: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1220: *array = NULL;
1221: hA->available = PETSC_TRUE;
1222: return(0);
1223: }
1225: PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1226: {
1227: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1228: PetscScalar *vals = (PetscScalar *)v;
1229: HYPRE_Complex *sscr;
1230: PetscInt *cscr[2];
1231: PetscInt i,nzc;
1232: void *array = NULL;
1233: PetscErrorCode ierr;
1236: MatGetArray_HYPRE(A,sizeof(PetscInt)*(2*nc)+sizeof(HYPRE_Complex)*nc*nr,&array);
1237: cscr[0] = (PetscInt*)array;
1238: cscr[1] = ((PetscInt*)array)+nc;
1239: sscr = (HYPRE_Complex*)(((PetscInt*)array)+nc*2);
1240: for (i=0,nzc=0;i<nc;i++) {
1241: if (cols[i] >= 0) {
1242: cscr[0][nzc ] = cols[i];
1243: cscr[1][nzc++] = i;
1244: }
1245: }
1246: if (!nzc) {
1247: MatRestoreArray_HYPRE(A,&array);
1248: return(0);
1249: }
1251: if (ins == ADD_VALUES) {
1252: for (i=0;i<nr;i++) {
1253: if (rows[i] >= 0 && nzc) {
1254: PetscInt j;
1255: HYPRE_Int hnc = (HYPRE_Int)nzc;
1257: if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
1258: for (j=0;j<nzc;j++) { PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
1259: PetscStackCallStandard(HYPRE_IJMatrixAddToValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
1260: }
1261: vals += nc;
1262: }
1263: } else { /* INSERT_VALUES */
1264: PetscInt rst,ren;
1266: MatGetOwnershipRange(A,&rst,&ren);
1267: for (i=0;i<nr;i++) {
1268: if (rows[i] >= 0 && nzc) {
1269: PetscInt j;
1270: HYPRE_Int hnc = (HYPRE_Int)nzc;
1272: if ((PetscInt)hnc != nzc) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hypre overflow! number of columns %D for row %D",nzc,rows[i]);
1273: for (j=0;j<nzc;j++) { PetscHYPREScalarCast(vals[cscr[1][j]],&sscr[j]); }
1274: /* nonlocal values */
1275: if (rows[i] < rst || rows[i] >= ren) { MatStashValuesRow_Private(&A->stash,rows[i],nzc,cscr[0],(PetscScalar*)sscr,PETSC_FALSE); }
1276: /* local values */
1277: else PetscStackCallStandard(HYPRE_IJMatrixSetValues,(hA->ij,1,&hnc,(HYPRE_BigInt*)(rows+i),(HYPRE_BigInt*)cscr[0],sscr));
1278: }
1279: vals += nc;
1280: }
1281: }
1283: MatRestoreArray_HYPRE(A,&array);
1284: return(0);
1285: }
1287: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1288: {
1289: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1290: HYPRE_Int *hdnnz,*honnz;
1291: PetscInt i,rs,re,cs,ce,bs;
1292: PetscMPIInt size;
1296: MatGetBlockSize(A,&bs);
1297: PetscLayoutSetUp(A->rmap);
1298: PetscLayoutSetUp(A->cmap);
1299: rs = A->rmap->rstart;
1300: re = A->rmap->rend;
1301: cs = A->cmap->rstart;
1302: ce = A->cmap->rend;
1303: if (!hA->ij) {
1304: PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rs,re-1,cs,ce-1,&hA->ij));
1305: PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1306: } else {
1307: HYPRE_BigInt hrs,hre,hcs,hce;
1308: PetscStackCallStandard(HYPRE_IJMatrixGetLocalRange,(hA->ij,&hrs,&hre,&hcs,&hce));
1309: if (hre-hrs+1 != re -rs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local rows: IJMatrix [%D,%D), PETSc [%D,%D)",hrs,hre+1,rs,re);
1310: if (hce-hcs+1 != ce -cs) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local cols: IJMatrix [%D,%D), PETSc [%D,%D)",hcs,hce+1,cs,ce);
1311: }
1312: if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10*bs;
1313: if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10*bs;
1315: if (!dnnz) {
1316: PetscMalloc1(A->rmap->n,&hdnnz);
1317: for (i=0;i<A->rmap->n;i++) hdnnz[i] = dnz;
1318: } else {
1319: hdnnz = (HYPRE_Int*)dnnz;
1320: }
1321: MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1322: if (size > 1) {
1323: hypre_AuxParCSRMatrix *aux_matrix;
1324: if (!onnz) {
1325: PetscMalloc1(A->rmap->n,&honnz);
1326: for (i=0;i<A->rmap->n;i++) honnz[i] = onz;
1327: } else {
1328: honnz = (HYPRE_Int*)onnz;
1329: }
1330:     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1331:        they assume the user will input entire rows of values, properly sorted.
1332:        In PETSc, we don't make such an assumption; instead we set this flag to 1.
1333:        Also, to avoid possible memory leaks, we destroy and recreate the translator.
1334:        This has to be done here, as HYPRE_IJMatrixInitialize will then properly initialize
1335:        the IJ matrix for us */
1336: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1337: hypre_AuxParCSRMatrixDestroy(aux_matrix);
1338: hypre_IJMatrixTranslator(hA->ij) = NULL;
1339: PetscStackCallStandard(HYPRE_IJMatrixSetDiagOffdSizes,(hA->ij,hdnnz,honnz));
1340: aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(hA->ij);
1341: hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
1342: } else {
1343: honnz = NULL;
1344: PetscStackCallStandard(HYPRE_IJMatrixSetRowSizes,(hA->ij,hdnnz));
1345: }
1347: /* reset assembled flag and call the initialize method */
1348: hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1349: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1350: if (!dnnz) {
1351: PetscFree(hdnnz);
1352: }
1353: if (!onnz && honnz) {
1354: PetscFree(honnz);
1355: }
1357: /* Match AIJ logic */
1358: A->preallocated = PETSC_TRUE;
1359: A->assembled = PETSC_FALSE;
1360: return(0);
1361: }
1363: /*@C
1364: MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format
1366: Collective on Mat
1368: Input Parameters:
1369: + A - the matrix
1370: . dnz - number of nonzeros per row in DIAGONAL portion of local submatrix
1371: (same value is used for all local rows)
1372: . dnnz - array containing the number of nonzeros in the various rows of the
1373: DIAGONAL portion of the local submatrix (possibly different for each row)
1374: or NULL (PETSC_NULL_INTEGER in Fortran), if dnz is used to specify the nonzero structure.
1375: The size of this array is equal to the number of local rows, i.e. 'm'.
1376: For matrices that will be factored, you must leave room for (and set)
1377: the diagonal entry even if it is zero.
1378: . onz - number of nonzeros per row in the OFF-DIAGONAL portion of local
1379: submatrix (same value is used for all local rows).
1380: - onnz - array containing the number of nonzeros in the various rows of the
1381: OFF-DIAGONAL portion of the local submatrix (possibly different for
1382: each row) or NULL (PETSC_NULL_INTEGER in Fortran), if onz is used to specify the nonzero
1383: structure. The size of this array is equal to the number
1384: of local rows, i.e. 'm'.
1386: Notes:
1387: If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, onz and onnz are ignored.
1389: Level: intermediate
1391: .seealso: MatCreate(), MatMPIAIJSetPreallocation(), MATHYPRE
1392: @*/
1393: PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1394: {
1400: PetscTryMethod(A,"MatHYPRESetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(A,dnz,dnnz,onz,onnz));
1401: return(0);
1402: }
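/*
   Illustrative usage sketch (added for exposition, not part of the original source):
   assembling a MATHYPRE matrix directly with MatSetValues() after preallocating it
   with MatHYPRESetPreallocation(). The 1D Laplacian stencil is only an example and
   error checking is omitted for brevity; the helper name is hypothetical.
*/
static PetscErrorCode ExampleAssembleHYPRE(MPI_Comm comm, PetscInt n)
{
  Mat      A;
  PetscInt i,rstart,rend;

  MatCreate(comm,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);
  MatSetType(A,MATHYPRE);
  /* at most 3 nonzeros per row in the diagonal block, 2 in the off-diagonal block */
  MatHYPRESetPreallocation(A,3,NULL,2,NULL);
  MatGetOwnershipRange(A,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    PetscInt    cols[3];
    PetscScalar vals[3];
    PetscInt    nc = 0;

    if (i > 0)   { cols[nc] = i-1; vals[nc] = -1.0; nc++; }
    cols[nc] = i; vals[nc] = 2.0; nc++;
    if (i < n-1) { cols[nc] = i+1; vals[nc] = -1.0; nc++; }
    MatSetValues(A,1,&i,nc,cols,vals,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  MatDestroy(&A);
  return(0);
}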
1404: /*
1405: MatCreateFromParCSR - Creates a matrix from a hypre_ParCSRMatrix
1407: Collective
1409: Input Parameters:
1410: + parcsr - the pointer to the hypre_ParCSRMatrix
1411: . mtype - matrix type to be created. Currently MATAIJ, MATIS and MATHYPRE are supported.
1412: - copymode - PETSc copying options
1414: Output Parameter:
1415: . A - the matrix
1417: Level: intermediate
1419: .seealso: MatHYPRE, PetscCopyMode
1420: */
1421: PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat* A)
1422: {
1423: Mat T;
1424: Mat_HYPRE *hA;
1425: MPI_Comm comm;
1426: PetscInt rstart,rend,cstart,cend,M,N;
1427: PetscBool isseqaij,ismpiaij,isaij,ishyp,isis;
1428: PetscErrorCode ierr;
1431: comm = hypre_ParCSRMatrixComm(parcsr);
1432: PetscStrcmp(mtype,MATSEQAIJ,&isseqaij);
1433: PetscStrcmp(mtype,MATMPIAIJ,&ismpiaij);
1434: PetscStrcmp(mtype,MATAIJ,&isaij);
1435: PetscStrcmp(mtype,MATHYPRE,&ishyp);
1436: PetscStrcmp(mtype,MATIS,&isis);
1437: isaij = (PetscBool)(isseqaij || ismpiaij || isaij);
1438: if (!isaij && !ishyp && !isis) SETERRQ6(comm,PETSC_ERR_SUP,"Unsupported MatType %s! Supported types are %s, %s, %s, %s, and %s",mtype,MATAIJ,MATSEQAIJ,MATMPIAIJ,MATIS,MATHYPRE);
1439: /* access ParCSRMatrix */
1440: rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1441: rend = hypre_ParCSRMatrixLastRowIndex(parcsr);
1442: cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1443: cend = hypre_ParCSRMatrixLastColDiag(parcsr);
1444: M = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1445: N = hypre_ParCSRMatrixGlobalNumCols(parcsr);
1447: /* fix for empty local rows/columns */
1448: if (rend < rstart) rend = rstart;
1449: if (cend < cstart) cend = cstart;
1451: /* PETSc convention */
1452: rend++;
1453: cend++;
1454: rend = PetscMin(rend,M);
1455: cend = PetscMin(cend,N);
1457: /* create PETSc matrix with MatHYPRE */
1458: MatCreate(comm,&T);
1459: MatSetSizes(T,rend-rstart,cend-cstart,M,N);
1460: MatSetType(T,MATHYPRE);
1461: hA = (Mat_HYPRE*)(T->data);
1463: /* create HYPRE_IJMatrix */
1464: PetscStackCallStandard(HYPRE_IJMatrixCreate,(hA->comm,rstart,rend-1,cstart,cend-1,&hA->ij));
1465: PetscStackCallStandard(HYPRE_IJMatrixSetObjectType,(hA->ij,HYPRE_PARCSR));
1467: /* create new ParCSR object if needed */
1468: if (ishyp && copymode == PETSC_COPY_VALUES) {
1469: hypre_ParCSRMatrix *new_parcsr;
1470: hypre_CSRMatrix *hdiag,*hoffd,*ndiag,*noffd;
1472: new_parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
1473: hdiag = hypre_ParCSRMatrixDiag(parcsr);
1474: hoffd = hypre_ParCSRMatrixOffd(parcsr);
1475: ndiag = hypre_ParCSRMatrixDiag(new_parcsr);
1476: noffd = hypre_ParCSRMatrixOffd(new_parcsr);
1477: PetscArraycpy(hypre_CSRMatrixData(ndiag),hypre_CSRMatrixData(hdiag),hypre_CSRMatrixNumNonzeros(hdiag));
1478: PetscArraycpy(hypre_CSRMatrixData(noffd),hypre_CSRMatrixData(hoffd),hypre_CSRMatrixNumNonzeros(hoffd));
1479: parcsr = new_parcsr;
1480: copymode = PETSC_OWN_POINTER;
1481: }
1483: /* set ParCSR object */
1484: hypre_IJMatrixObject(hA->ij) = parcsr;
1485: T->preallocated = PETSC_TRUE;
1487: /* set assembled flag */
1488: hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1489: PetscStackCallStandard(HYPRE_IJMatrixInitialize,(hA->ij));
1490: if (ishyp) {
1491: PetscMPIInt myid = 0;
1493: /* make sure we always have row_starts and col_starts available */
1494: if (HYPRE_AssumedPartitionCheck()) {
1495: MPI_Comm_rank(comm,&myid);
1496: }
1497: if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1498: PetscLayout map;
1500: MatGetLayouts(T,NULL,&map);
1501: PetscLayoutSetUp(map);
1502: hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
1503: }
1504: if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1505: PetscLayout map;
1507: MatGetLayouts(T,&map,NULL);
1508: PetscLayoutSetUp(map);
1509: hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt*)(map->range + myid);
1510: }
1511: /* prevent from freeing the pointer */
1512: if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1513: *A = T;
1514: MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);
1515: MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);
1516: } else if (isaij) {
1517: if (copymode != PETSC_OWN_POINTER) {
1518: /* prevent from freeing the pointer */
1519: hA->inner_free = PETSC_FALSE;
1520: MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INITIAL_MATRIX,A);
1521: MatDestroy(&T);
1522: } else { /* AIJ return type with PETSC_OWN_POINTER */
1523: MatConvert_HYPRE_AIJ(T,MATAIJ,MAT_INPLACE_MATRIX,&T);
1524: *A = T;
1525: }
1526: } else if (isis) {
1527: MatConvert_HYPRE_IS(T,MATIS,MAT_INITIAL_MATRIX,A);
1528: if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1529: MatDestroy(&T);
1530: }
1531: return(0);
1532: }
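/*
   Illustrative usage sketch (added for exposition, not part of the original source):
   wrapping an existing hypre_ParCSRMatrix as a PETSc Mat without copying the data.
   With PETSC_USE_POINTER the caller keeps ownership of the ParCSR, so MatDestroy()
   does not free it. Error checking is omitted; x and y are assumed to have layouts
   compatible with the ParCSR, and the helper name is hypothetical.
*/
static PetscErrorCode ExampleWrapParCSR(hypre_ParCSRMatrix *parcsr, Vec x, Vec y)
{
  Mat A;

  MatCreateFromParCSR(parcsr,MATHYPRE,PETSC_USE_POINTER,&A);
  MatMult(A,x,y); /* use the wrapper like any other PETSc Mat */
  MatDestroy(&A); /* parcsr itself is not destroyed */
  return(0);
}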
1534: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1535: {
1536: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1537: HYPRE_Int type;
1540: if (!hA->ij) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"HYPRE_IJMatrix not present");
1541: PetscStackCallStandard(HYPRE_IJMatrixGetObjectType,(hA->ij,&type));
1542: if (type != HYPRE_PARCSR) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1543: PetscStackCallStandard(HYPRE_IJMatrixGetObject,(hA->ij,(void**)parcsr));
1544: return(0);
1545: }
1547: /*
1548: MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix
1550: Not collective
1552:    Input Parameter:
1553: .  A - the MATHYPRE object
1555: Output Parameter:
1556: . parcsr - the pointer to the hypre_ParCSRMatrix
1558: Level: intermediate
1560: .seealso: MatHYPRE, PetscCopyMode
1561: */
1562: PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1563: {
1569: PetscUseMethod(A,"MatHYPREGetParCSR_C",(Mat,hypre_ParCSRMatrix**),(A,parcsr));
1570: return(0);
1571: }
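/*
   Illustrative usage sketch (added for exposition, not part of the original source):
   accessing the hypre_ParCSRMatrix that backs a MATHYPRE matrix, e.g. to pass it to
   a hypre routine directly. The pointer is borrowed and must not be destroyed by the
   caller. Error checking is omitted and the helper name is hypothetical.
*/
static PetscErrorCode ExampleInspectParCSR(Mat A)
{
  hypre_ParCSRMatrix *parcsr;

  MatHYPREGetParCSR(A,&parcsr);
  PetscPrintf(PetscObjectComm((PetscObject)A),"Global rows: %D\n",(PetscInt)hypre_ParCSRMatrixGlobalNumRows(parcsr));
  return(0);
}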
1573: static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1574: {
1575: hypre_ParCSRMatrix *parcsr;
1576: hypre_CSRMatrix *ha;
1577: PetscInt rst;
1578: PetscErrorCode ierr;
1581: if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented with non-square diagonal blocks");
1582: MatGetOwnershipRange(A,&rst,NULL);
1583: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1584: if (missing) *missing = PETSC_FALSE;
1585: if (dd) *dd = -1;
1586: ha = hypre_ParCSRMatrixDiag(parcsr);
1587: if (ha) {
1588: PetscInt size,i;
1589: HYPRE_Int *ii,*jj;
1591: size = hypre_CSRMatrixNumRows(ha);
1592: ii = hypre_CSRMatrixI(ha);
1593: jj = hypre_CSRMatrixJ(ha);
1594: for (i = 0; i < size; i++) {
1595: PetscInt j;
1596: PetscBool found = PETSC_FALSE;
1598: for (j = ii[i]; j < ii[i+1] && !found; j++)
1599: found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;
1601: if (!found) {
1602: PetscInfo1(A,"Matrix is missing local diagonal entry %D\n",i);
1603: if (missing) *missing = PETSC_TRUE;
1604: if (dd) *dd = i+rst;
1605: return(0);
1606: }
1607: }
1608: if (!size) {
1609: PetscInfo(A,"Matrix has no diagonal entries and is therefore missing the diagonal\n");
1610: if (missing) *missing = PETSC_TRUE;
1611: if (dd) *dd = rst;
1612: }
1613: } else {
1614: PetscInfo(A,"Matrix has no diagonal entries and is therefore missing the diagonal\n");
1615: if (missing) *missing = PETSC_TRUE;
1616: if (dd) *dd = rst;
1617: }
1618: return(0);
1619: }
1621: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1622: {
1623: hypre_ParCSRMatrix *parcsr;
1624: hypre_CSRMatrix *ha;
1625: PetscErrorCode ierr;
1626: HYPRE_Complex hs;
1629: PetscHYPREScalarCast(s,&hs);
1630: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1631: /* diagonal part */
1632: ha = hypre_ParCSRMatrixDiag(parcsr);
1633: if (ha) {
1634: PetscInt size,i;
1635: HYPRE_Int *ii;
1636: HYPRE_Complex *a;
1638: size = hypre_CSRMatrixNumRows(ha);
1639: a = hypre_CSRMatrixData(ha);
1640: ii = hypre_CSRMatrixI(ha);
1641: for (i = 0; i < ii[size]; i++) a[i] *= hs;
1642: }
1643: /* off-diagonal part */
1644: ha = hypre_ParCSRMatrixOffd(parcsr);
1645: if (ha) {
1646: PetscInt size,i;
1647: HYPRE_Int *ii;
1648: HYPRE_Complex *a;
1650: size = hypre_CSRMatrixNumRows(ha);
1651: a = hypre_CSRMatrixData(ha);
1652: ii = hypre_CSRMatrixI(ha);
1653: for (i = 0; i < ii[size]; i++) a[i] *= hs;
1654: }
1655: return(0);
1656: }
1658: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1659: {
1660: hypre_ParCSRMatrix *parcsr;
1661: HYPRE_Int *lrows;
1662: PetscInt rst,ren,i;
1663: PetscErrorCode ierr;
1666: if (x || b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"To be implemented");
1667: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1668: PetscMalloc1(numRows,&lrows);
1669: MatGetOwnershipRange(A,&rst,&ren);
1670: for (i=0;i<numRows;i++) {
1671: if (rows[i] < rst || rows[i] >= ren)
1672: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Non-local rows not yet supported");
1673: lrows[i] = rows[i] - rst;
1674: }
1675: PetscStackCallStandard(hypre_ParCSRMatrixEliminateRowsCols,(parcsr,numRows,lrows));
1676: PetscFree(lrows);
1677: return(0);
1678: }
1680: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1681: {
1682: PetscErrorCode ierr;
1685: if (ha) {
1686: HYPRE_Int *ii, size;
1687: HYPRE_Complex *a;
1689: size = hypre_CSRMatrixNumRows(ha);
1690: a = hypre_CSRMatrixData(ha);
1691: ii = hypre_CSRMatrixI(ha);
1693: if (a) {PetscArrayzero(a,ii[size]);}
1694: }
1695: return(0);
1696: }
1698: PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1699: {
1700: hypre_ParCSRMatrix *parcsr;
1701: PetscErrorCode ierr;
1704: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1705: /* diagonal part */
1706: MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr));
1707: /* off-diagonal part */
1708: MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr));
1709: return(0);
1710: }
1712: static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA,PetscInt N,const PetscInt rows[],HYPRE_Complex diag)
1713: {
1714: PetscInt ii;
1715: HYPRE_Int *i, *j;
1716: HYPRE_Complex *a;
1719: if (!hA) return(0);
1721: i = hypre_CSRMatrixI(hA);
1722: j = hypre_CSRMatrixJ(hA);
1723: a = hypre_CSRMatrixData(hA);
1725: for (ii = 0; ii < N; ii++) {
1726: HYPRE_Int jj, ibeg, iend, irow;
1728: irow = rows[ii];
1729: ibeg = i[irow];
1730: iend = i[irow+1];
1731: for (jj = ibeg; jj < iend; jj++)
1732: if (j[jj] == irow) a[jj] = diag;
1733: else a[jj] = 0.0;
1734: }
1735: return(0);
1736: }
1738: static PetscErrorCode MatZeroRows_HYPRE(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1739: {
1740: hypre_ParCSRMatrix *parcsr;
1741: PetscInt *lrows,len;
1742: HYPRE_Complex hdiag;
1743: PetscErrorCode ierr;
1746: if (x || b) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Modifying the solution and the right-hand side is not supported");
1747: PetscHYPREScalarCast(diag,&hdiag);
1748: /* retrieve the internal matrix */
1749: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1750: /* get locally owned rows */
1751: MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);
1752: /* zero diagonal part */
1753: MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr),len,lrows,hdiag);
1754: /* zero off-diagonal part */
1755: MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr),len,lrows,0.0);
1757: PetscFree(lrows);
1758: return(0);
1759: }
1761: static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat,MatAssemblyType mode)
1762: {
1766: if (mat->nooffprocentries) return(0);
1768: MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);
1769: return(0);
1770: }
1772: static PetscErrorCode MatGetRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1773: {
1774: hypre_ParCSRMatrix *parcsr;
1775: HYPRE_Int hnz;
1776: PetscErrorCode ierr;
1779: /* retrieve the internal matrix */
1780: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1781: /* call HYPRE API */
1782: PetscStackCallStandard(HYPRE_ParCSRMatrixGetRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1783: if (nz) *nz = (PetscInt)hnz;
1784: return(0);
1785: }
1787: static PetscErrorCode MatRestoreRow_HYPRE(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1788: {
1789: hypre_ParCSRMatrix *parcsr;
1790: HYPRE_Int hnz;
1791: PetscErrorCode ierr;
1794: /* retrieve the internal matrix */
1795: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1796: /* call HYPRE API */
1797: hnz = nz ? (HYPRE_Int)(*nz) : 0;
1798: PetscStackCallStandard(HYPRE_ParCSRMatrixRestoreRow,(parcsr,row,&hnz,(HYPRE_BigInt**)idx,(HYPRE_Complex**)v));
1799: return(0);
1800: }
1802: static PetscErrorCode MatGetValues_HYPRE(Mat A,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1803: {
1804: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1805: PetscInt i;
1808: if (!m || !n) return(0);
1809: /* Ignore negative row indices here; negative column indices should be ignored automatically by hypre */
1812: for (i=0; i<m; i++) {
1813: if (idxm[i] >= 0) {
1814: HYPRE_Int hn = (HYPRE_Int)n;
1815: PetscStackCallStandard(HYPRE_IJMatrixGetValues,(hA->ij,1,&hn,(HYPRE_BigInt*)&idxm[i],(HYPRE_BigInt*)idxn,(HYPRE_Complex*)(v + i*n)));
1816: }
1817: }
1818: return(0);
1819: }
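An illustrative sketch of that convention (editorial example, not in the file; the helper is hypothetical and it assumes row 0 is owned by the calling process). Entries of v that correspond to a skipped row are simply left untouched:

    #include <petscmat.h>

    static PetscErrorCode GetBlockSkippingARow(Mat A)
    {
      PetscScalar    v[4];
      PetscInt       rows[2] = {0,-1};  /* the negative row index is skipped */
      PetscInt       cols[2] = {0,1};
      PetscErrorCode ierr;

      ierr = MatGetValues(A,2,rows,2,cols,v);CHKERRQ(ierr);
      /* v[0],v[1] now hold row 0; v[2],v[3] are untouched because row -1 was ignored */
      return(0);
    }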
1821: static PetscErrorCode MatSetOption_HYPRE(Mat A,MatOption op,PetscBool flg)
1822: {
1823: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1826: switch (op) {
1827: case MAT_NO_OFF_PROC_ENTRIES:
1828: if (flg) {
1829: PetscStackCallStandard(HYPRE_IJMatrixSetMaxOffProcElmts,(hA->ij,0));
1830: }
1831: break;
1832: default:
1833: break;
1834: }
1835: return(0);
1836: }
1838: static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
1839: {
1840: hypre_ParCSRMatrix *parcsr;
1841: PetscErrorCode ierr;
1842: Mat B;
1843: PetscViewerFormat format;
1844: PetscErrorCode (*mview)(Mat,PetscViewer) = NULL;
1847: PetscViewerGetFormat(view,&format);
1848: if (format != PETSC_VIEWER_NATIVE) {
1849: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1850: MatCreateFromParCSR(parcsr,MATAIJ,PETSC_USE_POINTER,&B);
1851: MatGetOperation(B,MATOP_VIEW,(void(**)(void))&mview);
1852: if (!mview) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Missing view operation");
1853: (*mview)(B,view);
1854: MatDestroy(&B);
1855: } else {
1856: Mat_HYPRE *hA = (Mat_HYPRE*)A->data;
1857: PetscMPIInt size;
1858: PetscBool isascii;
1859: const char *filename;
1861: /* HYPRE uses only text files */
1862: PetscObjectTypeCompare((PetscObject)view,PETSCVIEWERASCII,&isascii);
1863: if (!isascii) SETERRQ1(PetscObjectComm((PetscObject)view),PETSC_ERR_SUP,"PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII",((PetscObject)view)->type_name);
1864: PetscViewerFileGetName(view,&filename);
1865: PetscStackCallStandard(HYPRE_IJMatrixPrint,(hA->ij,filename));
1866: MPI_Comm_size(hA->comm,&size);
1867: if (size > 1) {
1868: PetscViewerASCIIPrintf(view,"Matrix files: %s.%05d ... %s.%05d\n",filename,0,filename,size-1);
1869: } else {
1870: PetscViewerASCIIPrintf(view,"Matrix file: %s.%05d\n",filename,0);
1871: }
1872: }
1873: return(0);
1874: }
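A sketch of how the native branch can be reached (editorial example; the helper and file name are arbitrary): pushing PETSC_VIEWER_NATIVE on an ASCII viewer makes MatView end up in HYPRE_IJMatrixPrint, which writes one text file per rank:

    #include <petscmat.h>
    #include <petscviewer.h>

    static PetscErrorCode DumpHypreNative(Mat A)
    {
      PetscViewer    viewer;
      PetscErrorCode ierr;

      ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),"hypremat",&viewer);CHKERRQ(ierr);
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_NATIVE);CHKERRQ(ierr);
      ierr = MatView(A,viewer);CHKERRQ(ierr);   /* writes hypremat.00000, hypremat.00001, ... */
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
      return(0);
    }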
1876: static PetscErrorCode MatDuplicate_HYPRE(Mat A,MatDuplicateOption op, Mat *B)
1877: {
1878: hypre_ParCSRMatrix *parcsr;
1879: PetscErrorCode ierr;
1880: PetscCopyMode cpmode;
1883: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1884: if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
1885: parcsr = hypre_ParCSRMatrixCompleteClone(parcsr);
1886: cpmode = PETSC_OWN_POINTER;
1887: } else {
1888: cpmode = PETSC_COPY_VALUES;
1889: }
1890: MatCreateFromParCSR(parcsr,MATHYPRE,cpmode,B);
1891: return(0);
1892: }
1894: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
1895: {
1896: hypre_ParCSRMatrix *acsr,*bcsr;
1897: PetscErrorCode ierr;
1900: if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
1901: MatHYPREGetParCSR_HYPRE(A,&acsr);
1902: MatHYPREGetParCSR_HYPRE(B,&bcsr);
1903: PetscStackCallStandard(hypre_ParCSRMatrixCopy,(acsr,bcsr,1));
1904: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
1905: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
1906: } else {
1907: MatCopy_Basic(A,B,str);
1908: }
1909: return(0);
1910: }
1912: static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
1913: {
1914: hypre_ParCSRMatrix *parcsr;
1915: hypre_CSRMatrix *dmat;
1916: HYPRE_Complex *a;
1917: HYPRE_Complex *data = NULL;
1918: HYPRE_Int *diag = NULL;
1919: PetscInt i;
1920: PetscBool cong;
1921: PetscErrorCode ierr;
1924: MatHasCongruentLayouts(A,&cong);
1925: if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Only for square matrices with the same local distribution of rows and columns");
1926: #if defined(PETSC_USE_DEBUG)
1927: {
1928: PetscBool miss;
1929: MatMissingDiagonal(A,&miss,NULL);
1930: if (miss && A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not implemented when diagonal entries are missing");
1931: }
1932: #endif
1933: MatHYPREGetParCSR_HYPRE(A,&parcsr);
1934: dmat = hypre_ParCSRMatrixDiag(parcsr);
1935: if (dmat) {
1936: /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
1937: VecGetArray(d,(PetscScalar**)&a);
1938: diag = hypre_CSRMatrixI(dmat);
1939: data = hypre_CSRMatrixData(dmat);
1940: for (i=0;i<A->rmap->n;i++) a[i] = data[diag[i]];
1941: VecRestoreArray(d,(PetscScalar**)&a);
1942: }
1943: return(0);
1944: }
1946: #include <petscblaslapack.h>
1948: static PetscErrorCode MatAXPY_HYPRE(Mat Y,PetscScalar a,Mat X,MatStructure str)
1949: {
1953: if (str == SAME_NONZERO_PATTERN) {
1954: hypre_ParCSRMatrix *x,*y;
1955: hypre_CSRMatrix *xloc,*yloc;
1956: PetscInt xnnz,ynnz;
1957: HYPRE_Complex *xarr,*yarr;
1958: PetscBLASInt one=1,bnz;
1960: MatHYPREGetParCSR(Y,&y);
1961: MatHYPREGetParCSR(X,&x);
1963: /* diagonal block */
1964: xloc = hypre_ParCSRMatrixDiag(x);
1965: yloc = hypre_ParCSRMatrixDiag(y);
1966: xnnz = 0;
1967: ynnz = 0;
1968: xarr = NULL;
1969: yarr = NULL;
1970: if (xloc) {
1971: xarr = hypre_CSRMatrixData(xloc);
1972: xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1973: }
1974: if (yloc) {
1975: yarr = hypre_CSRMatrixData(yloc);
1976: ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1977: }
1978: if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in diagonal block %D != %D",xnnz,ynnz);
1979: PetscBLASIntCast(xnnz,&bnz);
1980: PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
1982: /* off-diagonal block */
1983: xloc = hypre_ParCSRMatrixOffd(x);
1984: yloc = hypre_ParCSRMatrixOffd(y);
1985: xnnz = 0;
1986: ynnz = 0;
1987: xarr = NULL;
1988: yarr = NULL;
1989: if (xloc) {
1990: xarr = hypre_CSRMatrixData(xloc);
1991: xnnz = hypre_CSRMatrixNumNonzeros(xloc);
1992: }
1993: if (yloc) {
1994: yarr = hypre_CSRMatrixData(yloc);
1995: ynnz = hypre_CSRMatrixNumNonzeros(yloc);
1996: }
1997: if (xnnz != ynnz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Different number of nonzeros in off-diagonal block %D != %D",xnnz,ynnz);
1998: PetscBLASIntCast(xnnz,&bnz);
1999: PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&a,(PetscScalar*)xarr,&one,(PetscScalar*)yarr,&one));
2000: } else if (str == SUBSET_NONZERO_PATTERN) {
2001: MatAXPY_Basic(Y,a,X,str);
2002: } else {
2003: Mat B;
2005: MatAXPY_Basic_Preallocate(Y,X,&B);
2006: MatAXPY_BasicWithPreallocation(B,Y,a,X,str);
2007: MatHeaderReplace(Y,&B);
2008: }
2009: return(0);
2010: }
2012: /*MC
2013: MATHYPRE - MATHYPRE = "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2014: based on the hypre IJ interface.
2016: Level: intermediate
2018: .seealso: MatCreate()
2019: M*/
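A minimal creation sketch (editorial example with arbitrary sizes and a hypothetical helper name; it assumes PETSc was configured with hypre): the type is used like any other Mat, with MatSetValues_HYPRE and the assembly routines registered below driving the hypre IJ interface:

    #include <petscmat.h>

    static PetscErrorCode BuildDiagonalHypreMat(MPI_Comm comm,Mat *A)
    {
      PetscErrorCode ierr;
      PetscInt       i,rstart,rend;
      PetscScalar    one = 1.0;

      ierr = MatCreate(comm,A);CHKERRQ(ierr);
      ierr = MatSetSizes(*A,PETSC_DECIDE,PETSC_DECIDE,10,10);CHKERRQ(ierr);
      ierr = MatSetType(*A,MATHYPRE);CHKERRQ(ierr);
      ierr = MatSetUp(*A);CHKERRQ(ierr);        /* default preallocation */
      ierr = MatGetOwnershipRange(*A,&rstart,&rend);CHKERRQ(ierr);
      for (i=rstart; i<rend; i++) {
        ierr = MatSetValues(*A,1,&i,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      return(0);
    }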
2021: PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
2022: {
2023: Mat_HYPRE *hB;
2027: PetscNewLog(B,&hB);
2028: hB->inner_free = PETSC_TRUE;
2029: hB->available = PETSC_TRUE;
2030: hB->size = 0;
2031: hB->array = NULL;
2033: B->data = (void*)hB;
2034: B->rmap->bs = 1;
2035: B->assembled = PETSC_FALSE;
2037: PetscMemzero(B->ops,sizeof(struct _MatOps));
2038: B->ops->mult = MatMult_HYPRE;
2039: B->ops->multtranspose = MatMultTranspose_HYPRE;
2040: B->ops->multadd = MatMultAdd_HYPRE;
2041: B->ops->multtransposeadd = MatMultTransposeAdd_HYPRE;
2042: B->ops->setup = MatSetUp_HYPRE;
2043: B->ops->destroy = MatDestroy_HYPRE;
2044: B->ops->assemblyend = MatAssemblyEnd_HYPRE;
2045: B->ops->assemblybegin = MatAssemblyBegin_HYPRE;
2046: B->ops->ptap = MatPtAP_HYPRE_HYPRE;
2047: B->ops->matmult = MatMatMult_HYPRE_HYPRE;
2048: B->ops->setvalues = MatSetValues_HYPRE;
2049: B->ops->missingdiagonal = MatMissingDiagonal_HYPRE;
2050: B->ops->scale = MatScale_HYPRE;
2051: B->ops->zerorowscolumns = MatZeroRowsColumns_HYPRE;
2052: B->ops->zeroentries = MatZeroEntries_HYPRE;
2053: B->ops->zerorows = MatZeroRows_HYPRE;
2054: B->ops->getrow = MatGetRow_HYPRE;
2055: B->ops->restorerow = MatRestoreRow_HYPRE;
2056: B->ops->getvalues = MatGetValues_HYPRE;
2057: B->ops->setoption = MatSetOption_HYPRE;
2058: B->ops->duplicate = MatDuplicate_HYPRE;
2059: B->ops->copy = MatCopy_HYPRE;
2060: B->ops->view = MatView_HYPRE;
2061: B->ops->getdiagonal = MatGetDiagonal_HYPRE;
2062: B->ops->axpy = MatAXPY_HYPRE;
2064: /* build the stash used to cache off-process entries */
2065: MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);
2067: MPI_Comm_dup(PetscObjectComm((PetscObject)B),&hB->comm);
2068: PetscObjectChangeTypeName((PetscObject)B,MATHYPRE);
2069: PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_aij_C",MatConvert_HYPRE_AIJ);
2070: PetscObjectComposeFunction((PetscObject)B,"MatConvert_hypre_is_C",MatConvert_HYPRE_IS);
2071: PetscObjectComposeFunction((PetscObject)B,"MatPtAP_seqaij_hypre_C",MatPtAP_AIJ_HYPRE);
2072: PetscObjectComposeFunction((PetscObject)B,"MatPtAP_mpiaij_hypre_C",MatPtAP_AIJ_HYPRE);
2073: PetscObjectComposeFunction((PetscObject)B,"MatHYPRESetPreallocation_C",MatHYPRESetPreallocation_HYPRE);
2074: PetscObjectComposeFunction((PetscObject)B,"MatHYPREGetParCSR_C",MatHYPREGetParCSR_HYPRE);
2075: PetscObjectComposeFunction((PetscObject)B,"MatPtAP_is_hypre_C",MatPtAP_IS_XAIJ);
2076: return(0);
2077: }
2079: static PetscErrorCode hypre_array_destroy(void *ptr)
2080: {
2082: hypre_TFree(ptr,HYPRE_MEMORY_HOST);
2083: return(0);
2084: }