Actual source code: pdvec.c
petsc-3.8.3 2017-12-09
/*
   Code for some of the parallel vector primitives.
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>
#include <petsc/private/glvisviewerimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petscviewerhdf5.h>
PetscErrorCode VecDestroy_MPI(Vec v)
{
  Vec_MPI *x = (Vec_MPI*)v->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
#endif
  if (!x) return(0);
  PetscFree(x->array_allocated);

  /* Destroy local representation of vector if it exists */
  if (x->localrep) {
    VecDestroy(&x->localrep);
    VecScatterDestroy(&x->localupdate);
  }
  VecAssemblyReset_MPI(v);

  /* Destroy the stashes: note the order - so that the tags are freed properly */
  VecStashDestroy_Private(&v->bstash);
  VecStashDestroy_Private(&v->stash);
  PetscFree(v->data);
  return(0);
}
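/*
   Usage sketch (hypothetical driver code, not part of this file): VecDestroy()
   reaches VecDestroy_MPI() through the vector's function table for type VECMPI,
   so a caller never invokes this routine directly.

     Vec v;
     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);
     // ... use v ...
     VecDestroy(&v);   // dispatches to VecDestroy_MPI() for a VECMPI vector
*/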
PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscInt          i,work = xin->map->n,cnt,len,nLen;
  PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
  MPI_Status        status;
  PetscScalar       *values;
  const PetscScalar *xarray;
  const char        *name;
  PetscViewerFormat format;

  VecGetArrayRead(xin,&xarray);
  /* determine maximum message to arrive */
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0, len = 0; } /* no parallel distributed write support from GLVis */
  if (!rank) {
    PetscMalloc1(len,&values);
    /*
        MATLAB format and ASCII format are very similar except
        MATLAB uses %18.16e format while ASCII uses %g
    */
    if (format == PETSC_VIEWER_ASCII_MATLAB) {
      PetscObjectGetName((PetscObject)xin,&name);
      PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
      for (i=0; i<xin->map->n; i++) {
#if defined(PETSC_USE_COMPLEX)
        if (PetscImaginaryPart(xarray[i]) > 0.0) {
          PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
        } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
          PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
        } else {
          PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
        }
#else
        PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
#endif
      }
      /* receive and print messages */
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n; i++) {
#if defined(PETSC_USE_COMPLEX)
          if (PetscImaginaryPart(values[i]) > 0.0) {
            PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
          } else if (PetscImaginaryPart(values[i]) < 0.0) {
            PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
          } else {
            PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
          }
#else
          PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
#endif
        }
      }
      PetscViewerASCIIPrintf(viewer,"];\n");

    } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
      for (i=0; i<xin->map->n; i++) {
#if defined(PETSC_USE_COMPLEX)
        PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
#else
        PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
#endif
      }
      /* receive and print messages */
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n; i++) {
#if defined(PETSC_USE_COMPLEX)
          PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
#else
          PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
#endif
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
      /*
        state 0: No header has been output
        state 1: Only POINT_DATA has been output
        state 2: Only CELL_DATA has been output
        state 3: Output both, POINT_DATA last
        state 4: Output both, CELL_DATA last
      */
      static PetscInt stateId     = -1;
      int             outputState = 0;
      int             doOutput    = 0;
      PetscBool       hasState;
      PetscInt        bs, b;

      if (stateId < 0) {
        PetscObjectComposedDataRegister(&stateId);
      }
      PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
      if (!hasState) outputState = 0;

      PetscObjectGetName((PetscObject)xin,&name);
      VecGetLocalSize(xin, &nLen);
      PetscMPIIntCast(nLen,&n);
      VecGetBlockSize(xin, &bs);
      if (format == PETSC_VIEWER_ASCII_VTK) {
        if (outputState == 0) {
          outputState = 1;
          doOutput    = 1;
        } else if (outputState == 1) doOutput = 0;
        else if (outputState == 2) {
          outputState = 3;
          doOutput    = 1;
        } else if (outputState == 3) doOutput = 0;
        else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

        if (doOutput) {
          PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
        }
      } else {
        if (outputState == 0) {
          outputState = 2;
          doOutput    = 1;
        } else if (outputState == 1) {
          outputState = 4;
          doOutput    = 1;
        } else if (outputState == 2) doOutput = 0;
        else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
        else if (outputState == 4) doOutput = 0;

        if (doOutput) {
          PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
        }
      }
      PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
      if (name) {
        if (bs == 3) {
          PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
        } else {
          PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
        }
      } else {
        PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
      }
      if (bs != 3) {
        PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
      }
      for (i=0; i<n/bs; i++) {
        for (b=0; b<bs; b++) {
          if (b > 0) {
            PetscViewerASCIIPrintf(viewer," ");
          }
          PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
        }
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n/bs; i++) {
          for (b=0; b<bs; b++) {
            if (b > 0) {
              PetscViewerASCIIPrintf(viewer," ");
            }
            PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
          }
          PetscViewerASCIIPrintf(viewer,"\n");
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
      PetscInt bs, b;

      VecGetLocalSize(xin, &nLen);
      PetscMPIIntCast(nLen,&n);
      VecGetBlockSize(xin, &bs);
      if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);

      for (i=0; i<n/bs; i++) {
        for (b=0; b<bs; b++) {
          if (b > 0) {
            PetscViewerASCIIPrintf(viewer," ");
          }
          PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
        }
        for (b=bs; b<3; b++) {
          PetscViewerASCIIPrintf(viewer," 0.0");
        }
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n/bs; i++) {
          for (b=0; b<bs; b++) {
            if (b > 0) {
              PetscViewerASCIIPrintf(viewer," ");
            }
            PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
          }
          for (b=bs; b<3; b++) {
            PetscViewerASCIIPrintf(viewer," 0.0");
          }
          PetscViewerASCIIPrintf(viewer,"\n");
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_PCICE) {
      PetscInt bs, b, vertexCount = 1;

      VecGetLocalSize(xin, &nLen);
      PetscMPIIntCast(nLen,&n);
      VecGetBlockSize(xin, &bs);
      if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);

      PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
      for (i=0; i<n/bs; i++) {
        PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
        for (b=0; b<bs; b++) {
          if (b > 0) {
            PetscViewerASCIIPrintf(viewer," ");
          }
#if !defined(PETSC_USE_COMPLEX)
          PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
#endif
        }
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n/bs; i++) {
          PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
          for (b=0; b<bs; b++) {
            if (b > 0) {
              PetscViewerASCIIPrintf(viewer," ");
            }
#if !defined(PETSC_USE_COMPLEX)
            PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
#endif
          }
          PetscViewerASCIIPrintf(viewer,"\n");
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
      const PetscScalar       *array;
      PetscInt                i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
      PetscContainer          glvis_container;
      PetscViewerGLVisVecInfo glvis_vec_info;
      PetscViewerGLVisInfo    glvis_info;
      PetscErrorCode          ierr;

      /* mfem::FiniteElementSpace::Save() */
      VecGetBlockSize(xin,&vdim);
      PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
      PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
      if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
      PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
      PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
      PetscViewerASCIIPrintf(viewer,"VDim: %d\n",vdim);
      PetscViewerASCIIPrintf(viewer,"Ordering: %d\n",ordering);
      PetscViewerASCIIPrintf(viewer,"\n");
      /* mfem::Vector::Print() */
      PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
      if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
      PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
      if (glvis_info->enabled) {
        VecGetLocalSize(xin,&n);
        VecGetArrayRead(xin,&array);
        for (i=0;i<n;i++) {
          PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
          PetscViewerASCIIPrintf(viewer,"\n");
        }
        VecRestoreArrayRead(xin,&array);
      }
    } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* No info */
    } else {
      if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
      cnt = 0;
      for (i=0; i<xin->map->n; i++) {
        if (format == PETSC_VIEWER_ASCII_INDEX) {
          PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
        }
#if defined(PETSC_USE_COMPLEX)
        if (PetscImaginaryPart(xarray[i]) > 0.0) {
          PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
        } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
          PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
        } else {
          PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
        }
#else
        PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
#endif
      }
      /* receive and print messages */
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        if (format != PETSC_VIEWER_ASCII_COMMON) {
          PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
        }
        for (i=0; i<n; i++) {
          if (format == PETSC_VIEWER_ASCII_INDEX) {
            PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
          }
#if defined(PETSC_USE_COMPLEX)
          if (PetscImaginaryPart(values[i]) > 0.0) {
            PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
          } else if (PetscImaginaryPart(values[i]) < 0.0) {
            PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
          } else {
            PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
          }
#else
          PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
#endif
        }
      }
    }
    PetscFree(values);
  } else {
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* Rank 0 is not trying to receive anything, so don't send anything */
    } else {
      if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
        /* this may be a collective operation so make sure everyone calls it */
        PetscObjectGetName((PetscObject)xin,&name);
      }
      MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
    }
  }
  PetscViewerFlush(viewer);
  VecRestoreArrayRead(xin,&xarray);
  return(0);
}
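/*
   Usage sketch (hypothetical caller, not part of this file): the branch taken
   above is selected by the viewer's format. For example, to obtain the
   MATLAB-style "%18.16e" output one might write:

     Vec v;                                    // assembled parallel vector
     PetscObjectSetName((PetscObject)v,"x");   // name used in "x = [ ... ];"
     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_MATLAB);
     VecView(v,PETSC_VIEWER_STDOUT_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
*/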
PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
  PetscInt          len,n,j,tr[2];
  int               fdes;
  MPI_Status        status;
  PetscScalar       *values;
  const PetscScalar *xarray;
  FILE              *file;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool         isMPIIO;
#endif
  PetscBool         skipHeader;
  PetscInt          message_count,flowcontrolcount;
  PetscViewerFormat format;

  VecGetArrayRead(xin,&xarray);
  PetscViewerBinaryGetDescriptor(viewer,&fdes);
  PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

  /* determine maximum message to arrive */
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

  if (!skipHeader) {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = xin->map->N;
    PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
  }

#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
  if (!isMPIIO) {
#endif
    PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
    if (!rank) {
      PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

      len = 0;
      for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
      PetscMalloc1(len,&values);
      PetscMPIIntCast(len,&mesgsize);
      /* receive and save messages */
      for (j=1; j<size; j++) {
        PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
        MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
        n = (PetscInt)mesglen;
        PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
      }
      PetscViewerFlowControlEndMaster(viewer,&message_count);
      PetscFree(values);
    } else {
      PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
      PetscMPIIntCast(xin->map->n,&mesgsize);
      MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
      PetscViewerFlowControlEndWorker(viewer,&message_count);
    }
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *name;

      PetscObjectGetName((PetscObject)xin,&name);
      PetscObjectGetComm((PetscObject)viewer,&comm);
      PetscViewerBinaryGetInfoPointer(viewer,&info);
      PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
      PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
      PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
    }
#if defined(PETSC_HAVE_MPIIO)
  } else {
    MPI_Offset  off;
    MPI_File    mfdes;
    PetscMPIInt lsize;

    PetscMPIIntCast(xin->map->n,&lsize);
    PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
    PetscViewerBinaryGetMPIIOOffset(viewer,&off);
    off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
    MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
    MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
    PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
  }
#endif

  VecRestoreArrayRead(xin,&xarray);
  if (!rank) {
    PetscViewerBinaryGetInfoPointer(viewer,&file);
    if (file) {
      if (((PetscObject)xin)->prefix) {
        PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
      } else {
        PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
      }
    }
  }
  return(0);
}
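/*
   Usage sketch (hypothetical caller, not part of this file): this routine is
   reached through a binary viewer; whether the MPI-IO path above is taken is
   controlled by the viewer, e.g. via PetscViewerBinarySetUseMPIIO().

     Vec         v;        // assembled parallel vector
     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);
     VecView(v,viewer);    // dispatches to VecView_MPI_Binary()
     PetscViewerDestroy(&viewer);
*/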
#include <petscdraw.h>
PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
{
  PetscDraw         draw;
  PetscBool         isnull;
  PetscDrawLG       lg;
  PetscMPIInt       i,size,rank,n,N,*lens = NULL,*disp = NULL;
  PetscReal         *values, *xx = NULL,*yy = NULL;
  const PetscScalar *xarray;
  int               colors[] = {PETSC_DRAW_RED};
  PetscErrorCode    ierr;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull);
  if (isnull) return(0);
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  PetscMPIIntCast(xin->map->n,&n);
  PetscMPIIntCast(xin->map->N,&N);

  VecGetArrayRead(xin,&xarray);
#if defined(PETSC_USE_COMPLEX)
  PetscMalloc1(n+1,&values);
  for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
#else
  values = (PetscReal*)xarray;
#endif
  if (!rank) {
    PetscMalloc2(N,&xx,N,&yy);
    for (i=0; i<N; i++) xx[i] = (PetscReal)i;
    PetscMalloc2(size,&lens,size,&disp);
    for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
    for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
  }
  MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
  PetscFree2(lens,disp);
#if defined(PETSC_USE_COMPLEX)
  PetscFree(values);
#endif
  VecRestoreArrayRead(xin,&xarray);

  PetscViewerDrawGetDrawLG(viewer,0,&lg);
  PetscDrawLGReset(lg);
  PetscDrawLGSetDimension(lg,1);
  PetscDrawLGSetColors(lg,colors);
  if (!rank) {
    PetscDrawLGAddPoints(lg,N,&xx,&yy);
    PetscFree2(xx,yy);
  }
  PetscDrawLGDraw(lg);
  PetscDrawLGSave(lg);
  return(0);
}
PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt          i,start,end;
  MPI_Status        status;
  PetscReal         min,max,tmp = 0.0;
  PetscDraw         draw;
  PetscBool         isnull;
  PetscDrawAxis     axis;
  const PetscScalar *xarray;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull);
  if (isnull) return(0);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);

  VecMin(xin,NULL,&min);
  VecMax(xin,NULL,&max);
  if (min == max) {
    min -= 1.e-5;
    max += 1.e-5;
  }

  PetscDrawCheckResizedWindow(draw);
  PetscDrawClear(draw);

  PetscDrawAxisCreate(draw,&axis);
  PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
  PetscDrawAxisDraw(axis);
  PetscDrawAxisDestroy(&axis);

  /* draw local part of vector */
  VecGetArrayRead(xin,&xarray);
  VecGetOwnershipRange(xin,&start,&end);
  if (rank < size-1) { /* send value to right neighbor */
    MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
  }
  if (rank) { /* receive value from left neighbor */
    MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
  }
  PetscDrawCollectiveBegin(draw);
  if (rank) {
    PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
  }
  for (i=1; i<xin->map->n; i++) {
    PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
  }
  PetscDrawCollectiveEnd(draw);
  VecRestoreArrayRead(xin,&xarray);

  PetscDrawFlush(draw);
  PetscDrawPause(draw);
  PetscDrawSave(draw);
  return(0);
}
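/*
   Usage sketch (hypothetical caller, not part of this file): both draw paths
   are reached through a draw viewer; the line-graph variant is chosen by the
   PETSC_VIEWER_DRAW_LG format (see the dispatch in VecView_MPI() below).

     Vec v;   // assembled parallel vector
     PetscViewerPushFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
     VecView(v,PETSC_VIEWER_DRAW_WORLD);   // VecView_MPI_Draw_LG()
     PetscViewerPopFormat(PETSC_VIEWER_DRAW_WORLD);
     VecView(v,PETSC_VIEWER_DRAW_WORLD);   // default path: VecView_MPI_Draw()
*/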
#if defined(PETSC_HAVE_MATLAB_ENGINE)
PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscMPIInt       rank,size,*lens;
  PetscInt          i,N = xin->map->N;
  const PetscScalar *xarray;
  PetscScalar       *xx;

  VecGetArrayRead(xin,&xarray);
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  if (!rank) {
    PetscMalloc1(N,&xx);
    PetscMalloc1(size,&lens);
    for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

    MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
    PetscFree(lens);

    PetscObjectName((PetscObject)xin);
    PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

    PetscFree(xx);
  } else {
    MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
  }
  VecRestoreArrayRead(xin,&xarray);
  return(0);
}
#endif
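/*
   Usage sketch (hypothetical caller; assumes a PETSc build with the MATLAB
   engine, i.e. PETSC_HAVE_MATLAB_ENGINE, and that the PETSC_VIEWER_MATLAB_()
   default-viewer macro is available): the vector's name becomes the MATLAB
   variable name, hence the PetscObjectName() call above.

     Vec v;   // assembled parallel vector
     PetscObjectSetName((PetscObject)v,"x");
     VecView(v,PETSC_VIEWER_MATLAB_(PETSC_COMM_WORLD));
*/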
#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
{
  /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
  hid_t             filespace;  /* file dataspace identifier */
  hid_t             chunkspace; /* chunk dataset property identifier */
  hid_t             plist_id;   /* property list identifier */
  hid_t             dset_id;    /* dataset identifier */
  hid_t             memspace;   /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  PetscInt          bs = PetscAbs(xin->map->bs);
  hsize_t           dim;
  hsize_t           maxDims[4], dims[4], chunkDims[4], count[4], offset[4];
  PetscInt          timestep;
  PetscInt          low;
  PetscInt          chunksize;
  const PetscScalar *x;
  const char        *vecname;
  PetscErrorCode    ierr;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
  PetscViewerHDF5GetSPOutput(viewer,&spoutput);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim       = 0;
  chunksize = 1;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(xin->map->N/bs,dims + dim);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  chunksize     *= chunkDims[dim];
  ++dim;
  if (bs > 1 || dim2) {
    dims[dim]      = bs;
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    chunksize     *= chunkDims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  chunksize     *= chunkDims[dim];
  /* hdf5 chunks must be less than the max of 32 bit int */
  if (chunksize > PETSC_HDF5_INT_MAX/64) {
    if (bs > 1 || dim2) {
      if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
        chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
      }
      if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
        chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
      }
    } else {
      chunkDims[dim-1] = PETSC_HDF5_INT_MAX/128;
    }
  }
  ++dim;
#else
  /* hdf5 chunks must be less than the max of 32 bit int */
  if (chunksize > PETSC_HDF5_INT_MAX/64) {
    if (bs > 1 || dim2) {
      if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
        chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
      }
      if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
        chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
      }
    } else {
      chunkDims[dim-1] = PETSC_HDF5_INT_MAX/64;
    }
  }
#endif

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject) xin, &vecname);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
#endif
    PetscStackCallHDF5(H5Pclose,(chunkspace));
  } else {
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(xin->map->n/bs,count + dim);
  ++dim;
  if (bs > 1 || dim2) {
    count[dim] = bs;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  count[dim] = 2;
  ++dim;
#endif
  if (xin->map->n > 0) {
    PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  } else {
    /* Can't create dataspace with zero for any dimension, so create null dataspace. */
    PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
  }

  /* Select hyperslab in the file */
  VecGetOwnershipRange(xin, &low, NULL);
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs > 1 || dim2) {
    offset[dim] = 0;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  ++dim;
#endif
  if (xin->map->n > 0) {
    PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
    PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
  } else {
    /* Create null filespace to match null memspace. */
    PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
  }

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArrayRead(xin, &x);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  VecRestoreArrayRead(xin, &x);

#if defined(PETSC_USE_COMPLEX)
  {
    PetscInt   one = 1;
    const char *groupname;
    char       vecgroup[PETSC_MAX_PATH_LEN];

    PetscViewerHDF5GetGroup(viewer,&groupname);
    PetscSNPrintf(vecgroup,PETSC_MAX_PATH_LEN,"%s/%s",groupname ? groupname : "",vecname);
    PetscViewerHDF5WriteAttribute(viewer,vecgroup,"complex",PETSC_INT,&one);
  }
#endif
  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
  return(0);
}
#endif
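/*
   Usage sketch (hypothetical caller; requires PETSC_HAVE_HDF5): the dataset is
   named after the vector, and successive timesteps extend the first (unlimited)
   dataspace dimension set up above.

     Vec         v;        // assembled parallel vector
     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"sol.h5",FILE_MODE_WRITE,&viewer);
     PetscObjectSetName((PetscObject)v,"x");
     PetscViewerHDF5SetTimestep(viewer,0);
     VecView(v,viewer);                        // writes dataset "x", timestep 0
     PetscViewerHDF5IncrementTimestep(viewer);
     VecView(v,viewer);                        // appends timestep 1
     PetscViewerDestroy(&viewer);
*/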
PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
{
  PetscBool iascii,isbinary,isdraw;
#if defined(PETSC_HAVE_MATHEMATICA)
  PetscBool ismathematica;
#endif
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif
  PetscBool isglvis;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
#if defined(PETSC_HAVE_MATHEMATICA)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
#endif
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
  if (iascii) {
    VecView_MPI_ASCII(xin,viewer);
  } else if (isbinary) {
    VecView_MPI_Binary(xin,viewer);
  } else if (isdraw) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_DRAW_LG) {
      VecView_MPI_Draw_LG(xin,viewer);
    } else {
      VecView_MPI_Draw(xin,viewer);
    }
#if defined(PETSC_HAVE_MATHEMATICA)
  } else if (ismathematica) {
    PetscViewerMathematicaPutVector(viewer,xin);
#endif
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5(xin,viewer);
#endif
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    VecView_MPI_Matlab(xin,viewer);
#endif
  } else if (isglvis) {
    VecView_GLVis(xin,viewer);
  }
  return(0);
}
PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
{
  *N = xin->map->N;
  return(0);
}
PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
{
  const PetscScalar *xx;
  PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
  PetscErrorCode    ierr;

  VecGetArrayRead(xin,&xx);
  for (i=0; i<ni; i++) {
    if (xin->stash.ignorenegidx && ix[i] < 0) continue;
    tmp = ix[i] - start;
#if defined(PETSC_USE_DEBUG)
    if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
#endif
    y[i] = xx[tmp];
  }
  VecRestoreArrayRead(xin,&xx);
  return(0);
}
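/*
   Usage sketch (hypothetical caller, not part of this file): as the range check
   above shows, VecGetValues() on an MPI vector can only fetch locally owned
   entries, so indices are usually taken from the ownership range.

     Vec         v;        // assembled parallel vector
     PetscInt    start,end,ix[2];
     PetscScalar y[2];
     VecGetOwnershipRange(v,&start,&end);
     ix[0] = start; ix[1] = start+1;   // assumes end-start >= 2
     VecGetValues(v,2,ix,y);
*/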
PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
{
  PetscMPIInt rank    = xin->stash.rank;
  PetscInt    *owners = xin->map->range,start = owners[rank];
  PetscInt    end     = owners[rank+1],i,row;
  PetscScalar *xx;

#if defined(PETSC_USE_DEBUG)
  if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
  else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
#endif
  VecGetArray(xin,&xx);
  xin->stash.insertmode = addv;

  if (addv == INSERT_VALUES) {
    for (i=0; i<ni; i++) {
      if (xin->stash.ignorenegidx && ix[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
#endif
      if ((row = ix[i]) >= start && row < end) {
        xx[row-start] = y[i];
      } else if (!xin->stash.donotstash) {
#if defined(PETSC_USE_DEBUG)
        if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
#endif
        VecStashValue_Private(&xin->stash,row,y[i]);
      }
    }
  } else {
    for (i=0; i<ni; i++) {
      if (xin->stash.ignorenegidx && ix[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
      if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
#endif
      if ((row = ix[i]) >= start && row < end) {
        xx[row-start] += y[i];
      } else if (!xin->stash.donotstash) {
#if defined(PETSC_USE_DEBUG)
        if (ix[i] > xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
#endif
        VecStashValue_Private(&xin->stash,row,y[i]);
      }
    }
  }
  VecRestoreArray(xin,&xx);
  return(0);
}
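/*
   Usage sketch (hypothetical caller, not part of this file): values destined
   for other processes are stashed above and only communicated during assembly,
   so every VecSetValues() phase must be closed with the assembly pair.

     Vec         v;                 // parallel vector with layout already set
     PetscInt    i   = 0;           // global index, possibly off-process
     PetscScalar one = 1.0;
     VecSetValues(v,1,&i,&one,ADD_VALUES);
     VecAssemblyBegin(v);           // scatters stashed entries to their owners
     VecAssemblyEnd(v);
*/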
PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
{
  PetscMPIInt rank    = xin->stash.rank;
  PetscInt    *owners = xin->map->range,start = owners[rank];
  PetscInt    end     = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
  PetscScalar *xx,*y  = (PetscScalar*)yin;

  VecGetArray(xin,&xx);
#if defined(PETSC_USE_DEBUG)
  if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
  else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
#endif
  xin->stash.insertmode = addv;

  if (addv == INSERT_VALUES) {
    for (i=0; i<ni; i++) {
      if ((row = bs*ix[i]) >= start && row < end) {
        for (j=0; j<bs; j++) xx[row-start+j] = y[j];
      } else if (!xin->stash.donotstash) {
        if (ix[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
#endif
        VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
      }
      y += bs;
    }
  } else {
    for (i=0; i<ni; i++) {
      if ((row = bs*ix[i]) >= start && row < end) {
        for (j=0; j<bs; j++) xx[row-start+j] += y[j];
      } else if (!xin->stash.donotstash) {
        if (ix[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        if (ix[i] > xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
#endif
        VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
      }
      y += bs;
    }
  }
  VecRestoreArray(xin,&xx);
  return(0);
}
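/*
   Usage sketch (hypothetical caller, not part of this file): blocked insertion
   addresses whole blocks by block index, so with block size bs the global
   block index bix refers to entries bs*bix .. bs*bix+bs-1.

     Vec         v;                    // parallel vector with block size 2
     PetscInt    bix  = 3;             // block index -> entries 6 and 7
     PetscScalar y[2] = {1.0,2.0};
     VecSetValuesBlocked(v,1,&bix,y,INSERT_VALUES);
     VecAssemblyBegin(v);
     VecAssemblyEnd(v);
*/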
/*
   Since nsends or nreceives may be zero we add 1 in certain mallocs
   to make sure we never malloc an empty one.
*/
PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
{
  PetscInt    *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
  PetscMPIInt size;
  InsertMode  addv;
  MPI_Comm    comm;

  PetscObjectGetComm((PetscObject)xin,&comm);
  if (xin->stash.donotstash) return(0);

  MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
  if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  xin->stash.insertmode  = addv; /* in case this processor had no cache */
  xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

  VecGetBlockSize(xin,&bs);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  if (!xin->bstash.bowners && xin->map->bs != -1) {
    PetscMalloc1(size+1,&bowners);
    for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
    xin->bstash.bowners = bowners;
  } else bowners = xin->bstash.bowners;

  VecStashScatterBegin_Private(&xin->stash,owners);
  VecStashScatterBegin_Private(&xin->bstash,bowners);
  VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
  PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
  VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
  PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
  return(0);
}
PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
{
  PetscInt    base,i,j,*row,flg,bs;
  PetscMPIInt n;
  PetscScalar *val,*vv,*array,*xarray;

  if (!vec->stash.donotstash) {
    VecGetArray(vec,&xarray);
    VecGetBlockSize(vec,&bs);
    base = vec->map->range[vec->stash.rank];

    /* Process the stash */
    while (1) {
      VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
      if (!flg) break;
      if (vec->stash.insertmode == ADD_VALUES) {
        for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
      } else if (vec->stash.insertmode == INSERT_VALUES) {
        for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
    }
    VecStashScatterEnd_Private(&vec->stash);

    /* now process the block-stash */
    while (1) {
      VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
      if (!flg) break;
      for (i=0; i<n; i++) {
        array = xarray+row[i]*bs-base;
        vv    = val+i*bs;
        if (vec->stash.insertmode == ADD_VALUES) {
          for (j=0; j<bs; j++) array[j] += vv[j];
        } else if (vec->stash.insertmode == INSERT_VALUES) {
          for (j=0; j<bs; j++) array[j] = vv[j];
        } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
      }
    }
    VecStashScatterEnd_Private(&vec->bstash);
    VecRestoreArray(vec,&xarray);
  }
  vec->stash.insertmode = NOT_SET_VALUES;
  return(0);
}