Actual source code: pdvec.c

petsc-3.7.1 2016-05-15
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
  6: #include <petscviewerhdf5.h>

 10: PetscErrorCode VecDestroy_MPI(Vec v)
 11: {
 12:   Vec_MPI        *x = (Vec_MPI*)v->data;

 16: #if defined(PETSC_USE_LOG)
 17:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 18: #endif
 19:   if (!x) return(0);
 20:   PetscFree(x->array_allocated);

 22:   /* Destroy local representation of vector if it exists */
 23:   if (x->localrep) {
 24:     VecDestroy(&x->localrep);
 25:     VecScatterDestroy(&x->localupdate);
 26:   }
 27:   VecAssemblyReset_MPI(v);

 29:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 30:   VecStashDestroy_Private(&v->bstash);
 31:   VecStashDestroy_Private(&v->stash);
 32:   PetscFree(v->data);
 33:   return(0);
 34: }
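/*
   Usage sketch (editorial addition, not part of the original pdvec.c): VecDestroy()
   dispatches to VecDestroy_MPI() above for vectors of type VECMPI. The calls below are
   the standard public PETSc API.

     Vec v;
     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);
     ... use v ...
     VecDestroy(&v);     frees the allocated array, any local representation, and both stashes
*/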

 38: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 39: {
 40:   PetscErrorCode    ierr;
 41:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 42:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 43:   MPI_Status        status;
 44:   PetscScalar       *values;
 45:   const PetscScalar *xarray;
 46:   const char        *name;
 47:   PetscViewerFormat format;

 50:   VecGetArrayRead(xin,&xarray);
 51:   /* determine maximum message to arrive */
 52:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
 53:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
 54:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

 56:   PetscViewerGetFormat(viewer,&format);
 57:   if (!rank) {
 58:     PetscMalloc1(len,&values);
 59:     /*
 60:         MATLAB format and ASCII format are very similar except
 61:         MATLAB uses %18.16e format while ASCII uses %g
 62:     */
 63:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 64:       PetscObjectGetName((PetscObject)xin,&name);
 65:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 66:       for (i=0; i<xin->map->n; i++) {
 67: #if defined(PETSC_USE_COMPLEX)
 68:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 69:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
 70:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 71:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
 72:         } else {
 73:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
 74:         }
 75: #else
 76:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
 77: #endif
 78:       }
 79:       /* receive and print messages */
 80:       for (j=1; j<size; j++) {
 81:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
 82:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 83:         for (i=0; i<n; i++) {
 84: #if defined(PETSC_USE_COMPLEX)
 85:           if (PetscImaginaryPart(values[i]) > 0.0) {
 86:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
 87:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 88:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
 89:           } else {
 90:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
 91:           }
 92: #else
 93:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
 94: #endif
 95:         }
 96:       }
 97:       PetscViewerASCIIPrintf(viewer,"];\n");

 99:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100:       for (i=0; i<xin->map->n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
103: #else
104:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
105: #endif
106:       }
107:       /* receive and print messages */
108:       for (j=1; j<size; j++) {
109:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
110:         MPI_Get_count(&status,MPIU_SCALAR,&n);
111:         for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
114: #else
115:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
116: #endif
117:         }
118:       }
119:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120:       /*
121:         state 0: No header has been output
122:         state 1: Only POINT_DATA has been output
123:         state 2: Only CELL_DATA has been output
124:         state 3: Output both, POINT_DATA last
125:         state 4: Output both, CELL_DATA last
126:       */
127:       static PetscInt stateId     = -1;
128:       int             outputState = 0;
129:       int             doOutput    = 0;
130:       PetscBool       hasState;
131:       PetscInt        bs, b;

133:       if (stateId < 0) {
134:         PetscObjectComposedDataRegister(&stateId);
135:       }
136:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
137:       if (!hasState) outputState = 0;

139:       PetscObjectGetName((PetscObject)xin,&name);
140:       VecGetLocalSize(xin, &nLen);
141:       PetscMPIIntCast(nLen,&n);
142:       VecGetBlockSize(xin, &bs);
143:       if (format == PETSC_VIEWER_ASCII_VTK) {
144:         if (outputState == 0) {
145:           outputState = 1;
146:           doOutput    = 1;
147:         } else if (outputState == 1) doOutput = 0;
148:         else if (outputState == 2) {
149:           outputState = 3;
150:           doOutput    = 1;
151:         } else if (outputState == 3) doOutput = 0;
152:         else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

154:         if (doOutput) {
155:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
156:         }
157:       } else {
158:         if (outputState == 0) {
159:           outputState = 2;
160:           doOutput    = 1;
161:         } else if (outputState == 1) {
162:           outputState = 4;
163:           doOutput    = 1;
164:         } else if (outputState == 2) doOutput = 0;
165:         else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
166:         else if (outputState == 4) doOutput = 0;

168:         if (doOutput) {
169:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
170:         }
171:       }
172:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
173:       if (name) {
174:         if (bs == 3) {
175:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
176:         } else {
177:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
178:         }
179:       } else {
180:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
181:       }
182:       if (bs != 3) {
183:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
184:       }
185:       for (i=0; i<n/bs; i++) {
186:         for (b=0; b<bs; b++) {
187:           if (b > 0) {
188:             PetscViewerASCIIPrintf(viewer," ");
189:           }
190:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
191:         }
192:         PetscViewerASCIIPrintf(viewer,"\n");
193:       }
194:       for (j=1; j<size; j++) {
195:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
196:         MPI_Get_count(&status,MPIU_SCALAR,&n);
197:         for (i=0; i<n/bs; i++) {
198:           for (b=0; b<bs; b++) {
199:             if (b > 0) {
200:               PetscViewerASCIIPrintf(viewer," ");
201:             }
202:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
203:           }
204:           PetscViewerASCIIPrintf(viewer,"\n");
205:         }
206:       }
207:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
208:       PetscInt bs, b;

210:       VecGetLocalSize(xin, &nLen);
211:       PetscMPIIntCast(nLen,&n);
212:       VecGetBlockSize(xin, &bs);
213:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);

215:       for (i=0; i<n/bs; i++) {
216:         for (b=0; b<bs; b++) {
217:           if (b > 0) {
218:             PetscViewerASCIIPrintf(viewer," ");
219:           }
220:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
221:         }
222:         for (b=bs; b<3; b++) {
223:           PetscViewerASCIIPrintf(viewer," 0.0");
224:         }
225:         PetscViewerASCIIPrintf(viewer,"\n");
226:       }
227:       for (j=1; j<size; j++) {
228:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
229:         MPI_Get_count(&status,MPIU_SCALAR,&n);
230:         for (i=0; i<n/bs; i++) {
231:           for (b=0; b<bs; b++) {
232:             if (b > 0) {
233:               PetscViewerASCIIPrintf(viewer," ");
234:             }
235:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
236:           }
237:           for (b=bs; b<3; b++) {
238:             PetscViewerASCIIPrintf(viewer," 0.0");
239:           }
240:           PetscViewerASCIIPrintf(viewer,"\n");
241:         }
242:       }
243:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
244:       PetscInt bs, b, vertexCount = 1;

246:       VecGetLocalSize(xin, &nLen);
247:       PetscMPIIntCast(nLen,&n);
248:       VecGetBlockSize(xin, &bs);
249:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);

251:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
252:       for (i=0; i<n/bs; i++) {
253:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
254:         for (b=0; b<bs; b++) {
255:           if (b > 0) {
256:             PetscViewerASCIIPrintf(viewer," ");
257:           }
258: #if !defined(PETSC_USE_COMPLEX)
259:           PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
260: #endif
261:         }
262:         PetscViewerASCIIPrintf(viewer,"\n");
263:       }
264:       for (j=1; j<size; j++) {
265:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
266:         MPI_Get_count(&status,MPIU_SCALAR,&n);
267:         for (i=0; i<n/bs; i++) {
268:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
269:           for (b=0; b<bs; b++) {
270:             if (b > 0) {
271:               PetscViewerASCIIPrintf(viewer," ");
272:             }
273: #if !defined(PETSC_USE_COMPLEX)
274:             PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
275: #endif
276:           }
277:           PetscViewerASCIIPrintf(viewer,"\n");
278:         }
279:       }
280:     } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
281:       /* No info */
282:     } else {
283:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
284:       cnt = 0;
285:       for (i=0; i<xin->map->n; i++) {
286:         if (format == PETSC_VIEWER_ASCII_INDEX) {
287:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
288:         }
289: #if defined(PETSC_USE_COMPLEX)
290:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
291:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
292:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
293:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
294:         } else {
295:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
296:         }
297: #else
298:         PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
299: #endif
300:       }
301:       /* receive and print messages */
302:       for (j=1; j<size; j++) {
303:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
304:         MPI_Get_count(&status,MPIU_SCALAR,&n);
305:         if (format != PETSC_VIEWER_ASCII_COMMON) {
306:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
307:         }
308:         for (i=0; i<n; i++) {
309:           if (format == PETSC_VIEWER_ASCII_INDEX) {
310:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
311:           }
312: #if defined(PETSC_USE_COMPLEX)
313:           if (PetscImaginaryPart(values[i]) > 0.0) {
314:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
315:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
316:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
317:           } else {
318:             PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
319:           }
320: #else
321:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
322: #endif
323:         }
324:       }
325:     }
326:     PetscFree(values);
327:   } else {
328:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
329:       /* Rank 0 is not trying to receive anything, so don't send anything */
330:     } else {
331:       if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
332:         /* this may be a collective operation so make sure everyone calls it */
333:         PetscObjectGetName((PetscObject)xin,&name);
334:       }
335:       MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
336:     }
337:   }
338:   PetscViewerFlush(viewer);
339:   VecRestoreArrayRead(xin,&xarray);
340:   return(0);
341: }
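/*
   Usage sketch (editorial addition, not part of the original pdvec.c): the MATLAB-style
   branch above is reached by pushing a format onto an ASCII viewer before VecView();
   "x" is assumed to be an assembled parallel vector.

     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_MATLAB);
     VecView(x,PETSC_VIEWER_STDOUT_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);

   Only rank 0 prints; every other rank ships its local array to rank 0 with MPI_Send(),
   which is why VecView() must be called collectively on the vector's communicator.
*/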

345: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
346: {
347:   PetscErrorCode    ierr;
348:   PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
349:   PetscInt          len,n,j,tr[2];
350:   int               fdes;
351:   MPI_Status        status;
352:   PetscScalar       *values;
353:   const PetscScalar *xarray;
354:   FILE              *file;
355: #if defined(PETSC_HAVE_MPIIO)
356:   PetscBool         isMPIIO;
357: #endif
358:   PetscBool         skipHeader;
359:   PetscInt          message_count,flowcontrolcount;
360:   PetscViewerFormat format;

363:   VecGetArrayRead(xin,&xarray);
364:   PetscViewerBinaryGetDescriptor(viewer,&fdes);
365:   PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

367:   /* determine maximum message to arrive */
368:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
369:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

371:   if (!skipHeader) {
372:     tr[0] = VEC_FILE_CLASSID;
373:     tr[1] = xin->map->N;
374:     PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
375:   }

377: #if defined(PETSC_HAVE_MPIIO)
378:   PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
379:   if (!isMPIIO) {
380: #endif
381:     PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
382:     if (!rank) {
383:       PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

385:       len = 0;
386:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
387:       PetscMalloc1(len,&values);
388:       PetscMPIIntCast(len,&mesgsize);
389:       /* receive and save messages */
390:       for (j=1; j<size; j++) {
391:         PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
392:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
393:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
394:         n    = (PetscInt)mesglen;
395:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
396:       }
397:       PetscViewerFlowControlEndMaster(viewer,&message_count);
398:       PetscFree(values);
399:     } else {
400:       PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
401:       PetscMPIIntCast(xin->map->n,&mesgsize);
402:       MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
403:       PetscViewerFlowControlEndWorker(viewer,&message_count);
404:     }
405:     PetscViewerGetFormat(viewer,&format);
406:     if (format == PETSC_VIEWER_BINARY_MATLAB) {
407:       MPI_Comm   comm;
408:       FILE       *info;
409:       const char *name;

411:       PetscObjectGetName((PetscObject)xin,&name);
412:       PetscObjectGetComm((PetscObject)viewer,&comm);
413:       PetscViewerBinaryGetInfoPointer(viewer,&info);
414:       PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
415:       PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
416:       PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
417:     }
418: #if defined(PETSC_HAVE_MPIIO)
419:   } else {
420:     MPI_Offset   off;
421:     MPI_File     mfdes;
422:     PetscMPIInt  lsize;

424:     PetscMPIIntCast(xin->map->n,&lsize);
425:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
426:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
427:     off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
428:     MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
429:     MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
430:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
431:   }
432: #endif

434:   VecRestoreArrayRead(xin,&xarray);
435:   if (!rank) {
436:     PetscViewerBinaryGetInfoPointer(viewer,&file);
437:     if (file) {
438:       if (((PetscObject)xin)->prefix) {
439:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
440:       } else {
441:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
442:       }
443:     }
444:   }
445:   return(0);
446: }
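/*
   Usage sketch (editorial addition, not part of the original pdvec.c): the usual way to
   reach the binary path above and to read the file back; the file name is illustrative.

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);

     Vec y;
     VecCreate(PETSC_COMM_WORLD,&y);
     VecSetType(y,VECMPI);
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
     VecLoad(y,viewer);
     PetscViewerDestroy(&viewer);

   When the viewer's MPI-IO flag (queried above with PetscViewerBinaryGetUseMPIIO()) is set,
   each process writes its slice collectively with MPIU_File_write_all() instead of funneling
   all data through rank 0.
*/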

448: #include <petscdraw.h>
451: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
452: {
453:   PetscDraw         draw;
454:   PetscBool         isnull;
455:   PetscDrawLG       lg;
456:   PetscMPIInt       i,size,rank,n,N,*lens = NULL,*disp = NULL;
457:   PetscReal         *values, *xx = NULL,*yy = NULL;
458:   const PetscScalar *xarray;
459:   int               colors[] = {PETSC_DRAW_RED};
460:   PetscErrorCode    ierr;

463:   PetscViewerDrawGetDraw(viewer,0,&draw);
464:   PetscDrawIsNull(draw,&isnull);
465:   if (isnull) return(0);
466:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
467:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
468:   PetscMPIIntCast(xin->map->n,&n);
469:   PetscMPIIntCast(xin->map->N,&N);

471:   VecGetArrayRead(xin,&xarray);
472: #if defined(PETSC_USE_COMPLEX)
473:   PetscMalloc1(n+1,&values);
474:   for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
475: #else
476:   values = (PetscReal*)xarray;
477: #endif
478:   if (!rank) {
479:     PetscMalloc2(N,&xx,N,&yy);
480:     for (i=0; i<N; i++) xx[i] = (PetscReal)i;
481:     PetscMalloc2(size,&lens,size,&disp);
482:     for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
483:     for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
484:   }
485:   MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
486:   PetscFree2(lens,disp);
487: #if defined(PETSC_USE_COMPLEX)
488:   PetscFree(values);
489: #endif
490:   VecRestoreArrayRead(xin,&xarray);

492:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
493:   PetscDrawLGReset(lg);
494:   PetscDrawLGSetDimension(lg,1);
495:   PetscDrawLGSetColors(lg,colors);
496:   if (!rank) {
497:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
498:     PetscFree2(xx,yy);
499:   }
500:   PetscDrawLGDraw(lg);
501:   PetscDrawLGSave(lg);
502:   return(0);
503: }
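/*
   Usage sketch (editorial addition, not part of the original pdvec.c): the line-graph branch
   is selected by the PETSC_VIEWER_DRAW_LG format on a draw viewer.

     PetscViewerPushFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
     VecView(x,PETSC_VIEWER_DRAW_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_DRAW_WORLD);

   Rank 0 gathers the whole vector with MPI_Gatherv() (real parts only in complex builds)
   and draws a single curve indexed by global entry number.
*/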

507: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
508: {
509:   PetscErrorCode    ierr;
510:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
511:   PetscInt          i,start,end;
512:   MPI_Status        status;
513:   PetscReal         min,max,tmp = 0.0;
514:   PetscDraw         draw;
515:   PetscBool         isnull;
516:   PetscDrawAxis     axis;
517:   const PetscScalar *xarray;

520:   PetscViewerDrawGetDraw(viewer,0,&draw);
521:   PetscDrawIsNull(draw,&isnull);
522:   if (isnull) return(0);
523:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
524:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);

526:   VecMin(xin,NULL,&min);
527:   VecMax(xin,NULL,&max);
528:   if (min == max) {
529:     min -= 1.e-5;
530:     max += 1.e-5;
531:   }

533:   PetscDrawCheckResizedWindow(draw);
534:   PetscDrawClear(draw);

536:   PetscDrawAxisCreate(draw,&axis);
537:   PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
538:   PetscDrawAxisDraw(axis);
539:   PetscDrawAxisDestroy(&axis);

541:   /* draw local part of vector */
542:   VecGetArrayRead(xin,&xarray);
543:   VecGetOwnershipRange(xin,&start,&end);
544:   if (rank < size-1) { /* send value to right */
545:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
546:   }
547:   if (rank) { /* receive value from left */
548:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
549:   }
550:   PetscDrawCollectiveBegin(draw);
551:   if (rank) {
552:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
553:   }
554:   for (i=1; i<xin->map->n; i++) {
555:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
556:   }
557:   PetscDrawCollectiveEnd(draw);
558:   VecRestoreArrayRead(xin,&xarray);

560:   PetscDrawFlush(draw);
561:   PetscDrawPause(draw);
562:   PetscDrawSave(draw);
563:   return(0);
564: }
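/*
   Usage sketch (editorial addition, not part of the original pdvec.c): the default draw path
   above plots the vector as a connected line; each process draws its own segments after
   receiving the last value owned by its left neighbor. A minimal explicit viewer:

     PetscViewer draw;
     PetscViewerDrawOpen(PETSC_COMM_WORLD,NULL,"Vec",PETSC_DECIDE,PETSC_DECIDE,600,400,&draw);
     VecView(x,draw);
     PetscViewerDestroy(&draw);
*/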

566: #if defined(PETSC_HAVE_MATLAB_ENGINE)
569: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
570: {
571:   PetscErrorCode    ierr;
572:   PetscMPIInt       rank,size,*lens;
573:   PetscInt          i,N = xin->map->N;
574:   const PetscScalar *xarray;
575:   PetscScalar       *xx;

578:   VecGetArrayRead(xin,&xarray);
579:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
580:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
581:   if (!rank) {
582:     PetscMalloc1(N,&xx);
583:     PetscMalloc1(size,&lens);
584:     for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

586:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
587:     PetscFree(lens);

589:     PetscObjectName((PetscObject)xin);
590:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

592:     PetscFree(xx);
593:   } else {
594:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
595:   }
596:   VecRestoreArrayRead(xin,&xarray);
597:   return(0);
598: }
599: #endif
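/*
   Usage sketch (editorial addition, not part of the original pdvec.c): VecView_MPI_Matlab()
   is compiled only when PETSc is configured with the MATLAB engine. Assuming such a build,
   a typical use is

     PetscViewer matlab;
     PetscViewerMatlabOpen(PETSC_COMM_WORLD,"output.mat",FILE_MODE_WRITE,&matlab);
     PetscObjectSetName((PetscObject)x,"x");
     VecView(x,matlab);
     PetscViewerDestroy(&matlab);

   PetscViewerMatlabOpen() and the file mode above are assumptions made for illustration;
   check petscviewer.h in the installed build for the MATLAB viewer entry points.
*/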

601: #if defined(PETSC_HAVE_HDF5)
604: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
605: {
606:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
607:   hid_t             filespace; /* file dataspace identifier */
608:   hid_t             chunkspace; /* chunk dataset property identifier */
609:   hid_t             plist_id;  /* property list identifier */
610:   hid_t             dset_id;   /* dataset identifier */
611:   hid_t             memspace;  /* memory dataspace identifier */
612:   hid_t             file_id;
613:   hid_t             group;
614:   hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
615:   hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
616:   PetscInt          bs = PetscAbs(xin->map->bs);
617:   hsize_t           dim;
618:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
619:   PetscInt          timestep;
620:   PetscInt          low;
621:   const PetscScalar *x;
622:   const char        *vecname;
623:   PetscErrorCode    ierr;
624:   PetscBool         dim2;
625:   PetscBool         spoutput;

628:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
629:   PetscViewerHDF5GetTimestep(viewer, &timestep);
630:   PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
631:   PetscViewerHDF5GetSPOutput(viewer,&spoutput);

633:   /* Create the dataspace for the dataset.
634:    *
635:    * dims - holds the current dimensions of the dataset
636:    *
637:    * maxDims - holds the maximum dimensions of the dataset (unlimited
638:    * for the number of time steps with the current dimensions for the
639:    * other dimensions; so only additional time steps can be added).
640:    *
641:    * chunkDims - holds the size of a single time step (required to
642:    * permit extending dataset).
643:    */
644:   dim = 0;
645:   if (timestep >= 0) {
646:     dims[dim]      = timestep+1;
647:     maxDims[dim]   = H5S_UNLIMITED;
648:     chunkDims[dim] = 1;
649:     ++dim;
650:   }
651:   PetscHDF5IntCast(xin->map->N/bs,dims + dim);

653:   maxDims[dim]   = dims[dim];
654:   chunkDims[dim] = dims[dim];
655:   ++dim;
656:   if (bs > 1 || dim2) {
657:     dims[dim]      = bs;
658:     maxDims[dim]   = dims[dim];
659:     chunkDims[dim] = dims[dim];
660:     ++dim;
661:   }
662: #if defined(PETSC_USE_COMPLEX)
663:   dims[dim]      = 2;
664:   maxDims[dim]   = dims[dim];
665:   chunkDims[dim] = dims[dim];
666:   ++dim;
667: #endif
668:   PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

670: #if defined(PETSC_USE_REAL_SINGLE)
671:   memscalartype = H5T_NATIVE_FLOAT;
672:   filescalartype = H5T_NATIVE_FLOAT;
673: #elif defined(PETSC_USE_REAL___FLOAT128)
674: #error "HDF5 output with 128 bit floats not supported."
675: #else
676:   memscalartype = H5T_NATIVE_DOUBLE;
677:   if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
678:   else filescalartype = H5T_NATIVE_DOUBLE;
679: #endif

681:   /* Create the dataset with default properties and close filespace */
682:   PetscObjectGetName((PetscObject) xin, &vecname);
683:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
684:     /* Create chunk */
685:     PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
686:     PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

688: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
689:     PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
690: #else
691:     PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
692: #endif
693:     PetscStackCallHDF5(H5Pclose,(chunkspace));
694:   } else {
695:     PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
696:     PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
697:   }
698:   PetscStackCallHDF5(H5Sclose,(filespace));

700:   /* Each process defines a dataset and writes it to the hyperslab in the file */
701:   dim = 0;
702:   if (timestep >= 0) {
703:     count[dim] = 1;
704:     ++dim;
705:   }
706:   PetscHDF5IntCast(xin->map->n/bs,count + dim);
707:   ++dim;
708:   if (bs > 1 || dim2) {
709:     count[dim] = bs;
710:     ++dim;
711:   }
712: #if defined(PETSC_USE_COMPLEX)
713:   count[dim] = 2;
714:   ++dim;
715: #endif
716:   if (xin->map->n > 0) {
717:     PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
718:   } else {
719:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
720:     PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
721:   }

723:   /* Select hyperslab in the file */
724:   VecGetOwnershipRange(xin, &low, NULL);
725:   dim  = 0;
726:   if (timestep >= 0) {
727:     offset[dim] = timestep;
728:     ++dim;
729:   }
730:   PetscHDF5IntCast(low/bs,offset + dim);
731:   ++dim;
732:   if (bs > 1 || dim2) {
733:     offset[dim] = 0;
734:     ++dim;
735:   }
736: #if defined(PETSC_USE_COMPLEX)
737:   offset[dim] = 0;
738:   ++dim;
739: #endif
740:   if (xin->map->n > 0) {
741:     PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
742:     PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
743:   } else {
744:     /* Create null filespace to match null memspace. */
745:     PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
746:   }

748:   /* Create property list for collective dataset write */
749:   PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
750: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
751:   PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
752: #endif
753:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

755:   VecGetArrayRead(xin, &x);
756:   PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
757:   PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
758:   VecRestoreArrayRead(xin, &x);

760:   /* Close/release resources */
761:   if (group != file_id) {
762:     PetscStackCallHDF5(H5Gclose,(group));
763:   }
764:   PetscStackCallHDF5(H5Pclose,(plist_id));
765:   PetscStackCallHDF5(H5Sclose,(filespace));
766:   PetscStackCallHDF5(H5Sclose,(memspace));
767:   PetscStackCallHDF5(H5Dclose,(dset_id));
768:   PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
769:   return(0);
770: }
771: #endif
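/*
   Usage sketch (editorial addition, not part of the original pdvec.c): typical use of the
   HDF5 path above. The object name selects the dataset name; the file name is illustrative.

     PetscViewer h5;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"vec.h5",FILE_MODE_WRITE,&h5);
     PetscObjectSetName((PetscObject)x,"solution");
     VecView(x,h5);
     PetscViewerDestroy(&h5);

   Setting a timestep on the viewer (PetscViewerHDF5SetTimestep() or
   PetscViewerHDF5IncrementTimestep()) activates the unlimited leading time dimension
   handled in the timestep >= 0 branches above.
*/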

775: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
776: {
778:   PetscBool      iascii,isbinary,isdraw;
779: #if defined(PETSC_HAVE_MATHEMATICA)
780:   PetscBool      ismathematica;
781: #endif
782: #if defined(PETSC_HAVE_HDF5)
783:   PetscBool      ishdf5;
784: #endif
785: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
786:   PetscBool      ismatlab;
787: #endif

790:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
791:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
792:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
793: #if defined(PETSC_HAVE_MATHEMATICA)
794:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
795: #endif
796: #if defined(PETSC_HAVE_HDF5)
797:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
798: #endif
799: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
800:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
801: #endif
802:   if (iascii) {
803:     VecView_MPI_ASCII(xin,viewer);
804:   } else if (isbinary) {
805:     VecView_MPI_Binary(xin,viewer);
806:   } else if (isdraw) {
807:     PetscViewerFormat format;
808:     PetscViewerGetFormat(viewer,&format);
809:     if (format == PETSC_VIEWER_DRAW_LG) {
810:       VecView_MPI_Draw_LG(xin,viewer);
811:     } else {
812:       VecView_MPI_Draw(xin,viewer);
813:     }
814: #if defined(PETSC_HAVE_MATHEMATICA)
815:   } else if (ismathematica) {
816:     PetscViewerMathematicaPutVector(viewer,xin);
817: #endif
818: #if defined(PETSC_HAVE_HDF5)
819:   } else if (ishdf5) {
820:     VecView_MPI_HDF5(xin,viewer);
821: #endif
822: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
823:   } else if (ismatlab) {
824:     VecView_MPI_Matlab(xin,viewer);
825: #endif
826:   }
827:   return(0);
828: }
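/*
   Usage sketch (editorial addition, not part of the original pdvec.c): VecView_MPI() is the
   VECMPI implementation behind VecView(), so the branches above can also be reached from the
   options database, for example

     -vec_view                    ASCII to stdout
     -vec_view draw               line plot in an X window
     -vec_view binary:sol.dat     PETSc binary file

   The option spellings follow the generic PETSc viewer option syntax and are given here as
   an illustration, not taken from this file.
*/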

832: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
833: {
835:   *N = xin->map->N;
836:   return(0);
837: }

841: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
842: {
843:   const PetscScalar *xx;
844:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
845:   PetscErrorCode    ierr;

848:   VecGetArrayRead(xin,&xx);
849:   for (i=0; i<ni; i++) {
850:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
851:     tmp = ix[i] - start;
852: #if defined(PETSC_USE_DEBUG)
853:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
854: #endif
855:     y[i] = xx[tmp];
856:   }
857:   VecRestoreArrayRead(xin,&xx);
858:   return(0);
859: }
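/*
   Usage sketch (editorial addition, not part of the original pdvec.c): VecGetValues() on an
   MPI vector can only return locally owned entries, as the debug check above enforces.
   Assuming the calling process owns at least two entries:

     PetscInt    rstart,rend,idx[2];
     PetscScalar vals[2];
     VecGetOwnershipRange(x,&rstart,&rend);
     idx[0] = rstart; idx[1] = rstart+1;
     VecGetValues(x,2,idx,vals);
*/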

863: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
864: {
866:   PetscMPIInt    rank    = xin->stash.rank;
867:   PetscInt       *owners = xin->map->range,start = owners[rank];
868:   PetscInt       end     = owners[rank+1],i,row;
869:   PetscScalar    *xx;

872: #if defined(PETSC_USE_DEBUG)
873:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
874:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
875: #endif
876:   VecGetArray(xin,&xx);
877:   xin->stash.insertmode = addv;

879:   if (addv == INSERT_VALUES) {
880:     for (i=0; i<ni; i++) {
881:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
882: #if defined(PETSC_USE_DEBUG)
883:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
884: #endif
885:       if ((row = ix[i]) >= start && row < end) {
886:         xx[row-start] = y[i];
887:       } else if (!xin->stash.donotstash) {
888: #if defined(PETSC_USE_DEBUG)
889:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
890: #endif
891:         VecStashValue_Private(&xin->stash,row,y[i]);
892:       }
893:     }
894:   } else {
895:     for (i=0; i<ni; i++) {
896:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
897: #if defined(PETSC_USE_DEBUG)
898:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
899: #endif
900:       if ((row = ix[i]) >= start && row < end) {
901:         xx[row-start] += y[i];
902:       } else if (!xin->stash.donotstash) {
903: #if defined(PETSC_USE_DEBUG)
904:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
905: #endif
906:         VecStashValue_Private(&xin->stash,row,y[i]);
907:       }
908:     }
909:   }
910:   VecRestoreArray(xin,&xx);
911:   return(0);
912: }
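/*
   Usage sketch (editorial addition, not part of the original pdvec.c): entries destined for
   other processes are only cached in the stash here; they reach their owners during assembly,
   so every VecSetValues() phase must be closed by the collective assembly pair.

     VecSetValues(x,n,indices,values,ADD_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);

   Mixing ADD_VALUES and INSERT_VALUES without an intervening assembly is an error, as the
   debug checks above (and in VecAssemblyBegin_MPI() below) indicate.
*/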

916: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
917: {
918:   PetscMPIInt    rank    = xin->stash.rank;
919:   PetscInt       *owners = xin->map->range,start = owners[rank];
921:   PetscInt       end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
922:   PetscScalar    *xx,*y = (PetscScalar*)yin;

925:   VecGetArray(xin,&xx);
926: #if defined(PETSC_USE_DEBUG)
927:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
928:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
929: #endif
930:   xin->stash.insertmode = addv;

932:   if (addv == INSERT_VALUES) {
933:     for (i=0; i<ni; i++) {
934:       if ((row = bs*ix[i]) >= start && row < end) {
935:         for (j=0; j<bs; j++) xx[row-start+j] = y[j];
936:       } else if (!xin->stash.donotstash) {
937:         if (ix[i] < 0) continue;
938: #if defined(PETSC_USE_DEBUG)
939:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
940: #endif
941:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
942:       }
943:       y += bs;
944:     }
945:   } else {
946:     for (i=0; i<ni; i++) {
947:       if ((row = bs*ix[i]) >= start && row < end) {
948:         for (j=0; j<bs; j++) xx[row-start+j] += y[j];
949:       } else if (!xin->stash.donotstash) {
950:         if (ix[i] < 0) continue;
951: #if defined(PETSC_USE_DEBUG)
952:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
953: #endif
954:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
955:       }
956:       y += bs;
957:     }
958:   }
959:   VecRestoreArray(xin,&xx);
960:   return(0);
961: }
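/*
   Usage sketch (editorial addition, not part of the original pdvec.c): the blocked variant
   addresses whole blocks by block index, so with block size bs the value array supplies bs
   scalars per index. The block index 0 below is illustrative.

     VecSetBlockSize(x,3);
     PetscInt    bidx[1]  = {0};
     PetscScalar bvals[3] = {1.0,2.0,3.0};
     VecSetValuesBlocked(x,1,bidx,bvals,INSERT_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
*/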

963: /*
964:    Since nsends or nreceives may be zero we add 1 in certain mallocs
965: to make sure we never malloc an empty one.
966: */
969: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
970: {
972:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
973:   PetscMPIInt    size;
974:   InsertMode     addv;
975:   MPI_Comm       comm;

978:   PetscObjectGetComm((PetscObject)xin,&comm);
979:   if (xin->stash.donotstash) return(0);

981:   MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
982:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
983:   xin->stash.insertmode = addv; /* in case this processor had no cache */
984:   xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

986:   VecGetBlockSize(xin,&bs);
987:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
988:   if (!xin->bstash.bowners && xin->map->bs != -1) {
989:     PetscMalloc1(size+1,&bowners);
990:     for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
991:     xin->bstash.bowners = bowners;
992:   } else bowners = xin->bstash.bowners;

994:   VecStashScatterBegin_Private(&xin->stash,owners);
995:   VecStashScatterBegin_Private(&xin->bstash,bowners);
996:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
997:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
998:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
999:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1000:   return(0);
1001: }

1005: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1006: {
1008:   PetscInt       base,i,j,*row,flg,bs;
1009:   PetscMPIInt    n;
1010:   PetscScalar    *val,*vv,*array,*xarray;

1013:   if (!vec->stash.donotstash) {
1014:     VecGetArray(vec,&xarray);
1015:     VecGetBlockSize(vec,&bs);
1016:     base = vec->map->range[vec->stash.rank];

1018:     /* Process the stash */
1019:     while (1) {
1020:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1021:       if (!flg) break;
1022:       if (vec->stash.insertmode == ADD_VALUES) {
1023:         for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1024:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1025:         for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1026:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1027:     }
1028:     VecStashScatterEnd_Private(&vec->stash);

1030:     /* now process the block-stash */
1031:     while (1) {
1032:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1033:       if (!flg) break;
1034:       for (i=0; i<n; i++) {
1035:         array = xarray+row[i]*bs-base;
1036:         vv    = val+i*bs;
1037:         if (vec->stash.insertmode == ADD_VALUES) {
1038:           for (j=0; j<bs; j++) array[j] += vv[j];
1039:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1040:           for (j=0; j<bs; j++) array[j] = vv[j];
1041:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1042:       }
1043:     }
1044:     VecStashScatterEnd_Private(&vec->bstash);
1045:     VecRestoreArray(vec,&xarray);
1046:   }
1047:   vec->stash.insertmode = NOT_SET_VALUES;
1048:   return(0);
1049: }
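/*
   Usage sketch (editorial addition, not part of the original pdvec.c): VecAssemblyBegin_MPI()
   reconciles the insert mode across processes and starts scattering the stashed off-process
   entries to their owners; VecAssemblyEnd_MPI() receives those messages and applies them,
   scalar stash first, then the block stash. A typical assembly loop:

     for (k=0; k<nidx; k++) {
       VecSetValue(x,idx[k],vals[k],ADD_VALUES);
     }
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);

   where VecSetValue() is the single-entry convenience wrapper around VecSetValues().
*/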