Actual source code: pod.c
petsc-3.8.3 2017-12-09
#include <petsc/private/kspimpl.h>
#include <petsc/private/matimpl.h>
#include <petscblaslapack.h>

static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
  "@phdthesis{zampini2010non,\n"
  "  title={Non-overlapping Domain Decomposition Methods for Cardiac Reaction-Diffusion Models and Applications},\n"
  "  author={Zampini, S},\n"
  "  year={2010},\n"
  "  school={PhD thesis, Universita degli Studi di Milano}\n"
  "}\n";

typedef struct {
  PetscInt     maxn;              /* maximum number of snapshots */
  PetscInt     n;                 /* number of active snapshots */
  PetscInt     curr;              /* current tip of snapshots set */
  Vec          *xsnap;            /* snapshots */
  Vec          *bsnap;            /* rhs snapshots */
  PetscScalar  *dots_iallreduce;
  MPI_Request  req_iallreduce;
  PetscInt     ndots_iallreduce;  /* if we have iallreduce we can hide the VecMDot communications */
  PetscReal    tol;               /* relative tolerance to retain eigenvalues */
  PetscBool    Aspd;              /* if true, uses the SPD operator as inner product */
  PetscScalar  *corr;             /* correlation matrix */
  PetscReal    *eigs;             /* eigenvalues */
  PetscScalar  *eigv;             /* eigenvectors */
  PetscBLASInt nen;               /* dimension of lower dimensional system */
  PetscInt     st;                /* first eigenvector of correlation matrix to be retained */
  PetscBLASInt *iwork;            /* integer work vector */
  PetscScalar  *yhay;             /* Y^H * A * Y */
  PetscScalar  *low;              /* lower dimensional linear system */
#if defined(PETSC_USE_COMPLEX)
  PetscReal    *rwork;
#endif
  PetscBLASInt lwork;
  PetscScalar  *swork;
  PetscBool    monitor;
} KSPGuessPOD;
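
/*
   Method overview (summarized from the routines below): at each KSPGuessUpdate_POD call the
   current solution x and A*x are stored as snapshots; a correlation matrix of the snapshots
   (X^H * X, or X^H * A * X when the operator is used as inner product) is updated and its
   eigendecomposition is computed with LAPACK xSYEVX. The dominant eigenvectors whose cumulative
   energy fraction exceeds 1 - tol define the POD basis Y = X * V * S, and the Galerkin system
   Y^H * A * Y is assembled when needed (it is the identity when the operator itself is the
   inner product). KSPGuessFormGuess_POD then projects the new right-hand side onto the basis,
   solves the small dense system, and expands the result back as the initial guess.
*/
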
static PetscErrorCode KSPGuessReset_POD(KSPGuess guess)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;
  PetscLayout Alay = NULL,vlay = NULL;
  PetscBool   cong;

  pod->n    = 0;
  pod->curr = 0;
  /* need to wait for completion of outstanding requests */
  if (pod->ndots_iallreduce) {
    MPI_Wait(&pod->req_iallreduce,MPI_STATUS_IGNORE);
  }
  pod->ndots_iallreduce = 0;
  /* destroy vectors if the size of the linear system has changed */
  if (guess->A) {
    MatGetLayouts(guess->A,&Alay,NULL);
  }
  if (pod->xsnap) {
    VecGetLayout(pod->xsnap[0],&vlay);
  }
  cong = PETSC_FALSE;
  if (vlay && Alay) {
    PetscLayoutCompare(Alay,vlay,&cong);
  }
  if (!cong) {
    VecDestroyVecs(pod->maxn,&pod->xsnap);
    VecDestroyVecs(pod->maxn,&pod->bsnap);
  }
  return(0);
}

static PetscErrorCode KSPGuessSetUp_POD(KSPGuess guess)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;

  if (!pod->corr) {
    PetscScalar  sdummy;
    PetscReal    rdummy = 0;
    PetscBLASInt bN,lierr,idummy;

    PetscCalloc6(pod->maxn*pod->maxn,&pod->corr,pod->maxn,&pod->eigs,pod->maxn*pod->maxn,&pod->eigv,
                 6*pod->maxn,&pod->iwork,pod->maxn*pod->maxn,&pod->yhay,pod->maxn*pod->maxn,&pod->low);
#if defined(PETSC_USE_COMPLEX)
    PetscMalloc1(7*pod->maxn,&pod->rwork);
#endif
#if defined(PETSC_HAVE_MPI_IALLREDUCE)
    PetscMalloc1(3*pod->maxn,&pod->dots_iallreduce);
#endif
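    /* Workspace query: calling xSYEVX with lwork = -1 only computes the optimal workspace size,
       which is returned in sdummy */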
    pod->lwork = -1;
    PetscBLASIntCast(pod->maxn,&bN);
#if !defined(PETSC_USE_COMPLEX)
    PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->corr,&bN,&rdummy,&rdummy,&idummy,&idummy,
                       &rdummy,&idummy,pod->eigs,pod->eigv,&bN,&sdummy,&pod->lwork,pod->iwork,pod->iwork+5*bN,&lierr));
#else
    PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->corr,&bN,&rdummy,&rdummy,&idummy,&idummy,
                       &rdummy,&idummy,pod->eigs,pod->eigv,&bN,&sdummy,&pod->lwork,pod->rwork,pod->iwork,pod->iwork+5*bN,&lierr));
#endif
    if (lierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to SYEV Lapack routine %d",(int)lierr);
    PetscBLASIntCast((PetscInt)PetscRealPart(sdummy),&pod->lwork);
    PetscMalloc1(pod->lwork+PetscMax(bN*bN,6*bN),&pod->swork);
  }
  /* work vectors are sequential, we explicitly use MPI_Allreduce */
  if (!pod->xsnap) {
    VecType  type;
    Vec      *v,vseq;
    PetscInt n;

    KSPCreateVecs(guess->ksp,1,&v,0,NULL);
    VecCreate(PETSC_COMM_SELF,&vseq);
    VecGetLocalSize(v[0],&n);
    VecSetSizes(vseq,n,n);
    VecGetType(v[0],&type);
    VecSetType(vseq,type);
    VecDestroyVecs(1,&v);
    VecDuplicateVecs(vseq,pod->maxn,&pod->xsnap);
    VecDestroy(&vseq);
    PetscLogObjectParents(guess,pod->maxn,pod->xsnap);
  }
  if (!pod->bsnap) {
    VecDuplicateVecs(pod->xsnap[0],pod->maxn,&pod->bsnap);
    PetscLogObjectParents(guess,pod->maxn,pod->bsnap);
  }
  return(0);
}

static PetscErrorCode KSPGuessDestroy_POD(KSPGuess guess)
{
  KSPGuessPOD    *pod = (KSPGuessPOD*)guess->data;
  PetscErrorCode ierr;

  PetscFree6(pod->corr,pod->eigs,pod->eigv,pod->iwork,
             pod->yhay,pod->low);
#if defined(PETSC_USE_COMPLEX)
  PetscFree(pod->rwork);
#endif
  /* need to wait for completion before destroying dots_iallreduce */
  if (pod->ndots_iallreduce) {
    MPI_Wait(&pod->req_iallreduce,MPI_STATUS_IGNORE);
  }
  PetscFree(pod->dots_iallreduce);
  PetscFree(pod->swork);
  VecDestroyVecs(pod->maxn,&pod->bsnap);
  VecDestroyVecs(pod->maxn,&pod->xsnap);
  PetscFree(pod);
  return(0);
}

static PetscErrorCode KSPGuessUpdate_POD(KSPGuess,Vec,Vec);

static PetscErrorCode KSPGuessFormGuess_POD(KSPGuess guess,Vec b,Vec x)
{
  KSPGuessPOD  *pod = (KSPGuessPOD*)guess->data;
  PetscScalar  one = 1, zero = 0, *array;
  PetscBLASInt bN,ione = 1,bNen,lierr;
  PetscInt     i;

  PetscCitationsRegister(citation,&cited);
  if (pod->ndots_iallreduce) { /* complete communication and project the linear system */
    KSPGuessUpdate_POD(guess,NULL,NULL);
  }
  if (!pod->nen) return(0);
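  /* The sequential work vector bsnap[curr] temporarily wraps the local array of the parallel
     vector (VecPlaceArray/VecResetArray), so that VecMDot only computes local contributions;
     the global sum is obtained with the explicit MPIU_Allreduce below */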
  /* b_low = S * V^T * X^T * b */
  VecGetArrayRead(b,(const PetscScalar**)&array);
  VecPlaceArray(pod->bsnap[pod->curr],array);
  VecRestoreArrayRead(b,(const PetscScalar**)&array);
  VecMDot(pod->bsnap[pod->curr],pod->n,pod->xsnap,pod->swork);
  VecResetArray(pod->bsnap[pod->curr]);
  MPIU_Allreduce(pod->swork,pod->swork + pod->n,pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess));
  PetscBLASIntCast(pod->n,&bN);
  PetscBLASIntCast(pod->nen,&bNen);
  PetscStackCallBLAS("BLASgemv",BLASgemv_("T",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->swork+pod->n,&ione,&zero,pod->swork,&ione));
  if (pod->monitor) {
    PetscPrintf(PetscObjectComm((PetscObject)guess)," KSPGuessPOD alphas = ");
    for (i=0; i<pod->nen; i++) {
#if defined(PETSC_USE_COMPLEX)
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g + %g i",(double)PetscRealPart(pod->swork[i]),(double)PetscImaginaryPart(pod->swork[i]));
#else
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g ",(double)pod->swork[i]);
#endif
    }
    PetscPrintf(PetscObjectComm((PetscObject)guess),"\n");
  }
  /* A_low x_low = b_low */
  if (!pod->Aspd) { /* when A is SPD and used as inner product, the low dimensional operator is the identity and no solve is needed */
    PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&bNen,&bNen,pod->low,&bNen,pod->iwork,&lierr));
    if (lierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)lierr);
    PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_("N",&bNen,&ione,pod->low,&bNen,pod->iwork,pod->swork,&bNen,&lierr));
    if (lierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRS Lapack routine %d",(int)lierr);
  }
  /* x = X * V * S * x_low */
  PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->swork,&ione,&zero,pod->swork+pod->n,&ione));
  if (pod->monitor) {
    PetscPrintf(PetscObjectComm((PetscObject)guess)," KSPGuessPOD sol = ");
    for (i=0; i<pod->nen; i++) {
#if defined(PETSC_USE_COMPLEX)
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g + %g i",(double)PetscRealPart(pod->swork[i+pod->n]),(double)PetscImaginaryPart(pod->swork[i+pod->n]));
#else
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g ",(double)pod->swork[i+pod->n]);
#endif
    }
    PetscPrintf(PetscObjectComm((PetscObject)guess),"\n");
  }
  VecGetArray(x,&array);
  VecPlaceArray(pod->bsnap[pod->curr],array);
  VecRestoreArray(x,&array);
  VecSet(pod->bsnap[pod->curr],0);
  VecMAXPY(pod->bsnap[pod->curr],pod->n,pod->swork+pod->n,pod->xsnap);
  VecResetArray(pod->bsnap[pod->curr]);
  PetscObjectStateIncrease((PetscObject)x);
  return(0);
}

static PetscErrorCode KSPGuessUpdate_POD(KSPGuess guess, Vec b, Vec x)
{
  KSPGuessPOD  *pod = (KSPGuessPOD*)guess->data;
  PetscScalar  one = 1, zero = 0,*array;
  PetscReal    toten, parten, reps = 0; /* dlamch? */
  PetscBLASInt bN,lierr,idummy;
  PetscInt     i;

  if (pod->ndots_iallreduce) goto complete_request;
  pod->n = pod->n < pod->maxn ? pod->n+1 : pod->maxn;
  VecCopy(x,pod->xsnap[pod->curr]);
  VecGetArray(pod->bsnap[pod->curr],&array);
  VecPlaceArray(b,array);
  VecRestoreArray(pod->bsnap[pod->curr],&array);
  KSP_MatMult(guess->ksp,guess->A,x,b);
  VecResetArray(b);
  PetscObjectStateIncrease((PetscObject)pod->bsnap[pod->curr]);
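  /* Compute the new row/column of the snapshot correlation matrix (and of X^H * A * X, used for
     the Galerkin projection, when the operator is not the inner product). Local dot products are
     stored in swork[0,3n); the globally reduced values end up in swork[3n,6n). With MPI_Iallreduce
     the reduction is only started here and completed at the next call (complete_request below) */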
  if (pod->Aspd) {
    VecMDot(pod->xsnap[pod->curr],pod->n,pod->bsnap,pod->swork);
#if !defined(PETSC_HAVE_MPI_IALLREDUCE)
    MPIU_Allreduce(pod->swork,pod->swork + 3*pod->n,pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess));
#else
    MPI_Iallreduce(pod->swork,pod->dots_iallreduce,pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess),&pod->req_iallreduce);
    pod->ndots_iallreduce = 1;
#endif
  } else {
    PetscBool herm;

    /* TODO: we may want to use a user-defined dot for the correlation matrix */
    VecMDot(pod->xsnap[pod->curr],pod->n,pod->xsnap,pod->swork);
    VecMDot(pod->bsnap[pod->curr],pod->n,pod->xsnap,pod->swork + pod->n);
#if defined(PETSC_USE_COMPLEX)
    MatGetOption(guess->A,MAT_HERMITIAN,&herm);
#else
    MatGetOption(guess->A,MAT_SYMMETRIC,&herm);
#endif
    if (!herm) {
      VecMDot(pod->xsnap[pod->curr],pod->n,pod->bsnap,pod->swork + 2*pod->n);
#if !defined(PETSC_HAVE_MPI_IALLREDUCE)
      MPIU_Allreduce(pod->swork,pod->swork + 3*pod->n,3*pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess));
#else
      MPI_Iallreduce(pod->swork,pod->dots_iallreduce,3*pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess),&pod->req_iallreduce);
      pod->ndots_iallreduce = 3;
#endif
    } else {
#if !defined(PETSC_HAVE_MPI_IALLREDUCE)
      MPIU_Allreduce(pod->swork,pod->swork + 3*pod->n,2*pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess));
      for (i=0;i<pod->n;i++) pod->swork[5*pod->n + i] = pod->swork[4*pod->n + i];
#else
      MPI_Iallreduce(pod->swork,pod->dots_iallreduce,2*pod->n,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess),&pod->req_iallreduce);
      pod->ndots_iallreduce = 2;
#endif
    }
  }
  if (pod->ndots_iallreduce) return(0);

complete_request:
  if (pod->ndots_iallreduce) {
    MPI_Wait(&pod->req_iallreduce,MPI_STATUS_IGNORE);
    switch (pod->ndots_iallreduce) {
    case 3:
      for (i=0;i<pod->n;i++) pod->swork[3*pod->n + i] = pod->dots_iallreduce[           i];
      for (i=0;i<pod->n;i++) pod->swork[4*pod->n + i] = pod->dots_iallreduce[  pod->n + i];
      for (i=0;i<pod->n;i++) pod->swork[5*pod->n + i] = pod->dots_iallreduce[2*pod->n + i];
      break;
    case 2:
      for (i=0;i<pod->n;i++) pod->swork[3*pod->n + i] = pod->dots_iallreduce[         i];
      for (i=0;i<pod->n;i++) pod->swork[4*pod->n + i] = pod->dots_iallreduce[pod->n + i];
      for (i=0;i<pod->n;i++) pod->swork[5*pod->n + i] = pod->dots_iallreduce[pod->n + i];
      break;
    case 1:
      for (i=0;i<pod->n;i++) pod->swork[3*pod->n + i] = pod->dots_iallreduce[i];
      break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)guess),PETSC_ERR_PLIB,"Invalid number of outstanding dots operations: %D",pod->ndots_iallreduce);
      break;
    }
  }
  pod->ndots_iallreduce = 0;

  /* correlation matrix and Y^H A Y (Galerkin) */
  for (i=0;i<pod->n;i++) {
    pod->corr[pod->curr*pod->maxn+i] = pod->swork[3*pod->n + i];
    pod->corr[i*pod->maxn+pod->curr] = PetscConj(pod->swork[3*pod->n + i]);
    if (!pod->Aspd) {
      pod->yhay[pod->curr*pod->maxn+i] = pod->swork[4*pod->n + i];
      pod->yhay[i*pod->maxn+pod->curr] = PetscConj(pod->swork[5*pod->n + i]);
    }
  }
  /* xSYEVX overwrites its input, so copy the active n x n block of the correlation matrix
     (stored with leading dimension maxn) into swork (leading dimension n) */
  for (i=0;i<pod->n;i++) {
    PetscInt j;
    for (j=i;j<pod->n;j++) pod->swork[i*pod->n+j] = pod->corr[i*pod->maxn+j];
  }
  PetscBLASIntCast(pod->n,&bN);
#if !defined(PETSC_USE_COMPLEX)
  PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->swork,&bN,
                     &reps,&reps,&idummy,&idummy,
                     &reps,&idummy,pod->eigs,pod->eigv,&bN,
                     pod->swork+bN*bN,&pod->lwork,pod->iwork,pod->iwork+5*bN,&lierr));
#else
  PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->swork,&bN,
                     &reps,&reps,&idummy,&idummy,
                     &reps,&idummy,pod->eigs,pod->eigv,&bN,
                     pod->swork+bN*bN,&pod->lwork,pod->rwork,pod->iwork,pod->iwork+5*bN,&lierr));
#endif
  if (lierr<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYEV Lapack routine: illegal argument %d",-(int)lierr);
  else if (lierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in SYEV Lapack routine: %d eigenvectors failed to converge",(int)lierr);

  /* dimension of the lower dimensional system */
  for (i=0,toten=0;i<pod->n;i++) {
    pod->eigs[i] = PetscAbsReal(pod->eigs[i]);
    toten       += pod->eigs[i];
  }
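  /* xSYEVX returns the eigenvalues in ascending order: scan from the largest one downwards and
     retain eigenvectors until the captured fraction of the total energy exceeds 1 - tol */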
  pod->nen = 0;
  for (i=pod->n-1,parten=0;i>=0 && toten > 0;i--) {
    pod->nen++;
    parten += pod->eigs[i];
    if (parten/toten > 1.0 - pod->tol) break;
  }
  pod->st = pod->n - pod->nen;

  /* Compute eigv = V * S, with S = diag(1/sqrt(lambda)), so that the POD basis Y = X * V * S
     is orthonormal with respect to the inner product used for the correlation matrix */
  for (i=pod->st;i<pod->n;i++) {
    const PetscReal v  = 1.0/PetscSqrtReal(pod->eigs[i]);
    const PetscInt  st = pod->n*i;
    PetscInt        j;

    for (j=0;j<pod->n;j++) pod->eigv[st+j] *= v;
  }

  /* compute S * V^T * X^T * A * X * V * S if needed */
  if (pod->nen && !pod->Aspd) {
    PetscBLASInt bNen,bMaxN;
    PetscInt     st = pod->st*pod->n;

    PetscBLASIntCast(pod->nen,&bNen);
    PetscBLASIntCast(pod->maxn,&bMaxN);
    PetscStackCallBLAS("BLASgemm",BLASgemm_("T","N",&bNen,&bN,&bN,&one,pod->eigv+st,&bN,pod->yhay,&bMaxN,&zero,pod->swork,&bNen));
    PetscStackCallBLAS("BLASgemm",BLASgemm_("N","N",&bNen,&bNen,&bN,&one,pod->swork,&bNen,pod->eigv+st,&bN,&zero,pod->low,&bNen));
  }

  if (pod->monitor) {
    PetscPrintf(PetscObjectComm((PetscObject)guess)," KSPGuessPOD: basis %D, energy fractions = ",pod->nen);
    for (i=pod->n-1;i>=0;i--) {
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%1.6e (%d) ",pod->eigs[i]/toten,i >= pod->st ? 1 : 0);
    }
    PetscPrintf(PetscObjectComm((PetscObject)guess),"\n");
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<pod->n;i++) {
      Vec          v;
      PetscInt     j;
      PetscBLASInt bNen,ione = 1;

      VecDuplicate(pod->xsnap[i],&v);
      VecCopy(pod->xsnap[i],v);
      PetscBLASIntCast(pod->nen,&bNen);
      PetscStackCallBLAS("BLASgemv",BLASgemv_("T",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->corr+pod->maxn*i,&ione,&zero,pod->swork,&ione));
      PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->swork,&ione,&zero,pod->swork+pod->n,&ione));
      for (j=0;j<pod->n;j++) pod->swork[j] = -pod->swork[pod->n+j];
      VecMAXPY(v,pod->n,pod->swork,pod->xsnap);
      VecDot(v,v,pod->swork);
      MPIU_Allreduce(pod->swork,pod->swork + 1,1,MPIU_SCALAR,MPI_SUM,PetscObjectComm((PetscObject)guess));
      PetscPrintf(PetscObjectComm((PetscObject)guess)," Error projection %D: %g (expected lower than %g)\n",i,(double)PetscRealPart(pod->swork[1]),(double)(toten-parten));
      VecDestroy(&v);
    }
#endif
  }
  /* new tip */
  pod->curr = (pod->curr+1)%pod->maxn;
  return(0);
}

static PetscErrorCode KSPGuessSetFromOptions_POD(KSPGuess guess)
{
  KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;

  PetscOptionsBegin(PetscObjectComm((PetscObject)guess),((PetscObject)guess)->prefix,"POD initial guess options","KSPGuess");
  PetscOptionsInt("-ksp_guess_pod_size","Number of snapshots",NULL,pod->maxn,&pod->maxn,NULL);
  PetscOptionsBool("-ksp_guess_pod_monitor","Monitor initial guess generator",NULL,pod->monitor,&pod->monitor,NULL);
  PetscOptionsReal("-ksp_guess_pod_tol","Tolerance to retain eigenvectors",NULL,pod->tol,&pod->tol,NULL);
  PetscOptionsBool("-ksp_guess_pod_Ainner","Use the operator as inner product (must be SPD)",NULL,pod->Aspd,&pod->Aspd,NULL);
  PetscOptionsEnd();
  return(0);
}

static PetscErrorCode KSPGuessView_POD(KSPGuess guess,PetscViewer viewer)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;
  PetscBool   isascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"Max size %D, tolerance %g, Ainner %d\n",pod->maxn,pod->tol,pod->Aspd);
  }
  return(0);
}

/*MC
   KSPGUESSPOD - Implements a proper orthogonal decomposition (POD) based Galerkin scheme for repeated linear system solves.

   The initial guess is obtained by solving a small, dense linear system obtained by a Galerkin projection onto a lower dimensional space spanned by the previous solutions.
   The number of solutions to retain and the energy tolerance used to construct the lower dimensional basis can be specified at the command line with -ksp_guess_pod_size <int> and -ksp_guess_pod_tol <real>.
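
   Options database keys (as registered in KSPGuessSetFromOptions_POD() above):
     -ksp_guess_pod_size <int>     - Number of snapshots
     -ksp_guess_pod_tol <real>     - Tolerance to retain eigenvectors
     -ksp_guess_pod_Ainner <bool>  - Use the operator as inner product (must be SPD)
     -ksp_guess_pod_monitor <bool> - Monitor initial guess generator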

   References:
.  1. - http://www.math.uni-konstanz.de/numerik/personen/volkwein/teaching/POD-Book.pdf

   Level: intermediate

.seealso: KSPGuess, KSPGuessType, KSPGuessCreate(), KSPSetGuess(), KSPGetGuess()
M*/
PetscErrorCode KSPGuessCreate_POD(KSPGuess guess)
{
  KSPGuessPOD *pod;

  PetscNewLog(guess,&pod);
  pod->maxn   = 10;
  pod->tol    = PETSC_MACHINE_EPSILON;
  guess->data = pod;

  guess->ops->setfromoptions = KSPGuessSetFromOptions_POD;
  guess->ops->destroy        = KSPGuessDestroy_POD;
  guess->ops->setup          = KSPGuessSetUp_POD;
  guess->ops->view           = KSPGuessView_POD;
  guess->ops->reset          = KSPGuessReset_POD;
  guess->ops->update         = KSPGuessUpdate_POD;
  guess->ops->formguess      = KSPGuessFormGuess_POD;
  return(0);
}
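
/*
   Illustrative usage sketch (not part of pod.c): selecting the POD guess on a KSP object.
   It assumes the KSPGuess interface of this PETSc version (KSPGetGuess(), KSPGuessSetType(),
   the KSPGUESSPOD type name) and the standard -ksp_guess_type option; the helper name
   UsePODGuess is hypothetical and only for illustration.

     #include <petscksp.h>

     PetscErrorCode UsePODGuess(KSP ksp)
     {
       KSPGuess       guess;
       PetscErrorCode ierr;

       ierr = KSPGetGuess(ksp,&guess);CHKERRQ(ierr);             // the guess object is owned by the KSP
       ierr = KSPGuessSetType(guess,KSPGUESSPOD);CHKERRQ(ierr);  // select this implementation
       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);              // picks up the -ksp_guess_pod_* options
       return 0;
     }

   The same selection can also be made entirely from the command line, e.g.
     -ksp_guess_type pod -ksp_guess_pod_size 10 -ksp_guess_pod_tol 1.e-10
*/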