/*
   BV orthogonalization routines.

   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2016, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.

   SLEPc is free software: you can redistribute it and/or modify it under the
   terms of version 3 of the GNU Lesser General Public License as published by
   the Free Software Foundation.

   SLEPc is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
   more details.

   You should have received a copy of the GNU Lesser General Public License
   along with SLEPc. If not, see <http://www.gnu.org/licenses/>.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/bvimpl.h>   /*I "slepcbv.h" I*/
#include <slepcblaslapack.h>

/*
   BVOrthogonalizeMGS1 - Compute one step of Modified Gram-Schmidt
*/
static PetscErrorCode BVOrthogonalizeMGS1(BV bv,PetscInt k,Vec v,PetscBool *which,PetscScalar *H)
{
  PetscInt    i;
  PetscScalar dot;
  Vec         vi,z;

  z = v;
  for (i=-bv->nc;i<k;i++) {
    if (which && i>=0 && !which[i]) continue;
    BVGetColumn(bv,i,&vi);
    /* h_i = ( v, v_i ) */
    if (bv->matrix) {
      BV_IPMatMult(bv,v);
      z = bv->Bx;
    }
    VecDot(z,vi,&dot);
    /* v <- v - h_i v_i */
    if (bv->indef) dot /= bv->omega[bv->nc+i];
    VecAXPY(v,-dot,vi);
    if (bv->indef) dot *= bv->omega[bv->nc+i];
    if (H) H[bv->nc+i] += dot;
    BVRestoreColumn(bv,i,&vi);
  }
  return(0);
}

/*
   BVOrthogonalizeCGS1 - Compute |v'| (estimated), |v| and one step of CGS with
   only one global synchronization
*/
PetscErrorCode BVOrthogonalizeCGS1(BV bv,PetscInt j,Vec v,PetscScalar *H,PetscReal *onorm,PetscReal *norm)
{
  PetscInt  i;
  PetscReal sum,nrm,beta;
  Vec       w=v;

  /* h = W^* v ; alpha = (v, v) */
  bv->k = j;
  if (onorm || norm) {
    if (!v) {
      bv->k++;
      BVGetColumn(bv,j,&w);
    }
    BVDotVec(bv,w,H);
    if (!v) {
      BVRestoreColumn(bv,j,&w);
      bv->k--;
      BV_SafeSqrt(bv,H[bv->nc+j],&beta);
    } else {
      BVNormVec(bv,w,NORM_2,&beta);
    }
  } else {
    if (!v) { BVDotColumn(bv,j,H); }
    else { BVDotVec(bv,w,H); }
  }

  /* q = v - V h */
  if (bv->indef) {
    for (i=0;i<bv->nc+j;i++) H[i] /= bv->omega[i];  /* apply inverse of signature */
  }
  if (!v) { BVMultColumn(bv,-1.0,1.0,j,H); }
  else { BVMultVec(bv,-1.0,1.0,w,H); }
  if (bv->indef) {
    for (i=0;i<bv->nc+j;i++) H[i] *= bv->omega[i];  /* revert signature */
  }

  /* compute |v| */
  if (onorm) *onorm = beta;
  if (bv->indef) {
    if (!v) { BVNormColumn(bv,j,NORM_2,&nrm); }
    else { BVNormVec(bv,w,NORM_2,&nrm); }
    if (norm) *norm = nrm;
    bv->omega[bv->nc+j] = (nrm<0.0)? -1.0: 1.0;
  } else if (norm) {
    /* estimate |v'| from |v| via |v'|^2 = |v|^2 - |h|^2, avoiding a second
       synchronization; recompute explicitly if the estimate is unreliable
       (non-positive due to cancellation) */
    sum = 0.0;
    for (i=0;i<bv->nc+j;i++) sum += PetscRealPart(H[i]*PetscConj(H[i]));
    *norm = beta*beta-sum;
    if (*norm <= 0.0) {
      if (!v) { BVNormColumn(bv,j,NORM_2,norm); }
      else { BVNormVec(bv,w,NORM_2,norm); }
    } else *norm = PetscSqrtReal(*norm);
  }
  return(0);
}

/*
   BVOrthogonalizeMGS - Orthogonalize with modified Gram-Schmidt
*/
static PetscErrorCode BVOrthogonalizeMGS(BV bv,PetscInt j,Vec v,PetscBool *which,PetscScalar *H,PetscReal *norm,PetscBool *lindep)
{
  PetscReal onrm,nrm;
  PetscInt  k,l;
  Vec       w;

  if (v) {
    w = v;
    k = bv->k;
  } else {
    BVGetColumn(bv,j,&w);
    k = j;
  }
  PetscMemzero(bv->h,(bv->nc+k)*sizeof(PetscScalar));
  switch (bv->orthog_ref) {

  case BV_ORTHOG_REFINE_IFNEEDED:
    /* first step */
    BVNormVec(bv,w,NORM_2,&onrm);
    BVOrthogonalizeMGS1(bv,k,w,which,bv->h);
    BVNormVec(bv,w,NORM_2,&nrm);
    /* ||q|| < eta ||h|| */
    l = 1;
    while (l<3 && nrm && nrm < bv->orthog_eta*onrm) {
      l++;
      onrm = nrm;
      BVOrthogonalizeMGS1(bv,k,w,which,bv->c);
      BVNormVec(bv,w,NORM_2,&nrm);
    }
    if (lindep) *lindep = PetscNot(nrm >= bv->orthog_eta*onrm);
    break;

  case BV_ORTHOG_REFINE_NEVER:
    BVOrthogonalizeMGS1(bv,k,w,which,bv->h);
    /* compute |v| */
    if (norm || lindep) {
      BVNormVec(bv,w,NORM_2,&nrm);
    }
    /* linear dependence check: just test for exactly zero norm */
    if (lindep) *lindep = PetscNot(nrm);
    break;

  case BV_ORTHOG_REFINE_ALWAYS:
    /* first step */
    BVOrthogonalizeMGS1(bv,k,w,which,bv->h);
    if (lindep) {
      BVNormVec(bv,w,NORM_2,&onrm);
    }
    /* second step */
    BVOrthogonalizeMGS1(bv,k,w,which,bv->h);
    if (norm || lindep) {
      BVNormVec(bv,w,NORM_2,&nrm);
    }
    if (lindep) *lindep = PetscNot(nrm && nrm >= bv->orthog_eta*onrm);
    break;
  }
  if (bv->indef) {
    BVNormVec(bv,w,NORM_2,&nrm);
    bv->omega[bv->nc+j] = (nrm<0.0)? -1.0: 1.0;
  }
  if (!v) { BVRestoreColumn(bv,j,&w); }
  if (norm) *norm = nrm;
  return(0);
}

/*
   BVOrthogonalizeCGS - Orthogonalize with classical Gram-Schmidt
*/
static PetscErrorCode BVOrthogonalizeCGS(BV bv,PetscInt j,Vec v,PetscScalar *H,PetscReal *norm,PetscBool *lindep)
{
  PetscReal onrm,nrm;
  PetscInt  i,k,l;

  if (v) k = bv->k;
  else k = j;
  switch (bv->orthog_ref) {

  case BV_ORTHOG_REFINE_IFNEEDED:
    BVOrthogonalizeCGS1(bv,k,v,bv->h,&onrm,&nrm);
    /* ||q|| < eta ||h|| */
    l = 1;
    while (l<3 && nrm && nrm < bv->orthog_eta*onrm) {
      l++;
      BVOrthogonalizeCGS1(bv,k,v,bv->c,&onrm,&nrm);
      for (i=0;i<bv->nc+k;i++) bv->h[i] += bv->c[i];
    }
    if (norm) *norm = nrm;
    if (lindep) *lindep = PetscNot(nrm >= bv->orthog_eta*onrm);
    break;

  case BV_ORTHOG_REFINE_NEVER:
    BVOrthogonalizeCGS1(bv,k,v,bv->h,NULL,NULL);
    /* compute |v| */
    if (norm || lindep) {
      if (v) { BVNormVec(bv,v,NORM_2,&nrm); }
      else { BVNormColumn(bv,k,NORM_2,&nrm); }
    }
    if (norm) *norm = nrm;
    /* linear dependence check: just test for exactly zero norm */
    if (lindep) *lindep = PetscNot(nrm);
    break;

  case BV_ORTHOG_REFINE_ALWAYS:
    BVOrthogonalizeCGS1(bv,k,v,bv->h,NULL,NULL);
    if (lindep) {
      BVOrthogonalizeCGS1(bv,k,v,bv->c,&onrm,&nrm);
      if (norm) *norm = nrm;
      *lindep = PetscNot(nrm && nrm >= bv->orthog_eta*onrm);
    } else {
      BVOrthogonalizeCGS1(bv,k,v,bv->c,NULL,norm);
    }
    for (i=0;i<bv->nc+k;i++) bv->h[i] += bv->c[i];
    break;
  }
  return(0);
}

/*@
   BVOrthogonalizeVec - Orthogonalize a given vector with respect to all
   active columns.

   Collective on BV

   Input Parameters:
+  bv - the basis vectors context
-  v  - the vector

   Output Parameters:
+  H      - (optional) coefficients computed during orthogonalization
.  norm   - (optional) norm of the vector after being orthogonalized
-  lindep - (optional) flag indicating that refinement did not improve the
            quality of orthogonalization

   Notes:
   This function is equivalent to BVOrthogonalizeColumn(), but it orthogonalizes
   a vector given as an argument rather than one of the BV columns. The vector
   is orthogonalized against all active columns (see the usage sketch after the
   function body below).

   Level: advanced

.seealso: BVOrthogonalizeColumn(), BVSetOrthogonalization(), BVSetActiveColumns()
@*/
PetscErrorCode BVOrthogonalizeVec(BV bv,Vec v,PetscScalar *H,PetscReal *norm,PetscBool *lindep)
{
  PetscInt i,ksave,lsave;

  BVCheckSizes(bv,1);
  PetscLogEventBegin(BV_OrthogonalizeVec,bv,0,0,0);
  ksave = bv->k;
  lsave = bv->l;
  bv->l = -bv->nc;  /* must also orthogonalize against constraints and leading columns */
  BV_AllocateCoeffs(bv);
  BV_AllocateSignature(bv);
  switch (bv->orthog_type) {
  case BV_ORTHOG_CGS:
    BVOrthogonalizeCGS(bv,0,v,H,norm,lindep);
    break;
  case BV_ORTHOG_MGS:
    BVOrthogonalizeMGS(bv,0,v,NULL,H,norm,lindep);
    break;
  }
  bv->k = ksave;
  bv->l = lsave;
  if (H) for (i=bv->l;i<bv->k;i++) H[i-bv->l] = bv->h[bv->nc+i];
  PetscLogEventEnd(BV_OrthogonalizeVec,bv,0,0,0);
  return(0);
}
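
/*
   A minimal usage sketch (illustrative, not from the original source): it
   assumes a BV object X has already been created, filled with vectors, and
   had its active columns set, and that the array h can hold one coefficient
   per active column. Error-checking macros are omitted, matching the style
   of this file.

     Vec         w;
     PetscScalar h[64];
     PetscReal   norm;
     PetscBool   lindep;

     BVCreateVec(X,&w);
     VecSetRandom(w,NULL);
     BVOrthogonalizeVec(X,w,h,&norm,&lindep);
     if (!lindep) VecScale(w,1.0/norm);
     VecDestroy(&w);
*/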

/*@
   BVOrthogonalizeColumn - Orthogonalize one of the column vectors with respect to
   the previous ones.

   Collective on BV

   Input Parameters:
+  bv - the basis vectors context
-  j  - index of column to be orthogonalized

   Output Parameters:
+  H      - (optional) coefficients computed during orthogonalization
.  norm   - (optional) norm of the vector after being orthogonalized
-  lindep - (optional) flag indicating that refinement did not improve the
            quality of orthogonalization

   Notes:
   This function applies an orthogonal projector to project vector V[j] onto
   the orthogonal complement of the span of the columns of V[0..j-1],
   where V[.] are the vectors of BV. The columns V[0..j-1] are assumed to be
   mutually orthonormal.

   Leading columns V[0..l-1] also participate in the orthogonalization.

   If a non-standard inner product has been specified with BVSetMatrix(),
   then the vector is B-orthogonalized, using the non-standard inner product
   defined by matrix B. The output vector satisfies V[j]'*B*V[0..j-1] = 0.

   This routine does not normalize the resulting vector.

   Level: advanced

.seealso: BVSetOrthogonalization(), BVSetMatrix(), BVSetActiveColumns(), BVOrthogonalize(), BVOrthogonalizeVec()
@*/
PetscErrorCode BVOrthogonalizeColumn(BV bv,PetscInt j,PetscScalar *H,PetscReal *norm,PetscBool *lindep)
{
  PetscInt i,ksave,lsave;

  BVCheckSizes(bv,1);
  if (j<0) SETERRQ(PetscObjectComm((PetscObject)bv),PETSC_ERR_ARG_OUTOFRANGE,"Index j must be non-negative");
  if (j>=bv->m) SETERRQ2(PetscObjectComm((PetscObject)bv),PETSC_ERR_ARG_OUTOFRANGE,"Index j=%D but BV only has %D columns",j,bv->m);
  PetscLogEventBegin(BV_OrthogonalizeVec,bv,0,0,0);
  ksave = bv->k;
  lsave = bv->l;
  bv->l = -bv->nc;  /* must also orthogonalize against constraints and leading columns */
  BV_AllocateCoeffs(bv);
  BV_AllocateSignature(bv);
  switch (bv->orthog_type) {
  case BV_ORTHOG_CGS:
    BVOrthogonalizeCGS(bv,j,NULL,H,norm,lindep);
    break;
  case BV_ORTHOG_MGS:
    BVOrthogonalizeMGS(bv,j,NULL,NULL,H,norm,lindep);
    break;
  }
  bv->k = ksave;
  bv->l = lsave;
  if (H) for (i=bv->l;i<j;i++) H[i-bv->l] = bv->h[bv->nc+i];
  PetscLogEventEnd(BV_OrthogonalizeVec,bv,0,0,0);
  return(0);
}
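
/*
   A minimal usage sketch (illustrative, not from the original source): expand
   an orthonormal basis one column at a time, as is typically done in
   Arnoldi/Lanczos-type iterations. It assumes a BV object X whose column j has
   just been filled with a new vector, and that the array h has length at least
   j+1. Error-checking macros are omitted, matching the style of this file.

     PetscScalar h[64];
     PetscReal   norm;
     PetscBool   lindep;

     BVOrthogonalizeColumn(X,j,h,&norm,&lindep);
     if (!lindep) BVScaleColumn(X,j,1.0/norm);
*/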

/*@
   BVOrthogonalizeSomeColumn - Orthogonalize one of the column vectors with
   respect to some of the previous ones.

   Collective on BV

   Input Parameters:
+  bv    - the basis vectors context
.  j     - index of column to be orthogonalized
-  which - logical array indicating selected columns

   Output Parameters:
+  H      - (optional) coefficients computed during orthogonalization
.  norm   - (optional) norm of the vector after being orthogonalized
-  lindep - (optional) flag indicating that refinement did not improve the
            quality of orthogonalization

   Notes:
   This function is similar to BVOrthogonalizeColumn(), but V[j] is
   orthogonalized only against those columns V[i] having which[i]=PETSC_TRUE.
   The length of array which must be at least j.

   The use of this operation is restricted to the MGS orthogonalization type.

   Level: advanced

.seealso: BVOrthogonalizeColumn(), BVSetOrthogonalization()
@*/
PetscErrorCode BVOrthogonalizeSomeColumn(BV bv,PetscInt j,PetscBool *which,PetscScalar *H,PetscReal *norm,PetscBool *lindep)
{
  PetscInt i,ksave,lsave;

  BVCheckSizes(bv,1);
  if (j<0) SETERRQ(PetscObjectComm((PetscObject)bv),PETSC_ERR_ARG_OUTOFRANGE,"Index j must be non-negative");
  if (j>=bv->m) SETERRQ2(PetscObjectComm((PetscObject)bv),PETSC_ERR_ARG_OUTOFRANGE,"Index j=%D but BV only has %D columns",j,bv->m);
  if (bv->orthog_type!=BV_ORTHOG_MGS) SETERRQ(PetscObjectComm((PetscObject)bv),PETSC_ERR_SUP,"Operation only available for MGS orthogonalization");
  PetscLogEventBegin(BV_OrthogonalizeVec,bv,0,0,0);
  ksave = bv->k;
  lsave = bv->l;
  bv->l = -bv->nc;  /* must also orthogonalize against constraints and leading columns */
  BV_AllocateCoeffs(bv);
  BV_AllocateSignature(bv);
  BVOrthogonalizeMGS(bv,j,NULL,which,H,norm,lindep);
  bv->k = ksave;
  bv->l = lsave;
  if (H) for (i=bv->l;i<j;i++) H[i-bv->l] = bv->h[bv->nc+i];
  PetscLogEventEnd(BV_OrthogonalizeVec,bv,0,0,0);
  return(0);
}
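
/*
   A minimal usage sketch (illustrative, not from the original source):
   orthogonalize column j of a BV X against the even-numbered previous columns
   only. This requires the MGS orthogonalization type (see
   BVSetOrthogonalization()). Error-checking macros are omitted, matching the
   style of this file.

     PetscBool which[64];
     PetscReal norm;
     PetscInt  i;

     for (i=0;i<j;i++) which[i] = (i%2==0)? PETSC_TRUE: PETSC_FALSE;
     BVOrthogonalizeSomeColumn(X,j,which,NULL,&norm,NULL);
*/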

/*
   Orthogonalize a set of vectors with Gram-Schmidt, column by column.
*/
static PetscErrorCode BVOrthogonalize_GS(BV V,Mat R)
{
  PetscScalar *r=NULL;
  PetscReal   norm;
  PetscInt    j,ldr;
  Vec         v;

  if (R) {
    MatGetSize(R,&ldr,NULL);
    MatDenseGetArray(R,&r);
  }
  if (V->matrix) {
    BV_AllocateCachedBV(V);
    BVSetActiveColumns(V->cached,V->l,V->k);
  }
  for (j=V->l;j<V->k;j++) {
    if (R) {
      BVOrthogonalizeColumn(V,j,r+j*ldr+V->l,&norm,NULL);
      r[j+j*ldr] = norm;
    } else {
      BVOrthogonalizeColumn(V,j,NULL,&norm,NULL);
    }
    if (!norm) SETERRQ(PETSC_COMM_SELF,1,"Breakdown in BVOrthogonalize due to a linearly dependent column");
    if (V->matrix) {  /* fill cached BV */
      BVGetColumn(V->cached,j,&v);
      VecCopy(V->Bx,v);
      BVRestoreColumn(V->cached,j,&v);
    }
    BVScaleColumn(V,j,1.0/norm);
  }
  if (R) { MatDenseRestoreArray(R,&r); }
  return(0);
}

/*
   Compute the upper Cholesky factor in R and its inverse in S.
*/
static PetscErrorCode MatCholeskyFactorInvert(Mat R,PetscInt l,Mat *S)
{
#if defined(PETSC_MISSING_LAPACK_POTRF) || defined(SLEPC_MISSING_LAPACK_TRTRI)
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"POTRF/TRTRI - Lapack routine is unavailable");
#else
  PetscInt     i,n,m,ld;
  PetscScalar  *pR,*pS;
  PetscBLASInt info,n_,l_,m_,ld_;

  MatGetSize(R,&m,NULL);
  n = m-l;
  PetscBLASIntCast(m,&m_);
  PetscBLASIntCast(l,&l_);
  PetscBLASIntCast(n,&n_);
  ld  = m;
  ld_ = m_;
  MatCreateSeqDense(PETSC_COMM_SELF,ld,ld,NULL,S);
  MatDenseGetArray(R,&pR);
  MatDenseGetArray(*S,&pS);

  /* save a copy of matrix in S */
  for (i=l;i<m;i++) {
    PetscMemcpy(pS+i*ld+l,pR+i*ld+l,n*sizeof(PetscScalar));
  }

  /* compute upper Cholesky factor in R */
  PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_("U",&n_,pR+l*ld+l,&ld_,&info));
  PetscLogFlops((1.0*n*n*n)/3.0);

  if (info) {  /* LAPACKpotrf failed, retry on diagonally perturbed matrix */
    for (i=l;i<m;i++) {
      PetscMemcpy(pR+i*ld+l,pS+i*ld+l,n*sizeof(PetscScalar));
      pR[i+i*ld] += 50.0*PETSC_MACHINE_EPSILON;
    }
    PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_("U",&n_,pR+l*ld+l,&ld_,&info));
    if (info) SETERRQ1(PETSC_COMM_SELF,1,"Error in Cholesky factorization, info=%D",(PetscInt)info);
    PetscLogFlops((1.0*n*n*n)/3.0);
  }

  /* compute S = inv(R) */
  PetscMemzero(pS,m*m*sizeof(PetscScalar));
  for (i=l;i<m;i++) {
    PetscMemcpy(pS+i*ld+l,pR+i*ld+l,n*sizeof(PetscScalar));
  }
  PetscStackCallBLAS("LAPACKtrtri",LAPACKtrtri_("U","N",&n_,pS+l*ld+l,&ld_,&info));
  if (info) SETERRQ1(PETSC_COMM_SELF,1,"Error in xTRTRI, info=%D",(PetscInt)info);
  PetscLogFlops(1.0*n*n*n);

  /* zero out entries below the diagonal */
  for (i=l;i<m-1;i++) {
    PetscMemzero(pR+i*ld+i+1,(m-i-1)*sizeof(PetscScalar));
    PetscMemzero(pS+i*ld+i+1,(m-i-1)*sizeof(PetscScalar));
  }
  MatDenseRestoreArray(R,&pR);
  MatDenseRestoreArray(*S,&pS);
  return(0);
#endif
}

/*
   Orthogonalize a set of vectors with Cholesky: R=chol(V'*V), Q=V*inv(R)
*/
static PetscErrorCode BVOrthogonalize_Chol(BV V,Mat Rin)
{
  Mat S,R=Rin;

  if (!Rin) {
    MatCreateSeqDense(PETSC_COMM_SELF,V->k,V->k,NULL,&R);
  }
  BVDot(V,V,R);
  MatCholeskyFactorInvert(R,V->l,&S);
  BVMultInPlace(V,S,V->l,V->k);
  MatDestroy(&S);
  if (!Rin) {
    MatDestroy(&R);
  }
  return(0);
}

/*@
   BVOrthogonalize - Orthogonalize all columns (except leading ones), that is,
   compute the QR decomposition.

   Collective on BV

   Input Parameter:
.  V - basis vectors

   Output Parameters:
+  V - the modified basis vectors
-  R - a sequential dense matrix (or NULL)

   Notes:
   On input, matrix R must be a sequential dense Mat, with at least as many rows
   and columns as the number of active columns of V. The output satisfies
   V0 = V*R (where V0 represents the input V) and V'*V = I. NULL can be passed
   if R is not required.

   If V has leading columns, then they are not modified (they are assumed to be
   already orthonormal) and the corresponding part of R is not referenced.

   The method to be used for block orthogonalization can be set with
   BVSetOrthogonalization(). If set to GS, the computation is done column by
   column with successive calls to BVOrthogonalizeColumn(). A usage sketch is
   given after the function body below.

   If V is rank-deficient or very ill-conditioned, that is, one or more columns are
   (almost) linearly dependent with respect to the rest, then the algorithm may
   break down or result in larger numerical error. Linearly dependent columns are
   essentially replaced by random directions, and the corresponding diagonal entry
   in R is set to (nearly) zero.

   Level: intermediate

.seealso: BVOrthogonalizeColumn(), BVOrthogonalizeVec(), BVSetActiveColumns(), BVSetOrthogonalization(), BVOrthogBlockType
@*/
PetscErrorCode BVOrthogonalize(BV V,Mat R)
{
  PetscBool match;
  PetscInt  m,n;

  BVCheckSizes(V,1);
  if (R) {
    if (V->l>0 && V->orthog_block==BV_ORTHOG_BLOCK_GS) SETERRQ(PetscObjectComm((PetscObject)V),PETSC_ERR_SUP,"Cannot request matrix R in Gram-Schmidt orthogonalization if l>0");
    PetscObjectTypeCompare((PetscObject)R,MATSEQDENSE,&match);
    if (!match) SETERRQ(PetscObjectComm((PetscObject)V),PETSC_ERR_SUP,"Mat argument must be of type seqdense");
    MatGetSize(R,&m,&n);
    if (m!=n) SETERRQ2(PetscObjectComm((PetscObject)V),PETSC_ERR_ARG_SIZ,"Mat argument is not square, it has %D rows and %D columns",m,n);
    if (n<V->k) SETERRQ2(PetscObjectComm((PetscObject)V),PETSC_ERR_ARG_SIZ,"Mat size %D is smaller than the number of BV active columns %D",n,V->k);
  }
  if (V->nc) SETERRQ(PetscObjectComm((PetscObject)V),PETSC_ERR_SUP,"Not implemented for BV with constraints, use BVOrthogonalizeColumn() instead");
  PetscLogEventBegin(BV_Orthogonalize,V,R,0,0);
  switch (V->orthog_block) {
  case BV_ORTHOG_BLOCK_GS:  /* proceed column by column with Gram-Schmidt */
    BVOrthogonalize_GS(V,R);
    break;
  case BV_ORTHOG_BLOCK_CHOL:
    BVOrthogonalize_Chol(V,R);
    /*if (V->ops->orthogonalize) {
      (*V->ops->orthogonalize)(V,R);
    }*/
    break;
  }
  PetscLogEventEnd(BV_Orthogonalize,V,R,0,0);
  PetscObjectStateIncrease((PetscObject)V);
  return(0);
}
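
/*
   A minimal usage sketch (illustrative, not from the original source): compute
   the QR factorization of the active columns of a BV X, assuming X has n
   active columns. On completion the columns of X are orthonormal and the
   original X equals the new X times R. Error-checking macros are omitted,
   matching the style of this file.

     Mat R;

     MatCreateSeqDense(PETSC_COMM_SELF,n,n,NULL,&R);
     BVOrthogonalize(X,R);
     MatDestroy(&R);
*/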