/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2020, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/
/*
   Basic DS routines
*/

#include <slepc/private/dsimpl.h>      /*I "slepcds.h" I*/

PetscFunctionList DSList = 0;
PetscBool         DSRegisterAllCalled = PETSC_FALSE;
PetscClassId      DS_CLASSID = 0;
PetscLogEvent     DS_Solve = 0,DS_Vectors = 0,DS_Synchronize = 0,DS_Other = 0;
static PetscBool  DSPackageInitialized = PETSC_FALSE;

const char *DSStateTypes[] = {"RAW","INTERMEDIATE","CONDENSED","TRUNCATED","DSStateType","DS_STATE_",0};
const char *DSParallelTypes[] = {"REDUNDANT","SYNCHRONIZED","DSParallelType","DS_PARALLEL_",0};
const char *DSMatName[DS_NUM_MAT] = {"A","B","C","T","D","Q","Z","X","Y","U","VT","W","E0","E1","E2","E3","E4","E5","E6","E7","E8","E9"};
DSMatType  DSMatExtra[DS_NUM_EXTRA] = {DS_MAT_E0,DS_MAT_E1,DS_MAT_E2,DS_MAT_E3,DS_MAT_E4,DS_MAT_E5,DS_MAT_E6,DS_MAT_E7,DS_MAT_E8,DS_MAT_E9};

/*@C
   DSFinalizePackage - This function destroys everything in the SLEPc interface
   to the DS package. It is called from SlepcFinalize().

   Level: developer

.seealso: SlepcFinalize()
@*/
PetscErrorCode DSFinalizePackage(void)
{
  PetscFunctionListDestroy(&DSList);
  DSPackageInitialized = PETSC_FALSE;
  DSRegisterAllCalled  = PETSC_FALSE;
  return(0);
}

/*@C
   DSInitializePackage - This function initializes everything in the DS package.
   It is called from PetscDLLibraryRegister() when using dynamic libraries, and
   on the first call to DSCreate() when using static libraries.

   Level: developer

.seealso: SlepcInitialize()
@*/
PetscErrorCode DSInitializePackage(void)
{
  char          logList[256];
  PetscBool     opt,pkg;
  PetscClassId  classids[1];

  if (DSPackageInitialized) return(0);
  DSPackageInitialized = PETSC_TRUE;
  /* Register Classes */
  PetscClassIdRegister("Direct Solver",&DS_CLASSID);
  /* Register Constructors */
  DSRegisterAll();
  /* Register Events */
  PetscLogEventRegister("DSSolve",DS_CLASSID,&DS_Solve);
  PetscLogEventRegister("DSVectors",DS_CLASSID,&DS_Vectors);
  PetscLogEventRegister("DSSynchronize",DS_CLASSID,&DS_Synchronize);
  PetscLogEventRegister("DSOther",DS_CLASSID,&DS_Other);
  /* Process Info */
  classids[0] = DS_CLASSID;
  PetscInfoProcessClass("ds",1,&classids[0]);
  /* Process summary exclusions */
  PetscOptionsGetString(NULL,NULL,"-log_exclude",logList,sizeof(logList),&opt);
  if (opt) {
    PetscStrInList("ds",logList,',',&pkg);
    if (pkg) { PetscLogEventDeactivateClass(DS_CLASSID); }
  }
  /* Register package finalizer */
  PetscRegisterFinalize(DSFinalizePackage);
  return(0);
}

/*@
   DSCreate - Creates a DS context.

   Collective

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newds - location to put the DS context

   Level: beginner

   Note:
   DS objects are not intended for normal users but only for
   advanced users that, for instance, implement their own solvers.

.seealso: DSDestroy(), DS
@*/
PetscErrorCode DSCreate(MPI_Comm comm,DS *newds)
{
  DS       ds;
  PetscInt i;

  *newds = 0;
  DSInitializePackage();
  SlepcHeaderCreate(ds,DS_CLASSID,"DS","Direct Solver (or Dense System)","DS",comm,DSDestroy,DSView);

  ds->state    = DS_STATE_RAW;
  ds->method   = 0;
  ds->compact  = PETSC_FALSE;
  ds->refined  = PETSC_FALSE;
  ds->extrarow = PETSC_FALSE;
  ds->ld       = 0;
  ds->l        = 0;
  ds->n        = 0;
  ds->m        = 0;
  ds->k        = 0;
  ds->t        = 0;
  ds->bs       = 1;
  ds->sc       = NULL;
  ds->pmode    = DS_PARALLEL_REDUNDANT;

  for (i=0;i<DS_NUM_MAT;i++) {
    ds->mat[i]  = NULL;
    ds->rmat[i] = NULL;
    ds->omat[i] = NULL;
  }
  ds->perm   = NULL;
  ds->data   = NULL;
  ds->scset  = PETSC_FALSE;
  ds->work   = NULL;
  ds->rwork  = NULL;
  ds->iwork  = NULL;
  ds->lwork  = 0;
  ds->lrwork = 0;
  ds->liwork = 0;

  *newds = ds;
  return(0);
}
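
/*
   Minimal usage sketch (not part of the library): how an advanced user could drive a
   DS object directly. It assumes the standard DS workflow documented in this file
   (DSSetType(), DSAllocate(), DSDestroy()) plus DSSolve(), DSGetArray() and
   DSRestoreArray() as documented in their own manual pages; the size 10 and the
   DSNHEP type are arbitrary illustration choices.

     DS          ds;
     PetscScalar *A,eigr[10],eigi[10];

     DSCreate(PETSC_COMM_SELF,&ds);
     DSSetType(ds,DSNHEP);
     DSAllocate(ds,10);
     DSSetDimensions(ds,...);            set the active size n (arguments omitted)
     DSGetArray(ds,DS_MAT_A,&A);
     ...  fill the projected matrix in A (leading dimension 10)  ...
     DSRestoreArray(ds,DS_MAT_A,&A);
     DSSolve(ds,eigr,eigi);
     DSDestroy(&ds);
*/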

/*@C
   DSSetOptionsPrefix - Sets the prefix used for searching for all
   DS options in the database.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  prefix - the prefix string to prepend to all DS option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.

   Level: advanced

.seealso: DSAppendOptionsPrefix()
@*/
PetscErrorCode DSSetOptionsPrefix(DS ds,const char *prefix)
{
  PetscObjectSetOptionsPrefix((PetscObject)ds,prefix);
  return(0);
}

/*@C
   DSAppendOptionsPrefix - Appends to the prefix used for searching for all
   DS options in the database.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  prefix - the prefix string to prepend to all DS option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.seealso: DSSetOptionsPrefix()
@*/
PetscErrorCode DSAppendOptionsPrefix(DS ds,const char *prefix)
{
  PetscObjectAppendOptionsPrefix((PetscObject)ds,prefix);
  return(0);
}

/*@C
   DSGetOptionsPrefix - Gets the prefix used for searching for all
   DS options in the database.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  prefix - pointer to the prefix string used

   Note:
   On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: DSSetOptionsPrefix(), DSAppendOptionsPrefix()
@*/
PetscErrorCode DSGetOptionsPrefix(DS ds,const char *prefix[])
{
  PetscObjectGetOptionsPrefix((PetscObject)ds,prefix);
  return(0);
}

/*@C
   DSSetType - Selects the type for the DS object.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  type - a known type

   Level: intermediate

.seealso: DSGetType()
@*/
PetscErrorCode DSSetType(DS ds,DSType type)
{
  PetscErrorCode ierr,(*r)(DS);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)ds,type,&match);
  if (match) return(0);

  PetscFunctionListFind(DSList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested DS type %s",type);

  PetscMemzero(ds->ops,sizeof(struct _DSOps));

  PetscObjectChangeTypeName((PetscObject)ds,type);
  (*r)(ds);
  return(0);
}

/*@C
   DSGetType - Gets the DS type name (as a string) from the DS context.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  type - name of the direct solver

   Level: intermediate

.seealso: DSSetType()
@*/
PetscErrorCode DSGetType(DS ds,DSType *type)
{
  *type = ((PetscObject)ds)->type_name;
  return(0);
}

/*@
   DSDuplicate - Creates a new direct solver object with the same options as
   an existing one.

   Collective on ds

   Input Parameter:
.  ds - direct solver context

   Output Parameter:
.  dsnew - location to put the new DS

   Notes:
   DSDuplicate() DOES NOT COPY the matrices, and the new DS does not even have
   internal arrays allocated. Use DSAllocate() before using the new DS.

   The sorting criterion options are not copied, see DSSetSlepcSC().

   Level: intermediate

.seealso: DSCreate(), DSAllocate(), DSSetSlepcSC()
@*/
PetscErrorCode DSDuplicate(DS ds,DS *dsnew)
{
  DSCreate(PetscObjectComm((PetscObject)ds),dsnew);
  DSSetType(*dsnew,((PetscObject)ds)->type_name);
  (*dsnew)->method   = ds->method;
  (*dsnew)->compact  = ds->compact;
  (*dsnew)->refined  = ds->refined;
  (*dsnew)->extrarow = ds->extrarow;
  (*dsnew)->bs       = ds->bs;
  (*dsnew)->pmode    = ds->pmode;
  return(0);
}
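
/*
   Sketch (illustrative only): a duplicated DS inherits the type and option flags but
   has no allocated matrices, so DSAllocate() must be called on the duplicate before
   it can be used; the leading dimension 20 is an arbitrary example value.

     DS ds2;

     DSDuplicate(ds,&ds2);
     DSAllocate(ds2,20);       duplicate starts unallocated, allocate explicitly
     ...                       also set the sorting criterion, it is not copied
*/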

/*@
   DSSetMethod - Selects the method to be used to solve the problem.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  meth - an index identifying the method

   Options Database Key:
.  -ds_method <meth> - Sets the method

   Level: intermediate

.seealso: DSGetMethod()
@*/
PetscErrorCode DSSetMethod(DS ds,PetscInt meth)
{
  if (meth<0) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The method must be a non-negative integer");
  if (meth>DS_MAX_SOLVE) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too large value for the method");
  ds->method = meth;
  return(0);
}

/*@
   DSGetMethod - Gets the method currently used in the DS.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  meth - identifier of the method

   Level: intermediate

.seealso: DSSetMethod()
@*/
PetscErrorCode DSGetMethod(DS ds,PetscInt *meth)
{
  *meth = ds->method;
  return(0);
}

/*@
   DSSetParallel - Selects the mode of operation in parallel runs.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  pmode - the parallel mode

   Options Database Key:
.  -ds_parallel <mode> - Sets the parallel mode, either 'redundant' or 'synchronized'

   Notes:
   In the 'redundant' parallel mode, all processes perform the computation
   redundantly, starting from the same data, and producing the same result.
   This result may be slightly different in the different processes if using a
   multithreaded BLAS library, which may cause issues in ill-conditioned problems.

   In the 'synchronized' parallel mode, only the first MPI process performs the
   computation and then the computed quantities are broadcast to the other
   processes in the communicator. This communication is not done automatically,
   an explicit call to DSSynchronize() is required.

   Level: advanced

.seealso: DSSynchronize(), DSGetParallel()
@*/
PetscErrorCode DSSetParallel(DS ds,DSParallelType pmode)
{
  ds->pmode = pmode;
  return(0);
}
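
/*
   Sketch of the 'synchronized' mode (illustrative, not part of the library): only
   the first MPI process performs the computation and the result must be propagated
   explicitly with DSSynchronize(), as stated in the notes above. The eigr/eigi
   arrays are assumed to have been allocated with at least n entries.

     DSSetParallel(ds,DS_PARALLEL_SYNCHRONIZED);
     ...
     DSSolve(ds,eigr,eigi);            only the first process actually computes
     DSSynchronize(ds,eigr,eigi);      broadcast the computed quantities to all ranks
*/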

/*@
   DSGetParallel - Gets the mode of operation in parallel runs.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  pmode - the parallel mode

   Level: advanced

.seealso: DSSetParallel()
@*/
PetscErrorCode DSGetParallel(DS ds,DSParallelType *pmode)
{
  *pmode = ds->pmode;
  return(0);
}

/*@
   DSSetCompact - Switches to compact storage of matrices.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  comp - a boolean flag

   Notes:
   Compact storage is used in some DS types such as DSHEP when the matrix
   is tridiagonal. This flag can be used to indicate whether the user
   provides the matrix entries via the compact form (the tridiagonal DS_MAT_T)
   or the non-compact one (DS_MAT_A).

   The default is PETSC_FALSE.

   Level: advanced

.seealso: DSGetCompact()
@*/
PetscErrorCode DSSetCompact(DS ds,PetscBool comp)
{
  ds->compact = comp;
  return(0);
}
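
/*
   Sketch (illustrative only): with compact storage enabled, the entries are supplied
   through DS_MAT_T instead of DS_MAT_A. The exact packing of DS_MAT_T is type
   dependent (for DSHEP it holds the tridiagonal representation), so this only shows
   the calling pattern, assuming DSGetArrayReal()/DSRestoreArrayReal() are available
   for the real matrix T.

     PetscReal *T;

     DSSetCompact(ds,PETSC_TRUE);
     DSGetArrayReal(ds,DS_MAT_T,&T);
     ...  fill the diagonal/off-diagonal entries of T  ...
     DSRestoreArrayReal(ds,DS_MAT_T,&T);
*/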

/*@
   DSGetCompact - Gets the compact storage flag.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  comp - the flag

   Level: advanced

.seealso: DSSetCompact()
@*/
PetscErrorCode DSGetCompact(DS ds,PetscBool *comp)
{
  *comp = ds->compact;
  return(0);
}

/*@
   DSSetExtraRow - Sets a flag to indicate that the matrix has one extra
   row.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  ext - a boolean flag

   Notes:
   In Krylov methods it is useful that the matrix representing the direct solver
   has one extra row, i.e., has dimension (n+1) x n. If this flag is activated, all
   transformations applied to the right of the matrix also affect this additional
   row. In that case, (n+1) must be less than or equal to the leading dimension.

   The default is PETSC_FALSE.

   Level: advanced

.seealso: DSSolve(), DSAllocate(), DSGetExtraRow()
@*/
PetscErrorCode DSSetExtraRow(DS ds,PetscBool ext)
{
  if (ds->n>0 && ds->n==ds->ld) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"Cannot set extra row after setting n=ld");
  ds->extrarow = ext;
  return(0);
}
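
/*
   Sketch (illustrative only): when the extra row is activated the matrices are
   handled as (n+1) x n, so the leading dimension passed to DSAllocate() must leave
   room for the extra row; the values n=10, ld=11 are arbitrary. The flag must be set
   before the active dimension reaches n=ld, as checked above.

     DSSetExtraRow(ds,PETSC_TRUE);
     DSAllocate(ds,11);            ld >= n+1
*/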

/*@
   DSGetExtraRow - Gets the extra row flag.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  ext - the flag

   Level: advanced

.seealso: DSSetExtraRow()
@*/
PetscErrorCode DSGetExtraRow(DS ds,PetscBool *ext)
{
  *ext = ds->extrarow;
  return(0);
}

/*@
   DSSetRefined - Sets a flag to indicate that refined vectors must be
   computed.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  ref - a boolean flag

   Notes:
   Normally the vectors returned in DS_MAT_X are eigenvectors of the
   projected matrix. With this flag activated, DSVectors() will return
   the right singular vector of the smallest singular value of matrix
   \tilde{A}-theta*I, where \tilde{A} is the extended (n+1)xn matrix
   and theta is the Ritz value. This is used in the refined Ritz
   approximation.

   The default is PETSC_FALSE.

   Level: advanced

.seealso: DSVectors(), DSGetRefined()
@*/
PetscErrorCode DSSetRefined(DS ds,PetscBool ref)
{
  ds->refined = ref;
  return(0);
}
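
/*
   Sketch (illustrative only): after activating the flag, a subsequent call to
   DSVectors() on DS_MAT_X returns refined vectors instead of plain eigenvectors of
   the projected matrix. The DSVectors() arguments are as documented in its own
   manual page; passing NULL for the optional index and residual norm is assumed
   here to request all vectors.

     DSSetRefined(ds,PETSC_TRUE);
     DSSolve(ds,eigr,eigi);
     DSVectors(ds,DS_MAT_X,NULL,NULL);
*/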

/*@
   DSGetRefined - Gets the refined vectors flag.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  ref - the flag

   Level: advanced

.seealso: DSSetRefined()
@*/
PetscErrorCode DSGetRefined(DS ds,PetscBool *ref)
{
  *ref = ds->refined;
  return(0);
}

/*@
   DSSetBlockSize - Sets the block size.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  bs - the block size

   Options Database Key:
.  -ds_block_size <bs> - Sets the block size

   Level: intermediate

.seealso: DSGetBlockSize()
@*/
PetscErrorCode DSSetBlockSize(DS ds,PetscInt bs)
{
  if (bs<1) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The block size must be at least one");
  ds->bs = bs;
  return(0);
}

/*@
   DSGetBlockSize - Gets the block size.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  bs - block size

   Level: intermediate

.seealso: DSSetBlockSize()
@*/
PetscErrorCode DSGetBlockSize(DS ds,PetscInt *bs)
{
  *bs = ds->bs;
  return(0);
}

/*@C
   DSSetSlepcSC - Sets the sorting criterion context.

   Not Collective

   Input Parameters:
+  ds - the direct solver context
-  sc - a pointer to the sorting criterion context

   Level: developer

.seealso: DSGetSlepcSC(), DSSort()
@*/
PetscErrorCode DSSetSlepcSC(DS ds,SlepcSC sc)
{
  if (ds->sc && !ds->scset) {
    PetscFree(ds->sc);
  }
  ds->sc    = sc;
  ds->scset = PETSC_TRUE;
  return(0);
}
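
/*
   Sketch (illustrative only): a common way of defining the sorting criterion is to
   get the SlepcSC context and fill its comparison fields; this assumes the SlepcSC
   struct members comparison/comparisonctx and the helper
   SlepcCompareLargestMagnitude() provided elsewhere in SLEPc, and the DSSort()
   arguments as documented in its own manual page.

     SlepcSC sc;

     DSGetSlepcSC(ds,&sc);
     sc->comparison    = SlepcCompareLargestMagnitude;
     sc->comparisonctx = NULL;
     DSSolve(ds,eigr,eigi);
     DSSort(ds,eigr,eigi,NULL,NULL,NULL);
*/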

/*@C
   DSGetSlepcSC - Gets the sorting criterion context.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  sc - a pointer to the sorting criterion context

   Level: developer

.seealso: DSSetSlepcSC(), DSSort()
@*/
PetscErrorCode DSGetSlepcSC(DS ds,SlepcSC *sc)
{
  if (!ds->sc) {
    PetscNewLog(ds,&ds->sc);
  }
  *sc = ds->sc;
  return(0);
}

/*@
   DSSetFromOptions - Sets DS options from the options database.

   Collective on ds

   Input Parameter:
.  ds - the direct solver context

   Notes:
   To see all options, run your program with the -help option.

   Level: beginner
@*/
PetscErrorCode DSSetFromOptions(DS ds)
{
  PetscInt       bs,meth;
  PetscBool      flag;
  DSParallelType pmode;

  DSRegisterAll();
  /* Set default type (we do not allow changing it with -ds_type) */
  if (!((PetscObject)ds)->type_name) {
    DSSetType(ds,DSNHEP);
  }
  PetscObjectOptionsBegin((PetscObject)ds);

    PetscOptionsInt("-ds_block_size","Block size for the dense system solver","DSSetBlockSize",ds->bs,&bs,&flag);
    if (flag) { DSSetBlockSize(ds,bs); }

    PetscOptionsInt("-ds_method","Method to be used for the dense system","DSSetMethod",ds->method,&meth,&flag);
    if (flag) { DSSetMethod(ds,meth); }

    PetscOptionsEnum("-ds_parallel","Operation mode in parallel runs","DSSetParallel",DSParallelTypes,(PetscEnum)ds->pmode,(PetscEnum*)&pmode,&flag);
    if (flag) { DSSetParallel(ds,pmode); }

    PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject)ds);
  PetscOptionsEnd();
  return(0);
}
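
/*
   Illustrative command-line usage of the options handled above (option names taken
   from the calls in this function; the program name is hypothetical):

     ./myprogram -ds_block_size 2 -ds_method 1 -ds_parallel synchronized
*/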

/*@C
   DSView - Prints the DS data structure.

   Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file. All other processors send their
         data to the first processor to print.

   The user can open an alternative visualization context with
   PetscViewerASCIIOpen() - output to a specified file.

   Level: beginner

.seealso: DSViewMat()
@*/
PetscErrorCode DSView(DS ds,PetscViewer viewer)
{
  PetscBool         isascii,issvd;
  PetscViewerFormat format;
  PetscErrorCode    ierr;
  PetscMPIInt       size;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)ds),&viewer);
  }
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    PetscViewerGetFormat(viewer,&format);
    PetscObjectPrintClassNamePrefixType((PetscObject)ds,viewer);
    MPI_Comm_size(PetscObjectComm((PetscObject)ds),&size);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (size>1) {
        PetscViewerASCIIPrintf(viewer," parallel operation mode: %s\n",DSParallelTypes[ds->pmode]);
      }
      PetscViewerASCIIPrintf(viewer," current state: %s\n",DSStateTypes[ds->state]);
      PetscObjectTypeCompare((PetscObject)ds,DSSVD,&issvd);
      if (issvd) {
        PetscViewerASCIIPrintf(viewer," dimensions: ld=%D, n=%D, m=%D, l=%D, k=%D",ds->ld,ds->n,ds->m,ds->l,ds->k);
      } else {
        PetscViewerASCIIPrintf(viewer," dimensions: ld=%D, n=%D, l=%D, k=%D",ds->ld,ds->n,ds->l,ds->k);
      }
      if (ds->state==DS_STATE_TRUNCATED) {
        PetscViewerASCIIPrintf(viewer,", t=%D\n",ds->t);
      } else {
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      PetscViewerASCIIPrintf(viewer," flags:%s%s%s\n",ds->compact?" compact":"",ds->extrarow?" extrarow":"",ds->refined?" refined":"");
    }
    if (ds->ops->view) {
      PetscViewerASCIIPushTab(viewer);
      (*ds->ops->view)(ds,viewer);
      PetscViewerASCIIPopTab(viewer);
    }
  }
  return(0);
}

/*@C
   DSViewFromOptions - View from options

   Collective on DS

   Input Parameters:
+  ds - the direct solver context
.  obj - optional object
-  name - command line option

   Level: intermediate

.seealso: DSView(), DSCreate()
@*/
PetscErrorCode DSViewFromOptions(DS ds,PetscObject obj,const char name[])
{
  PetscObjectViewFromOptions((PetscObject)ds,obj,name);
  return(0);
}

/*@
   DSAllocate - Allocates memory for internal storage or matrices in DS.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  ld - leading dimension (maximum allowed dimension for the matrices, including
        the extra row if present)

   Note:
   If the leading dimension is different from a previously set value, then
   all matrices are destroyed with DSReset().

   Level: intermediate

.seealso: DSGetLeadingDimension(), DSSetDimensions(), DSSetExtraRow(), DSReset()
@*/
PetscErrorCode DSAllocate(DS ds,PetscInt ld)
{
  if (ld<1) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Leading dimension should be at least one");
  if (ld!=ds->ld) {
    DSReset(ds);
    ds->ld = ld;
    (*ds->ops->allocate)(ds,ld);
  }
  return(0);
}
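
/*
   Sketch (illustrative only): calling DSAllocate() again with a different leading
   dimension resets the object, so previously stored matrices are lost and must be
   filled again; the values 10 and 32 are arbitrary.

     DSAllocate(ds,10);
     ...
     DSAllocate(ds,32);    different ld: internally calls DSReset() and reallocates
*/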

/*@
   DSReset - Resets the DS context to the initial state.

   Collective on ds

   Input Parameter:
.  ds - the direct solver context

   Note:
   All data structures with size depending on the leading dimension
   of DSAllocate() are released.

   Level: advanced

.seealso: DSDestroy(), DSAllocate()
@*/
PetscErrorCode DSReset(DS ds)
{
  PetscInt i;

  if (!ds) return(0);
  ds->state = DS_STATE_RAW;
  ds->ld    = 0;
  ds->l     = 0;
  ds->n     = 0;
  ds->m     = 0;
  ds->k     = 0;
  for (i=0;i<DS_NUM_MAT;i++) {
    PetscFree(ds->mat[i]);
    PetscFree(ds->rmat[i]);
    MatDestroy(&ds->omat[i]);
  }
  PetscFree(ds->perm);
  return(0);
}

/*@
   DSDestroy - Destroys DS context that was created with DSCreate().

   Collective on ds

   Input Parameter:
.  ds - the direct solver context

   Level: beginner

.seealso: DSCreate()
@*/
PetscErrorCode DSDestroy(DS *ds)
{
  if (!*ds) return(0);
  if (--((PetscObject)(*ds))->refct > 0) { *ds = 0; return(0); }
  DSReset(*ds);
  if ((*ds)->ops->destroy) { (*(*ds)->ops->destroy)(*ds); }
  PetscFree((*ds)->work);
  PetscFree((*ds)->rwork);
  PetscFree((*ds)->iwork);
  if (!(*ds)->scset) { PetscFree((*ds)->sc); }
  PetscHeaderDestroy(ds);
  return(0);
}

/*@C
   DSRegister - Adds a direct solver to the DS package.

   Not collective

   Input Parameters:
+  name - name of a new user-defined DS
-  function - routine to create the solver context

   Notes:
   DSRegister() may be called multiple times to add several user-defined
   direct solvers.

   Level: advanced

.seealso: DSRegisterAll()
@*/
PetscErrorCode DSRegister(const char *name,PetscErrorCode (*function)(DS))
{
  DSInitializePackage();
  PetscFunctionListAdd(&DSList,name,function);
  return(0);
}
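
/*
   Sketch (illustrative only) of registering a user-defined DS type. The create
   routine and the type name "myds" are hypothetical; such a routine would typically
   set the function pointers in ds->ops (e.g. allocate and view, which are the ones
   dispatched in this file) to the user's implementations.

     static PetscErrorCode DSCreate_MyDS(DS ds)
     {
       ds->ops->allocate = DSAllocate_MyDS;    user-provided routines (hypothetical)
       ds->ops->view     = DSView_MyDS;
       return(0);
     }

     DSRegister("myds",DSCreate_MyDS);
     DSSetType(ds,"myds");
*/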

SLEPC_EXTERN PetscErrorCode DSCreate_HEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_NHEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GHEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GHIEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GNHEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_SVD(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_PEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS);

/*@C
   DSRegisterAll - Registers all of the direct solvers in the DS package.

   Not Collective

   Level: advanced
@*/
PetscErrorCode DSRegisterAll(void)
{
  if (DSRegisterAllCalled) return(0);
  DSRegisterAllCalled = PETSC_TRUE;
  DSRegister(DSHEP,DSCreate_HEP);
  DSRegister(DSNHEP,DSCreate_NHEP);
  DSRegister(DSGHEP,DSCreate_GHEP);
  DSRegister(DSGHIEP,DSCreate_GHIEP);
  DSRegister(DSGNHEP,DSCreate_GNHEP);
  DSRegister(DSSVD,DSCreate_SVD);
  DSRegister(DSPEP,DSCreate_PEP);
  DSRegister(DSNEP,DSCreate_NEP);
  return(0);
}