1: /*
2: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3: SLEPc - Scalable Library for Eigenvalue Problem Computations
4: Copyright (c) 2002-2021, Universitat Politecnica de Valencia, Spain
6: This file is part of SLEPc.
7: SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9: */
10: /*
11: Basic DS routines
12: */
#include <slepc/private/dsimpl.h>

/* List of registered DS types; populated by DSRegisterAll() */
PetscFunctionList DSList = 0;
PetscBool         DSRegisterAllCalled = PETSC_FALSE;
PetscClassId      DS_CLASSID = 0;
PetscLogEvent     DS_Solve = 0,DS_Vectors = 0,DS_Synchronize = 0,DS_Other = 0;
/* Guards one-time package initialization in DSInitializePackage() */
static PetscBool DSPackageInitialized = PETSC_FALSE;

/* String tables for the DSStateType and DSParallelType enums (options/viewer output) */
const char *DSStateTypes[] = {"RAW","INTERMEDIATE","CONDENSED","TRUNCATED","DSStateType","DS_STATE_",0};
const char *DSParallelTypes[] = {"REDUNDANT","SYNCHRONIZED","DSParallelType","DS_PARALLEL_",0};
/* Short names of the DS work matrices, indexed by DSMatType */
const char *DSMatName[DS_NUM_MAT] = {"A","B","C","T","D","Q","Z","X","Y","U","VT","W","E0","E1","E2","E3","E4","E5","E6","E7","E8","E9"};
/* Convenience list of the extra matrices DS_MAT_E0..DS_MAT_E9 */
DSMatType DSMatExtra[DS_NUM_EXTRA] = {DS_MAT_E0,DS_MAT_E1,DS_MAT_E2,DS_MAT_E3,DS_MAT_E4,DS_MAT_E5,DS_MAT_E6,DS_MAT_E7,DS_MAT_E8,DS_MAT_E9};
27: /*@C
28: DSFinalizePackage - This function destroys everything in the SLEPc interface
29: to the DS package. It is called from SlepcFinalize().
31: Level: developer
33: .seealso: SlepcFinalize()
34: @*/
35: PetscErrorCode DSFinalizePackage(void) 36: {
40: PetscFunctionListDestroy(&DSList);
41: DSPackageInitialized = PETSC_FALSE;
42: DSRegisterAllCalled = PETSC_FALSE;
43: return(0);
44: }
46: /*@C
47: DSInitializePackage - This function initializes everything in the DS package.
48: It is called from PetscDLLibraryRegister() when using dynamic libraries, and
49: on the first call to DSCreate() when using static libraries.
51: Level: developer
53: .seealso: SlepcInitialize()
54: @*/
55: PetscErrorCode DSInitializePackage() 56: {
57: char logList[256];
58: PetscBool opt,pkg;
59: PetscClassId classids[1];
63: if (DSPackageInitialized) return(0);
64: DSPackageInitialized = PETSC_TRUE;
65: /* Register Classes */
66: PetscClassIdRegister("Direct Solver",&DS_CLASSID);
67: /* Register Constructors */
68: DSRegisterAll();
69: /* Register Events */
70: PetscLogEventRegister("DSSolve",DS_CLASSID,&DS_Solve);
71: PetscLogEventRegister("DSVectors",DS_CLASSID,&DS_Vectors);
72: PetscLogEventRegister("DSSynchronize",DS_CLASSID,&DS_Synchronize);
73: PetscLogEventRegister("DSOther",DS_CLASSID,&DS_Other);
74: /* Process Info */
75: classids[0] = DS_CLASSID;
76: PetscInfoProcessClass("ds",1,&classids[0]);
77: /* Process summary exclusions */
78: PetscOptionsGetString(NULL,NULL,"-log_exclude",logList,sizeof(logList),&opt);
79: if (opt) {
80: PetscStrInList("ds",logList,',',&pkg);
81: if (pkg) { PetscLogEventDeactivateClass(DS_CLASSID); }
82: }
83: /* Register package finalizer */
84: PetscRegisterFinalize(DSFinalizePackage);
85: return(0);
86: }
88: /*@
89: DSCreate - Creates a DS context.
91: Collective
93: Input Parameter:
94: . comm - MPI communicator
96: Output Parameter:
97: . newds - location to put the DS context
99: Level: beginner
101: Note:
102: DS objects are not intended for normal users but only for
103: advanced user that for instance implement their own solvers.
105: .seealso: DSDestroy(), DS106: @*/
107: PetscErrorCode DSCreate(MPI_Comm comm,DS *newds)108: {
109: DS ds;
110: PetscInt i;
115: *newds = 0;
116: DSInitializePackage();
117: SlepcHeaderCreate(ds,DS_CLASSID,"DS","Direct Solver (or Dense System)","DS",comm,DSDestroy,DSView);
119: ds->state = DS_STATE_RAW;
120: ds->method = 0;
121: ds->compact = PETSC_FALSE;
122: ds->refined = PETSC_FALSE;
123: ds->extrarow = PETSC_FALSE;
124: ds->ld = 0;
125: ds->l = 0;
126: ds->n = 0;
127: ds->m = 0;
128: ds->k = 0;
129: ds->t = 0;
130: ds->bs = 1;
131: ds->sc = NULL;
132: ds->pmode = DS_PARALLEL_REDUNDANT;
134: for (i=0;i<DS_NUM_MAT;i++) {
135: ds->mat[i] = NULL;
136: ds->rmat[i] = NULL;
137: ds->omat[i] = NULL;
138: }
139: ds->perm = NULL;
140: ds->data = NULL;
141: ds->scset = PETSC_FALSE;
142: ds->work = NULL;
143: ds->rwork = NULL;
144: ds->iwork = NULL;
145: ds->lwork = 0;
146: ds->lrwork = 0;
147: ds->liwork = 0;
149: *newds = ds;
150: return(0);
151: }
153: /*@C
154: DSSetOptionsPrefix - Sets the prefix used for searching for all
155: DS options in the database.
157: Logically Collective on ds
159: Input Parameters:
160: + ds - the direct solver context
161: - prefix - the prefix string to prepend to all DS option requests
163: Notes:
164: A hyphen (-) must NOT be given at the beginning of the prefix name.
165: The first character of all runtime options is AUTOMATICALLY the
166: hyphen.
168: Level: advanced
170: .seealso: DSAppendOptionsPrefix()
171: @*/
172: PetscErrorCode DSSetOptionsPrefix(DS ds,const char *prefix)173: {
178: PetscObjectSetOptionsPrefix((PetscObject)ds,prefix);
179: return(0);
180: }
182: /*@C
183: DSAppendOptionsPrefix - Appends to the prefix used for searching for all
184: DS options in the database.
186: Logically Collective on ds
188: Input Parameters:
189: + ds - the direct solver context
190: - prefix - the prefix string to prepend to all DS option requests
192: Notes:
193: A hyphen (-) must NOT be given at the beginning of the prefix name.
194: The first character of all runtime options is AUTOMATICALLY the hyphen.
196: Level: advanced
198: .seealso: DSSetOptionsPrefix()
199: @*/
200: PetscErrorCode DSAppendOptionsPrefix(DS ds,const char *prefix)201: {
206: PetscObjectAppendOptionsPrefix((PetscObject)ds,prefix);
207: return(0);
208: }
210: /*@C
211: DSGetOptionsPrefix - Gets the prefix used for searching for all
212: DS options in the database.
214: Not Collective
216: Input Parameters:
217: . ds - the direct solver context
219: Output Parameters:
220: . prefix - pointer to the prefix string used is returned
222: Note:
223: On the Fortran side, the user should pass in a string 'prefix' of
224: sufficient length to hold the prefix.
226: Level: advanced
228: .seealso: DSSetOptionsPrefix(), DSAppendOptionsPrefix()
229: @*/
230: PetscErrorCode DSGetOptionsPrefix(DS ds,const char *prefix[])231: {
237: PetscObjectGetOptionsPrefix((PetscObject)ds,prefix);
238: return(0);
239: }
241: /*@C
242: DSSetType - Selects the type for the DS object.
244: Logically Collective on ds
246: Input Parameter:
247: + ds - the direct solver context
248: - type - a known type
250: Level: intermediate
252: .seealso: DSGetType()
253: @*/
254: PetscErrorCode DSSetType(DS ds,DSType type)255: {
256: PetscErrorCode ierr,(*r)(DS);
257: PetscBool match;
263: PetscObjectTypeCompare((PetscObject)ds,type,&match);
264: if (match) return(0);
266: PetscFunctionListFind(DSList,type,&r);
267: if (!r) SETERRQ1(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested DS type %s",type);
269: PetscMemzero(ds->ops,sizeof(struct _DSOps));
271: PetscObjectChangeTypeName((PetscObject)ds,type);
272: (*r)(ds);
273: return(0);
274: }
276: /*@C
277: DSGetType - Gets the DS type name (as a string) from the DS context.
279: Not Collective
281: Input Parameter:
282: . ds - the direct solver context
284: Output Parameter:
285: . name - name of the direct solver
287: Level: intermediate
289: .seealso: DSSetType()
290: @*/
291: PetscErrorCode DSGetType(DS ds,DSType *type)292: {
296: *type = ((PetscObject)ds)->type_name;
297: return(0);
298: }
300: /*@
301: DSDuplicate - Creates a new direct solver object with the same options as
302: an existing one.
304: Collective on ds
306: Input Parameter:
307: . ds - direct solver context
309: Output Parameter:
310: . dsnew - location to put the new DS312: Notes:
313: DSDuplicate() DOES NOT COPY the matrices, and the new DS does not even have
314: internal arrays allocated. Use DSAllocate() to use the new DS.
316: The sorting criterion options are not copied, see DSSetSlepcSC().
318: Level: intermediate
320: .seealso: DSCreate(), DSAllocate(), DSSetSlepcSC()
321: @*/
322: PetscErrorCode DSDuplicate(DS ds,DS *dsnew)323: {
329: DSCreate(PetscObjectComm((PetscObject)ds),dsnew);
330: if (((PetscObject)ds)->type_name) {
331: DSSetType(*dsnew,((PetscObject)ds)->type_name);
332: }
333: (*dsnew)->method = ds->method;
334: (*dsnew)->compact = ds->compact;
335: (*dsnew)->refined = ds->refined;
336: (*dsnew)->extrarow = ds->extrarow;
337: (*dsnew)->bs = ds->bs;
338: (*dsnew)->pmode = ds->pmode;
339: return(0);
340: }
342: /*@
343: DSSetMethod - Selects the method to be used to solve the problem.
345: Logically Collective on ds
347: Input Parameter:
348: + ds - the direct solver context
349: - meth - an index indentifying the method
351: Options Database Key:
352: . -ds_method <meth> - Sets the method
354: Level: intermediate
356: .seealso: DSGetMethod()
357: @*/
358: PetscErrorCode DSSetMethod(DS ds,PetscInt meth)359: {
363: if (meth<0) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The method must be a non-negative integer");
364: if (meth>DS_MAX_SOLVE) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too large value for the method");
365: ds->method = meth;
366: return(0);
367: }
369: /*@
370: DSGetMethod - Gets the method currently used in the DS.
372: Not Collective
374: Input Parameter:
375: . ds - the direct solver context
377: Output Parameter:
378: . meth - identifier of the method
380: Level: intermediate
382: .seealso: DSSetMethod()
383: @*/
384: PetscErrorCode DSGetMethod(DS ds,PetscInt *meth)385: {
389: *meth = ds->method;
390: return(0);
391: }
393: /*@
394: DSSetParallel - Selects the mode of operation in parallel runs.
396: Logically Collective on ds
398: Input Parameter:
399: + ds - the direct solver context
400: - pmode - the parallel mode
402: Options Database Key:
403: . -ds_parallel <mode> - Sets the parallel mode, either 'redundant' or 'synchronized'
405: Notes:
406: In the 'redundant' parallel mode, all processes will make the computation
407: redundantly, starting from the same data, and producing the same result.
408: This result may be slightly different in the different processes if using a
409: multithreaded BLAS library, which may cause issues in ill-conditioned problems.
411: In the 'synchronized' parallel mode, only the first MPI process performs the
412: computation and then the computed quantities are broadcast to the other
413: processes in the communicator. This communication is not done automatically,
414: an explicit call to DSSynchronize() is required.
416: Level: advanced
418: .seealso: DSSynchronize(), DSGetParallel()
419: @*/
420: PetscErrorCode DSSetParallel(DS ds,DSParallelType pmode)421: {
425: ds->pmode = pmode;
426: return(0);
427: }
429: /*@
430: DSGetParallel - Gets the mode of operation in parallel runs.
432: Not Collective
434: Input Parameter:
435: . ds - the direct solver context
437: Output Parameter:
438: . pmode - the parallel mode
440: Level: advanced
442: .seealso: DSSetParallel()
443: @*/
444: PetscErrorCode DSGetParallel(DS ds,DSParallelType *pmode)445: {
449: *pmode = ds->pmode;
450: return(0);
451: }
453: /*@
454: DSSetCompact - Switch to compact storage of matrices.
456: Logically Collective on ds
458: Input Parameter:
459: + ds - the direct solver context
460: - comp - a boolean flag
462: Notes:
463: Compact storage is used in some DS types such as DSHEP when the matrix
464: is tridiagonal. This flag can be used to indicate whether the user
465: provides the matrix entries via the compact form (the tridiagonal DS_MAT_T)
466: or the non-compact one (DS_MAT_A).
468: The default is PETSC_FALSE.
470: Level: advanced
472: .seealso: DSGetCompact()
473: @*/
474: PetscErrorCode DSSetCompact(DS ds,PetscBool comp)475: {
479: ds->compact = comp;
480: return(0);
481: }
483: /*@
484: DSGetCompact - Gets the compact storage flag.
486: Not Collective
488: Input Parameter:
489: . ds - the direct solver context
491: Output Parameter:
492: . comp - the flag
494: Level: advanced
496: .seealso: DSSetCompact()
497: @*/
498: PetscErrorCode DSGetCompact(DS ds,PetscBool *comp)499: {
503: *comp = ds->compact;
504: return(0);
505: }
507: /*@
508: DSSetExtraRow - Sets a flag to indicate that the matrix has one extra
509: row.
511: Logically Collective on ds
513: Input Parameter:
514: + ds - the direct solver context
515: - ext - a boolean flag
517: Notes:
518: In Krylov methods it is useful that the matrix representing the direct solver
519: has one extra row, i.e., has dimension (n+1) x n. If this flag is activated, all
520: transformations applied to the right of the matrix also affect this additional
521: row. In that case, (n+1) must be less or equal than the leading dimension.
523: The default is PETSC_FALSE.
525: Level: advanced
527: .seealso: DSSolve(), DSAllocate(), DSGetExtraRow()
528: @*/
529: PetscErrorCode DSSetExtraRow(DS ds,PetscBool ext)530: {
534: if (ds->n>0 && ds->n==ds->ld) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"Cannot set extra row after setting n=ld");
535: ds->extrarow = ext;
536: return(0);
537: }
539: /*@
540: DSGetExtraRow - Gets the extra row flag.
542: Not Collective
544: Input Parameter:
545: . ds - the direct solver context
547: Output Parameter:
548: . ext - the flag
550: Level: advanced
552: .seealso: DSSetExtraRow()
553: @*/
554: PetscErrorCode DSGetExtraRow(DS ds,PetscBool *ext)555: {
559: *ext = ds->extrarow;
560: return(0);
561: }
563: /*@
564: DSSetRefined - Sets a flag to indicate that refined vectors must be
565: computed.
567: Logically Collective on ds
569: Input Parameter:
570: + ds - the direct solver context
571: - ref - a boolean flag
573: Notes:
574: Normally the vectors returned in DS_MAT_X are eigenvectors of the
575: projected matrix. With this flag activated, DSVectors() will return
576: the right singular vector of the smallest singular value of matrix
577: \tilde{A}-theta*I, where \tilde{A} is the extended (n+1)xn matrix
578: and theta is the Ritz value. This is used in the refined Ritz
579: approximation.
581: The default is PETSC_FALSE.
583: Level: advanced
585: .seealso: DSVectors(), DSGetRefined()
586: @*/
587: PetscErrorCode DSSetRefined(DS ds,PetscBool ref)588: {
592: ds->refined = ref;
593: return(0);
594: }
596: /*@
597: DSGetRefined - Gets the refined vectors flag.
599: Not Collective
601: Input Parameter:
602: . ds - the direct solver context
604: Output Parameter:
605: . ref - the flag
607: Level: advanced
609: .seealso: DSSetRefined()
610: @*/
611: PetscErrorCode DSGetRefined(DS ds,PetscBool *ref)612: {
616: *ref = ds->refined;
617: return(0);
618: }
620: /*@
621: DSSetBlockSize - Sets the block size.
623: Logically Collective on ds
625: Input Parameter:
626: + ds - the direct solver context
627: - bs - the block size
629: Options Database Key:
630: . -ds_block_size <bs> - Sets the block size
632: Level: intermediate
634: .seealso: DSGetBlockSize()
635: @*/
636: PetscErrorCode DSSetBlockSize(DS ds,PetscInt bs)637: {
641: if (bs<1) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The block size must be at least one");
642: ds->bs = bs;
643: return(0);
644: }
646: /*@
647: DSGetBlockSize - Gets the block size.
649: Not Collective
651: Input Parameter:
652: . ds - the direct solver context
654: Output Parameter:
655: . bs - block size
657: Level: intermediate
659: .seealso: DSSetBlockSize()
660: @*/
661: PetscErrorCode DSGetBlockSize(DS ds,PetscInt *bs)662: {
666: *bs = ds->bs;
667: return(0);
668: }
670: /*@C
671: DSSetSlepcSC - Sets the sorting criterion context.
673: Not Collective
675: Input Parameters:
676: + ds - the direct solver context
677: - sc - a pointer to the sorting criterion context
679: Level: developer
681: .seealso: DSGetSlepcSC(), DSSort()
682: @*/
683: PetscErrorCode DSSetSlepcSC(DS ds,SlepcSC sc)684: {
690: if (ds->sc && !ds->scset) {
691: PetscFree(ds->sc);
692: }
693: ds->sc = sc;
694: ds->scset = PETSC_TRUE;
695: return(0);
696: }
698: /*@C
699: DSGetSlepcSC - Gets the sorting criterion context.
701: Not Collective
703: Input Parameter:
704: . ds - the direct solver context
706: Output Parameters:
707: . sc - a pointer to the sorting criterion context
709: Level: developer
711: .seealso: DSSetSlepcSC(), DSSort()
712: @*/
713: PetscErrorCode DSGetSlepcSC(DS ds,SlepcSC *sc)714: {
720: if (!ds->sc) {
721: PetscNewLog(ds,&ds->sc);
722: }
723: *sc = ds->sc;
724: return(0);
725: }
727: /*@
728: DSSetFromOptions - Sets DS options from the options database.
730: Collective on ds
732: Input Parameters:
733: . ds - the direct solver context
735: Notes:
736: To see all options, run your program with the -help option.
738: Level: beginner
739: @*/
740: PetscErrorCode DSSetFromOptions(DS ds)741: {
743: PetscInt bs,meth;
744: PetscBool flag;
745: DSParallelType pmode;
749: DSRegisterAll();
750: /* Set default type (we do not allow changing it with -ds_type) */
751: if (!((PetscObject)ds)->type_name) {
752: DSSetType(ds,DSNHEP);
753: }
754: PetscObjectOptionsBegin((PetscObject)ds);
756: PetscOptionsInt("-ds_block_size","Block size for the dense system solver","DSSetBlockSize",ds->bs,&bs,&flag);
757: if (flag) { DSSetBlockSize(ds,bs); }
759: PetscOptionsInt("-ds_method","Method to be used for the dense system","DSSetMethod",ds->method,&meth,&flag);
760: if (flag) { DSSetMethod(ds,meth); }
762: PetscOptionsEnum("-ds_parallel","Operation mode in parallel runs","DSSetParallel",DSParallelTypes,(PetscEnum)ds->pmode,(PetscEnum*)&pmode,&flag);
763: if (flag) { DSSetParallel(ds,pmode); }
765: PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject)ds);
766: PetscOptionsEnd();
767: return(0);
768: }
770: /*@C
771: DSView - Prints the DS data structure.
773: Collective on ds
775: Input Parameters:
776: + ds - the direct solver context
777: - viewer - optional visualization context
779: Note:
780: The available visualization contexts include
781: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
782: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
783: output where only the first processor opens
784: the file. All other processors send their
785: data to the first processor to print.
787: The user can open an alternative visualization context with
788: PetscViewerASCIIOpen() - output to a specified file.
790: Level: beginner
792: .seealso: DSViewMat()
793: @*/
794: PetscErrorCode DSView(DS ds,PetscViewer viewer)795: {
796: PetscBool isascii,issvd;
797: PetscViewerFormat format;
798: PetscErrorCode ierr;
799: PetscMPIInt size;
803: if (!viewer) {
804: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)ds),&viewer);
805: }
808: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
809: if (isascii) {
810: PetscViewerGetFormat(viewer,&format);
811: PetscObjectPrintClassNamePrefixType((PetscObject)ds,viewer);
812: MPI_Comm_size(PetscObjectComm((PetscObject)ds),&size);CHKERRMPI(ierr);
813: if (size>1) {
814: PetscViewerASCIIPrintf(viewer," parallel operation mode: %s\n",DSParallelTypes[ds->pmode]);
815: }
816: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
817: PetscViewerASCIIPrintf(viewer," current state: %s\n",DSStateTypes[ds->state]);
818: PetscObjectTypeCompare((PetscObject)ds,DSSVD,&issvd);
819: if (issvd) {
820: PetscViewerASCIIPrintf(viewer," dimensions: ld=%D, n=%D, m=%D, l=%D, k=%D",ds->ld,ds->n,ds->m,ds->l,ds->k);
821: } else {
822: PetscViewerASCIIPrintf(viewer," dimensions: ld=%D, n=%D, l=%D, k=%D",ds->ld,ds->n,ds->l,ds->k);
823: }
824: if (ds->state==DS_STATE_TRUNCATED) {
825: PetscViewerASCIIPrintf(viewer,", t=%D\n",ds->t);
826: } else {
827: PetscViewerASCIIPrintf(viewer,"\n");
828: }
829: PetscViewerASCIIPrintf(viewer," flags:%s%s%s\n",ds->compact?" compact":"",ds->extrarow?" extrarow":"",ds->refined?" refined":"");
830: }
831: if (ds->ops->view) {
832: PetscViewerASCIIPushTab(viewer);
833: (*ds->ops->view)(ds,viewer);
834: PetscViewerASCIIPopTab(viewer);
835: }
836: }
837: return(0);
838: }
840: /*@C
841: DSViewFromOptions - View from options
843: Collective on DS845: Input Parameters:
846: + ds - the direct solver context
847: . obj - optional object
848: - name - command line option
850: Level: intermediate
852: .seealso: DSView(), DSCreate()
853: @*/
854: PetscErrorCode DSViewFromOptions(DS ds,PetscObject obj,const char name[])855: {
860: PetscObjectViewFromOptions((PetscObject)ds,obj,name);
861: return(0);
862: }
864: /*@
865: DSAllocate - Allocates memory for internal storage or matrices in DS.
867: Logically Collective on ds
869: Input Parameters:
870: + ds - the direct solver context
871: - ld - leading dimension (maximum allowed dimension for the matrices, including
872: the extra row if present)
874: Note:
875: If the leading dimension is different from a previously set value, then
876: all matrices are destroyed with DSReset().
878: Level: intermediate
880: .seealso: DSGetLeadingDimension(), DSSetDimensions(), DSSetExtraRow(), DSReset()
881: @*/
882: PetscErrorCode DSAllocate(DS ds,PetscInt ld)883: {
890: if (ld<1) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Leading dimension should be at least one");
891: if (ld!=ds->ld) {
892: PetscInfo1(ds,"Allocating memory with leading dimension=%D\n",ld);
893: DSReset(ds);
894: ds->ld = ld;
895: (*ds->ops->allocate)(ds,ld);
896: }
897: return(0);
898: }
900: /*@
901: DSReset - Resets the DS context to the initial state.
903: Collective on ds
905: Input Parameter:
906: . ds - the direct solver context
908: Note:
909: All data structures with size depending on the leading dimension
910: of DSAllocate() are released.
912: Level: advanced
914: .seealso: DSDestroy(), DSAllocate()
915: @*/
916: PetscErrorCode DSReset(DS ds)917: {
918: PetscInt i;
923: if (!ds) return(0);
924: ds->state = DS_STATE_RAW;
925: ds->ld = 0;
926: ds->l = 0;
927: ds->n = 0;
928: ds->m = 0;
929: ds->k = 0;
930: for (i=0;i<DS_NUM_MAT;i++) {
931: PetscFree(ds->mat[i]);
932: PetscFree(ds->rmat[i]);
933: MatDestroy(&ds->omat[i]);
934: }
935: PetscFree(ds->perm);
936: return(0);
937: }
939: /*@C
940: DSDestroy - Destroys DS context that was created with DSCreate().
942: Collective on ds
944: Input Parameter:
945: . ds - the direct solver context
947: Level: beginner
949: .seealso: DSCreate()
950: @*/
951: PetscErrorCode DSDestroy(DS *ds)952: {
956: if (!*ds) return(0);
958: if (--((PetscObject)(*ds))->refct > 0) { *ds = 0; return(0); }
959: DSReset(*ds);
960: if ((*ds)->ops->destroy) { (*(*ds)->ops->destroy)(*ds); }
961: PetscFree((*ds)->work);
962: PetscFree((*ds)->rwork);
963: PetscFree((*ds)->iwork);
964: if (!(*ds)->scset) { PetscFree((*ds)->sc); }
965: PetscHeaderDestroy(ds);
966: return(0);
967: }
969: /*@C
970: DSRegister - Adds a direct solver to the DS package.
972: Not collective
974: Input Parameters:
975: + name - name of a new user-defined DS976: - routine_create - routine to create context
978: Notes:
979: DSRegister() may be called multiple times to add several user-defined
980: direct solvers.
982: Level: advanced
984: .seealso: DSRegisterAll()
985: @*/
986: PetscErrorCode DSRegister(const char *name,PetscErrorCode (*function)(DS))987: {
991: DSInitializePackage();
992: PetscFunctionListAdd(&DSList,name,function);
993: return(0);
994: }
996: SLEPC_EXTERN PetscErrorCode DSCreate_HEP(DS);
997: SLEPC_EXTERN PetscErrorCode DSCreate_NHEP(DS);
998: SLEPC_EXTERN PetscErrorCode DSCreate_GHEP(DS);
999: SLEPC_EXTERN PetscErrorCode DSCreate_GHIEP(DS);
1000: SLEPC_EXTERN PetscErrorCode DSCreate_GNHEP(DS);
1001: SLEPC_EXTERN PetscErrorCode DSCreate_NHEPTS(DS);
1002: SLEPC_EXTERN PetscErrorCode DSCreate_SVD(DS);
1003: SLEPC_EXTERN PetscErrorCode DSCreate_PEP(DS);
1004: SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS);
1006: /*@C
1007: DSRegisterAll - Registers all of the direct solvers in the DS package.
1009: Not Collective
1011: Level: advanced
1012: @*/
1013: PetscErrorCode DSRegisterAll(void)1014: {
1018: if (DSRegisterAllCalled) return(0);
1019: DSRegisterAllCalled = PETSC_TRUE;
1020: DSRegister(DSHEP,DSCreate_HEP);
1021: DSRegister(DSNHEP,DSCreate_NHEP);
1022: DSRegister(DSGHEP,DSCreate_GHEP);
1023: DSRegister(DSGHIEP,DSCreate_GHIEP);
1024: DSRegister(DSGNHEP,DSCreate_GNHEP);
1025: DSRegister(DSNHEPTS,DSCreate_NHEPTS);
1026: DSRegister(DSSVD,DSCreate_SVD);
1027: DSRegister(DSPEP,DSCreate_PEP);
1028: DSRegister(DSNEP,DSCreate_NEP);
1029: return(0);
1030: }