/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2021, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/
/*
   Basic DS routines
*/

#include <slepc/private/dsimpl.h>

PetscFunctionList DSList = 0;
PetscBool         DSRegisterAllCalled = PETSC_FALSE;
PetscClassId      DS_CLASSID = 0;
PetscLogEvent     DS_Solve = 0,DS_Vectors = 0,DS_Synchronize = 0,DS_Other = 0;
static PetscBool  DSPackageInitialized = PETSC_FALSE;

const char *DSStateTypes[] = {"RAW","INTERMEDIATE","CONDENSED","TRUNCATED","DSStateType","DS_STATE_",0};
const char *DSParallelTypes[] = {"REDUNDANT","SYNCHRONIZED","DISTRIBUTED","DSParallelType","DS_PARALLEL_",0};
const char *DSMatName[DS_NUM_MAT] = {"A","B","C","T","D","Q","Z","X","Y","U","V","W","E0","E1","E2","E3","E4","E5","E6","E7","E8","E9"};
DSMatType   DSMatExtra[DS_NUM_EXTRA] = {DS_MAT_E0,DS_MAT_E1,DS_MAT_E2,DS_MAT_E3,DS_MAT_E4,DS_MAT_E5,DS_MAT_E6,DS_MAT_E7,DS_MAT_E8,DS_MAT_E9};

/*@C
   DSFinalizePackage - This function destroys everything in the SLEPc interface
   to the DS package. It is called from SlepcFinalize().

   Level: developer

.seealso: SlepcFinalize()
@*/
PetscErrorCode DSFinalizePackage(void)
{
  PetscFunctionListDestroy(&DSList);
  DSPackageInitialized = PETSC_FALSE;
  DSRegisterAllCalled  = PETSC_FALSE;
  return(0);
}

/*@C
   DSInitializePackage - This function initializes everything in the DS package.
   It is called from PetscDLLibraryRegister() when using dynamic libraries, and
   on the first call to DSCreate() when using static libraries.

   Level: developer

.seealso: SlepcInitialize()
@*/
PetscErrorCode DSInitializePackage(void)
{
  char         logList[256];
  PetscBool    opt,pkg;
  PetscClassId classids[1];

  if (DSPackageInitialized) return(0);
  DSPackageInitialized = PETSC_TRUE;
  /* Register Classes */
  PetscClassIdRegister("Direct Solver",&DS_CLASSID);
  /* Register Constructors */
  DSRegisterAll();
  /* Register Events */
  PetscLogEventRegister("DSSolve",DS_CLASSID,&DS_Solve);
  PetscLogEventRegister("DSVectors",DS_CLASSID,&DS_Vectors);
  PetscLogEventRegister("DSSynchronize",DS_CLASSID,&DS_Synchronize);
  PetscLogEventRegister("DSOther",DS_CLASSID,&DS_Other);
  /* Process Info */
  classids[0] = DS_CLASSID;
  PetscInfoProcessClass("ds",1,&classids[0]);
  /* Process summary exclusions */
  PetscOptionsGetString(NULL,NULL,"-log_exclude",logList,sizeof(logList),&opt);
  if (opt) {
    PetscStrInList("ds",logList,',',&pkg);
    if (pkg) { PetscLogEventDeactivateClass(DS_CLASSID); }
  }
  /* Register package finalizer */
  PetscRegisterFinalize(DSFinalizePackage);
  return(0);
}

/*@
   DSCreate - Creates a DS context.

   Collective

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newds - location to put the DS context

   Level: beginner

   Note:
   DS objects are not intended for normal users but only for
   advanced users who, for instance, implement their own solvers.
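
   Example (a minimal usage sketch, not taken from the SLEPc sources; error
   checking is omitted and the leading dimension 10 is arbitrary):
.vb
   DS ds;
   DSCreate(PETSC_COMM_WORLD,&ds);
   DSSetType(ds,DSHEP);      /* Hermitian eigenproblem solver */
   DSAllocate(ds,10);        /* maximum dimension of the internal matrices */
   /* ... fill matrices, DSSolve(), DSVectors(), ... */
   DSDestroy(&ds);
.ve
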
.seealso: DSDestroy(), DS
@*/
PetscErrorCode DSCreate(MPI_Comm comm,DS *newds)
{
  DS       ds;
  PetscInt i;

  *newds = 0;
  DSInitializePackage();
  SlepcHeaderCreate(ds,DS_CLASSID,"DS","Direct Solver (or Dense System)","DS",comm,DSDestroy,DSView);

  ds->state    = DS_STATE_RAW;
  ds->method   = 0;
  ds->compact  = PETSC_FALSE;
  ds->refined  = PETSC_FALSE;
  ds->extrarow = PETSC_FALSE;
  ds->ld       = 0;
  ds->l        = 0;
  ds->n        = 0;
  ds->k        = 0;
  ds->t        = 0;
  ds->bs       = 1;
  ds->sc       = NULL;
  ds->pmode    = DS_PARALLEL_REDUNDANT;

  for (i=0;i<DS_NUM_MAT;i++) {
    ds->mat[i]  = NULL;
    ds->rmat[i] = NULL;
    ds->omat[i] = NULL;
  }
  ds->perm   = NULL;
  ds->data   = NULL;
  ds->scset  = PETSC_FALSE;
  ds->work   = NULL;
  ds->rwork  = NULL;
  ds->iwork  = NULL;
  ds->lwork  = 0;
  ds->lrwork = 0;
  ds->liwork = 0;

  *newds = ds;
  return(0);
}

/*@C
   DSSetOptionsPrefix - Sets the prefix used for searching for all
   DS options in the database.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  prefix - the prefix string to prepend to all DS option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the
   hyphen.
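
   Example (a hedged sketch with a made-up prefix "my_"; options of this DS
   are then looked up as, e.g., -my_ds_method):
.vb
   DSSetOptionsPrefix(ds,"my_");
   DSSetFromOptions(ds);
.ve
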
   Level: advanced

.seealso: DSAppendOptionsPrefix()
@*/
PetscErrorCode DSSetOptionsPrefix(DS ds,const char *prefix)
{
  PetscObjectSetOptionsPrefix((PetscObject)ds,prefix);
  return(0);
}

/*@C
   DSAppendOptionsPrefix - Appends to the prefix used for searching for all
   DS options in the database.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  prefix - the prefix string to prepend to all DS option requests

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.seealso: DSSetOptionsPrefix()
@*/
PetscErrorCode DSAppendOptionsPrefix(DS ds,const char *prefix)
{
  PetscObjectAppendOptionsPrefix((PetscObject)ds,prefix);
  return(0);
}

/*@C
   DSGetOptionsPrefix - Gets the prefix used for searching for all
   DS options in the database.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  prefix - pointer to the prefix string used

   Note:
   On the Fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: DSSetOptionsPrefix(), DSAppendOptionsPrefix()
@*/
PetscErrorCode DSGetOptionsPrefix(DS ds,const char *prefix[])
{
  PetscObjectGetOptionsPrefix((PetscObject)ds,prefix);
  return(0);
}

/*@C
   DSSetType - Selects the type for the DS object.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  type - a known type

   Level: intermediate

.seealso: DSGetType()
@*/
PetscErrorCode DSSetType(DS ds,DSType type)
{
  PetscErrorCode (*r)(DS);
  PetscBool      match;

  PetscObjectTypeCompare((PetscObject)ds,type,&match);
  if (match) return(0);

  PetscFunctionListFind(DSList,type,&r);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested DS type %s",type);

  PetscMemzero(ds->ops,sizeof(struct _DSOps));

  PetscObjectChangeTypeName((PetscObject)ds,type);
  (*r)(ds);
  return(0);
}

/*@C
   DSGetType - Gets the DS type name (as a string) from the DS context.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  type - name of the direct solver

   Level: intermediate

.seealso: DSSetType()
@*/
PetscErrorCode DSGetType(DS ds,DSType *type)
{
  *type = ((PetscObject)ds)->type_name;
  return(0);
}

/*@
   DSDuplicate - Creates a new direct solver object with the same options as
   an existing one.

   Collective on ds

   Input Parameter:
.  ds - direct solver context

   Output Parameter:
.  dsnew - location to put the new DS

   Notes:
   DSDuplicate() DOES NOT COPY the matrices, and the new DS does not even have
   internal arrays allocated. Use DSAllocate() to use the new DS.

   The sorting criterion options are not copied, see DSSetSlepcSC().
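
   Example (a minimal sketch; the leading dimension 10 is arbitrary and error
   checking is omitted):
.vb
   DS dsnew;
   DSDuplicate(ds,&dsnew);   /* copies type and options, not the matrices */
   DSAllocate(dsnew,10);     /* required before dsnew can be used */
.ve
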
   Level: intermediate

.seealso: DSCreate(), DSAllocate(), DSSetSlepcSC()
@*/
PetscErrorCode DSDuplicate(DS ds,DS *dsnew)
{
  DSCreate(PetscObjectComm((PetscObject)ds),dsnew);
  if (((PetscObject)ds)->type_name) {
    DSSetType(*dsnew,((PetscObject)ds)->type_name);
  }
  (*dsnew)->method   = ds->method;
  (*dsnew)->compact  = ds->compact;
  (*dsnew)->refined  = ds->refined;
  (*dsnew)->extrarow = ds->extrarow;
  (*dsnew)->bs       = ds->bs;
  (*dsnew)->pmode    = ds->pmode;
  return(0);
}

/*@
   DSSetMethod - Selects the method to be used to solve the problem.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  meth - an index identifying the method

   Options Database Key:
.  -ds_method <meth> - Sets the method

   Level: intermediate

.seealso: DSGetMethod()
@*/
PetscErrorCode DSSetMethod(DS ds,PetscInt meth)
{
  if (meth<0) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The method must be a non-negative integer");
  if (meth>DS_MAX_SOLVE) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too large value for the method");
  ds->method = meth;
  return(0);
}

/*@
   DSGetMethod - Gets the method currently used in the DS.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  meth - identifier of the method

   Level: intermediate

.seealso: DSSetMethod()
@*/
PetscErrorCode DSGetMethod(DS ds,PetscInt *meth)
{
  *meth = ds->method;
  return(0);
}

/*@
   DSSetParallel - Selects the mode of operation in parallel runs.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  pmode - the parallel mode

   Options Database Key:
.  -ds_parallel <mode> - Sets the parallel mode, 'redundant', 'synchronized'
                         or 'distributed'

   Notes:
   In the 'redundant' parallel mode, all processes perform the computation
   redundantly, starting from the same data, and produce the same result.
   This result may differ slightly across processes when a multithreaded
   BLAS library is used, which may cause issues in ill-conditioned problems.

   In the 'synchronized' parallel mode, only the first MPI process performs the
   computation, and then the computed quantities are broadcast to the other
   processes in the communicator. This communication is not done automatically;
   an explicit call to DSSynchronize() is required.

   The 'distributed' parallel mode can be used in some DS types only, such
   as the contour integral method of DSNEP. In this case, every MPI process
   is in charge of part of the computation.
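
   Example (a hedged sketch of the 'synchronized' mode; the arrays eigr and
   eigi are assumed to be allocated by the caller):
.vb
   DSSetParallel(ds,DS_PARALLEL_SYNCHRONIZED);
   DSSolve(ds,eigr,eigi);          /* only the first process does the work */
   DSSynchronize(ds,eigr,eigi);    /* broadcast results to all processes */
.ve
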
   Level: advanced

.seealso: DSSynchronize(), DSGetParallel()
@*/
PetscErrorCode DSSetParallel(DS ds,DSParallelType pmode)
{
  ds->pmode = pmode;
  return(0);
}

/*@
   DSGetParallel - Gets the mode of operation in parallel runs.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  pmode - the parallel mode

   Level: advanced

.seealso: DSSetParallel()
@*/
PetscErrorCode DSGetParallel(DS ds,DSParallelType *pmode)
{
  *pmode = ds->pmode;
  return(0);
}

/*@
   DSSetCompact - Switches to compact storage of matrices.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  comp - a boolean flag

   Notes:
   Compact storage is used in some DS types such as DSHEP when the matrix
   is tridiagonal. This flag can be used to indicate whether the user
   provides the matrix entries via the compact form (the tridiagonal DS_MAT_T)
   or the non-compact one (DS_MAT_A).

   The default is PETSC_FALSE.
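
   Example (a hedged sketch of filling the tridiagonal storage; n, a[] and
   b[] are assumptions of the caller, and the assumed layout of DS_MAT_T is
   the diagonal first, then the subdiagonal starting at position ld):
.vb
   PetscReal *T;
   PetscInt  i,ld;
   DSGetLeadingDimension(ds,&ld);
   DSSetCompact(ds,PETSC_TRUE);
   DSGetArrayReal(ds,DS_MAT_T,&T);
   for (i=0;i<n;i++)   T[i]    = a[i];   /* diagonal entries */
   for (i=0;i<n-1;i++) T[i+ld] = b[i];   /* subdiagonal entries */
   DSRestoreArrayReal(ds,DS_MAT_T,&T);
.ve
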
   Level: advanced

.seealso: DSGetCompact()
@*/
PetscErrorCode DSSetCompact(DS ds,PetscBool comp)
{
  ds->compact = comp;
  return(0);
}

/*@
   DSGetCompact - Gets the compact storage flag.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  comp - the flag

   Level: advanced

.seealso: DSSetCompact()
@*/
PetscErrorCode DSGetCompact(DS ds,PetscBool *comp)
{
  *comp = ds->compact;
  return(0);
}

/*@
   DSSetExtraRow - Sets a flag to indicate that the matrix has one extra
   row.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  ext - a boolean flag

   Notes:
   In Krylov methods it is useful that the matrix representing the direct solver
   has one extra row, i.e., has dimension (n+1) x n. If this flag is activated, all
   transformations applied to the right of the matrix also affect this additional
   row. In that case, (n+1) must be less than or equal to the leading dimension.

   The default is PETSC_FALSE.
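
   Example (a hedged sketch; the flag is set before allocation so that the
   leading dimension 11 leaves room for the extra row when n=10):
.vb
   DSSetExtraRow(ds,PETSC_TRUE);
   DSAllocate(ds,11);
.ve
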
   Level: advanced

.seealso: DSSolve(), DSAllocate(), DSGetExtraRow()
@*/
PetscErrorCode DSSetExtraRow(DS ds,PetscBool ext)
{
  if (ds->n>0 && ds->n==ds->ld) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"Cannot set extra row after setting n=ld");
  ds->extrarow = ext;
  return(0);
}

/*@
   DSGetExtraRow - Gets the extra row flag.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  ext - the flag

   Level: advanced

.seealso: DSSetExtraRow()
@*/
PetscErrorCode DSGetExtraRow(DS ds,PetscBool *ext)
{
  *ext = ds->extrarow;
  return(0);
}

/*@
   DSSetRefined - Sets a flag to indicate that refined vectors must be
   computed.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  ref - a boolean flag

   Notes:
   Normally the vectors returned in DS_MAT_X are eigenvectors of the
   projected matrix. With this flag activated, DSVectors() will return
   the right singular vector associated with the smallest singular value of
   matrix \tilde{A}-theta*I, where \tilde{A} is the extended (n+1) x n matrix
   and theta is the Ritz value. This is used in the refined Ritz
   approximation.

   The default is PETSC_FALSE.
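
   Example (a hedged sketch; the column index j is chosen by the caller, and
   it is assumed that the extra row has also been activated):
.vb
   DSSetExtraRow(ds,PETSC_TRUE);
   DSSetRefined(ds,PETSC_TRUE);
   /* ... DSSolve() ... */
   DSVectors(ds,DS_MAT_X,&j,NULL);   /* refined vector in column j of X */
.ve
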
   Level: advanced

.seealso: DSVectors(), DSGetRefined()
@*/
PetscErrorCode DSSetRefined(DS ds,PetscBool ref)
{
  ds->refined = ref;
  return(0);
}

/*@
   DSGetRefined - Gets the refined vectors flag.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  ref - the flag

   Level: advanced

.seealso: DSSetRefined()
@*/
PetscErrorCode DSGetRefined(DS ds,PetscBool *ref)
{
  *ref = ds->refined;
  return(0);
}

/*@
   DSSetBlockSize - Sets the block size.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  bs - the block size

   Options Database Key:
.  -ds_block_size <bs> - Sets the block size

   Level: intermediate

.seealso: DSGetBlockSize()
@*/
PetscErrorCode DSSetBlockSize(DS ds,PetscInt bs)
{
  if (bs<1) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The block size must be at least one");
  ds->bs = bs;
  return(0);
}

/*@
   DSGetBlockSize - Gets the block size.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  bs - block size

   Level: intermediate

.seealso: DSSetBlockSize()
@*/
PetscErrorCode DSGetBlockSize(DS ds,PetscInt *bs)
{
  *bs = ds->bs;
  return(0);
}

/*@C
   DSSetSlepcSC - Sets the sorting criterion context.

   Not Collective

   Input Parameters:
+  ds - the direct solver context
-  sc - a pointer to the sorting criterion context
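
   Example (a hedged sketch that modifies the context obtained from the DS;
   SlepcCompareLargestMagnitude is one of the comparison callbacks provided
   by SLEPc):
.vb
   SlepcSC sc;
   DSGetSlepcSC(ds,&sc);
   sc->comparison    = SlepcCompareLargestMagnitude;
   sc->comparisonctx = NULL;
.ve
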
   Level: developer

.seealso: DSGetSlepcSC(), DSSort()
@*/
PetscErrorCode DSSetSlepcSC(DS ds,SlepcSC sc)
{
  if (ds->sc && !ds->scset) {
    PetscFree(ds->sc);
  }
  ds->sc    = sc;
  ds->scset = PETSC_TRUE;
  return(0);
}

/*@C
   DSGetSlepcSC - Gets the sorting criterion context.

   Not Collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  sc - a pointer to the sorting criterion context

   Level: developer

.seealso: DSSetSlepcSC(), DSSort()
@*/
PetscErrorCode DSGetSlepcSC(DS ds,SlepcSC *sc)
{
  if (!ds->sc) {
    PetscNewLog(ds,&ds->sc);
  }
  *sc = ds->sc;
  return(0);
}

/*@
   DSSetFromOptions - Sets DS options from the options database.

   Collective on ds

   Input Parameter:
.  ds - the direct solver context

   Notes:
   To see all options, run your program with the -help option.
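
   Example (a hedged command-line sketch; ./myprog stands for any program
   that calls DSSetFromOptions() on a DS object):
.vb
   ./myprog -ds_method 1 -ds_block_size 2 -ds_parallel synchronized
.ve
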
   Level: beginner
@*/
PetscErrorCode DSSetFromOptions(DS ds)
{
  PetscInt       bs,meth;
  PetscBool      flag;
  DSParallelType pmode;

  DSRegisterAll();
  /* Set default type (we do not allow changing it with -ds_type) */
  if (!((PetscObject)ds)->type_name) {
    DSSetType(ds,DSNHEP);
  }
  PetscObjectOptionsBegin((PetscObject)ds);

    PetscOptionsInt("-ds_block_size","Block size for the dense system solver","DSSetBlockSize",ds->bs,&bs,&flag);
    if (flag) { DSSetBlockSize(ds,bs); }

    PetscOptionsInt("-ds_method","Method to be used for the dense system","DSSetMethod",ds->method,&meth,&flag);
    if (flag) { DSSetMethod(ds,meth); }

    PetscOptionsEnum("-ds_parallel","Operation mode in parallel runs","DSSetParallel",DSParallelTypes,(PetscEnum)ds->pmode,(PetscEnum*)&pmode,&flag);
    if (flag) { DSSetParallel(ds,pmode); }

    if (ds->ops->setfromoptions) {
      (*ds->ops->setfromoptions)(PetscOptionsObject,ds);
    }
    PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject)ds);
  PetscOptionsEnd();
  return(0);
}

/*@C
   DSView - Prints the DS data structure.

   Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
      output where only the first processor opens
      the file. All other processors send their
      data to the first processor to print.

   The user can open an alternative visualization context with
   PetscViewerASCIIOpen() - output to a specified file.
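
   Example (a hedged sketch; ds.out is an arbitrary file name):
.vb
   PetscViewer viewer;
   PetscViewerASCIIOpen(PETSC_COMM_WORLD,"ds.out",&viewer);
   DSView(ds,viewer);
   PetscViewerDestroy(&viewer);
.ve
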
   Level: beginner

.seealso: DSViewMat()
@*/
PetscErrorCode DSView(DS ds,PetscViewer viewer)
{
  PetscBool         isascii;
  PetscViewerFormat format;
  PetscMPIInt       size;

  if (!viewer) {
    PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)ds),&viewer);
  }
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    PetscViewerGetFormat(viewer,&format);
    PetscObjectPrintClassNamePrefixType((PetscObject)ds,viewer);
    MPI_Comm_size(PetscObjectComm((PetscObject)ds),&size);
    if (size>1) {
      PetscViewerASCIIPrintf(viewer," parallel operation mode: %s\n",DSParallelTypes[ds->pmode]);
    }
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      PetscViewerASCIIPrintf(viewer," current state: %s\n",DSStateTypes[ds->state]);
      PetscViewerASCIIPrintf(viewer," dimensions: ld=%D, n=%D, l=%D, k=%D",ds->ld,ds->n,ds->l,ds->k);
      if (ds->state==DS_STATE_TRUNCATED) {
        PetscViewerASCIIPrintf(viewer,", t=%D\n",ds->t);
      } else {
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      PetscViewerASCIIPrintf(viewer," flags:%s%s%s\n",ds->compact?" compact":"",ds->extrarow?" extrarow":"",ds->refined?" refined":"");
    }
    if (ds->ops->view) {
      PetscViewerASCIIPushTab(viewer);
      (*ds->ops->view)(ds,viewer);
      PetscViewerASCIIPopTab(viewer);
    }
  }
  return(0);
}

/*@C
   DSViewFromOptions - View from options.

   Collective on DS

   Input Parameters:
+  ds - the direct solver context
.  obj - optional object
-  name - command line option

   Level: intermediate

.seealso: DSView(), DSCreate()
@*/
PetscErrorCode DSViewFromOptions(DS ds,PetscObject obj,const char name[])
{
  PetscObjectViewFromOptions((PetscObject)ds,obj,name);
  return(0);
}

/*@
   DSAllocate - Allocates memory for internal storage or matrices in DS.

   Logically Collective on ds

   Input Parameters:
+  ds - the direct solver context
-  ld - leading dimension (maximum allowed dimension for the matrices, including
        the extra row if present)

   Note:
   If the leading dimension is different from a previously set value, then
   all matrices are destroyed with DSReset().
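
   Example (a hedged sketch; 32 is an arbitrary upper bound on the dimensions
   to be used later):
.vb
   DSSetType(ds,DSHEP);
   DSAllocate(ds,32);   /* subsequent calls must keep n<=32 */
.ve
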
   Level: intermediate

.seealso: DSGetLeadingDimension(), DSSetDimensions(), DSSetExtraRow(), DSReset()
@*/
PetscErrorCode DSAllocate(DS ds,PetscInt ld)
{
  if (ld<1) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Leading dimension should be at least one");
  if (ld!=ds->ld) {
    PetscInfo1(ds,"Allocating memory with leading dimension=%D\n",ld);
    DSReset(ds);
    ds->ld = ld;
    (*ds->ops->allocate)(ds,ld);
  }
  return(0);
}

/*@
   DSReset - Resets the DS context to the initial state.

   Collective on ds

   Input Parameter:
.  ds - the direct solver context

   Note:
   All data structures with size depending on the leading dimension
   of DSAllocate() are released.

   Level: advanced

.seealso: DSDestroy(), DSAllocate()
@*/
PetscErrorCode DSReset(DS ds)
{
  PetscInt i;

  if (!ds) return(0);
  ds->state = DS_STATE_RAW;
  ds->ld    = 0;
  ds->l     = 0;
  ds->n     = 0;
  ds->k     = 0;
  for (i=0;i<DS_NUM_MAT;i++) {
    PetscFree(ds->mat[i]);
    PetscFree(ds->rmat[i]);
    MatDestroy(&ds->omat[i]);
  }
  PetscFree(ds->perm);
  return(0);
}

/*@C
   DSDestroy - Destroys DS context that was created with DSCreate().

   Collective on ds

   Input Parameter:
.  ds - the direct solver context

   Level: beginner

.seealso: DSCreate()
@*/
PetscErrorCode DSDestroy(DS *ds)
{
  if (!*ds) return(0);
  if (--((PetscObject)(*ds))->refct > 0) { *ds = 0; return(0); }
  DSReset(*ds);
  if ((*ds)->ops->destroy) { (*(*ds)->ops->destroy)(*ds); }
  PetscFree((*ds)->work);
  PetscFree((*ds)->rwork);
  PetscFree((*ds)->iwork);
  if (!(*ds)->scset) { PetscFree((*ds)->sc); }
  PetscHeaderDestroy(ds);
  return(0);
}

/*@C
   DSRegister - Adds a direct solver to the DS package.

   Not collective

   Input Parameters:
+  name - name of a new user-defined DS
-  function - routine to create the context

   Notes:
   DSRegister() may be called multiple times to add several user-defined
   direct solvers.
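
   Example (a hedged sketch; DSCreate_MyDS is a user-provided constructor
   that fills in the function table of the DS object):
.vb
   PetscErrorCode DSCreate_MyDS(DS);

   DSRegister("myds",DSCreate_MyDS);
   DSSetType(ds,"myds");
.ve
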
   Level: advanced

.seealso: DSRegisterAll()
@*/
PetscErrorCode DSRegister(const char *name,PetscErrorCode (*function)(DS))
{
  DSInitializePackage();
  PetscFunctionListAdd(&DSList,name,function);
  return(0);
}

SLEPC_EXTERN PetscErrorCode DSCreate_HEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_NHEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GHEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GHIEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GNHEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_NHEPTS(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_SVD(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_GSVD(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_PEP(DS);
SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS);

/*@C
   DSRegisterAll - Registers all of the direct solvers in the DS package.

   Not Collective

   Level: advanced
@*/
PetscErrorCode DSRegisterAll(void)
{
  if (DSRegisterAllCalled) return(0);
  DSRegisterAllCalled = PETSC_TRUE;
  DSRegister(DSHEP,DSCreate_HEP);
  DSRegister(DSNHEP,DSCreate_NHEP);
  DSRegister(DSGHEP,DSCreate_GHEP);
  DSRegister(DSGHIEP,DSCreate_GHIEP);
  DSRegister(DSGNHEP,DSCreate_GNHEP);
  DSRegister(DSNHEPTS,DSCreate_NHEPTS);
  DSRegister(DSSVD,DSCreate_SVD);
  DSRegister(DSGSVD,DSCreate_GSVD);
  DSRegister(DSPEP,DSCreate_PEP);
  DSRegister(DSNEP,DSCreate_NEP);
  return(0);
}