Actual source code: dsbasic.c
slepc-main 2024-12-17
1: /*
2: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3: SLEPc - Scalable Library for Eigenvalue Problem Computations
4: Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain
6: This file is part of SLEPc.
7: SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9: */
10: /*
11: Basic DS routines
12: */
14: #include <slepc/private/dsimpl.h>
16: PetscFunctionList DSList = NULL;
17: PetscBool DSRegisterAllCalled = PETSC_FALSE;
18: PetscClassId DS_CLASSID = 0;
19: PetscLogEvent DS_Solve = 0,DS_Vectors = 0,DS_Synchronize = 0,DS_Other = 0;
20: static PetscBool DSPackageInitialized = PETSC_FALSE;
22: const char *DSStateTypes[] = {"RAW","INTERMEDIATE","CONDENSED","TRUNCATED","DSStateType","DS_STATE_",NULL};
23: const char *DSParallelTypes[] = {"REDUNDANT","SYNCHRONIZED","DISTRIBUTED","DSParallelType","DS_PARALLEL_",NULL};
24: const char *DSMatName[DS_NUM_MAT] = {"A","B","C","T","D","Q","Z","X","Y","U","V","W","E0","E1","E2","E3","E4","E5","E6","E7","E8","E9"};
25: DSMatType DSMatExtra[DS_NUM_EXTRA] = {DS_MAT_E0,DS_MAT_E1,DS_MAT_E2,DS_MAT_E3,DS_MAT_E4,DS_MAT_E5,DS_MAT_E6,DS_MAT_E7,DS_MAT_E8,DS_MAT_E9};
27: /*@C
28: DSFinalizePackage - This function destroys everything in the SLEPc interface
29: to the DS package. It is called from SlepcFinalize().
31: Level: developer
33: .seealso: SlepcFinalize()
34: @*/
35: PetscErrorCode DSFinalizePackage(void)
36: {
37: PetscFunctionBegin;
38: PetscCall(PetscFunctionListDestroy(&DSList));
39: DSPackageInitialized = PETSC_FALSE;
40: DSRegisterAllCalled = PETSC_FALSE;
41: PetscFunctionReturn(PETSC_SUCCESS);
42: }
44: /*@C
45: DSInitializePackage - This function initializes everything in the DS package.
46: It is called from PetscDLLibraryRegister() when using dynamic libraries, and
47: on the first call to DSCreate() when using static libraries.
49: Level: developer
51: .seealso: SlepcInitialize()
52: @*/
53: PetscErrorCode DSInitializePackage(void)
54: {
55: char logList[256];
56: PetscBool opt,pkg;
57: PetscClassId classids[1];
59: PetscFunctionBegin;
60: if (DSPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
61: DSPackageInitialized = PETSC_TRUE;
62: /* Register Classes */
63: PetscCall(PetscClassIdRegister("Direct Solver",&DS_CLASSID));
64: /* Register Constructors */
65: PetscCall(DSRegisterAll());
66: /* Register Events */
67: PetscCall(PetscLogEventRegister("DSSolve",DS_CLASSID,&DS_Solve));
68: PetscCall(PetscLogEventRegister("DSVectors",DS_CLASSID,&DS_Vectors));
69: PetscCall(PetscLogEventRegister("DSSynchronize",DS_CLASSID,&DS_Synchronize));
70: PetscCall(PetscLogEventRegister("DSOther",DS_CLASSID,&DS_Other));
71: /* Process Info */
72: classids[0] = DS_CLASSID;
73: PetscCall(PetscInfoProcessClass("ds",1,&classids[0]));
74: /* Process summary exclusions */
75: PetscCall(PetscOptionsGetString(NULL,NULL,"-log_exclude",logList,sizeof(logList),&opt));
76: if (opt) {
77: PetscCall(PetscStrInList("ds",logList,',',&pkg));
78: if (pkg) PetscCall(PetscLogEventDeactivateClass(DS_CLASSID));
79: }
80: /* Register package finalizer */
81: PetscCall(PetscRegisterFinalize(DSFinalizePackage));
82: PetscFunctionReturn(PETSC_SUCCESS);
83: }
85: /*@
86: DSCreate - Creates a DS context.
88: Collective
90: Input Parameter:
91: . comm - MPI communicator
93: Output Parameter:
94: . newds - location to put the DS context
96: Level: beginner
98: Note:
99: DS objects are not intended for normal users but only for
100: advanced users who, for instance, implement their own solvers.
102: .seealso: DSDestroy(), DS
103: @*/
104: PetscErrorCode DSCreate(MPI_Comm comm,DS *newds)
105: {
106: DS ds;
107: PetscInt i;
109: PetscFunctionBegin;
110: PetscAssertPointer(newds,2);
111: PetscCall(DSInitializePackage());
112: PetscCall(SlepcHeaderCreate(ds,DS_CLASSID,"DS","Direct Solver (or Dense System)","DS",comm,DSDestroy,DSView));
114: ds->state = DS_STATE_RAW;
115: ds->method = 0;
116: ds->compact = PETSC_FALSE;
117: ds->refined = PETSC_FALSE;
118: ds->extrarow = PETSC_FALSE;
119: ds->ld = 0;
120: ds->l = 0;
121: ds->n = 0;
122: ds->k = 0;
123: ds->t = 0;
124: ds->bs = 1;
125: ds->sc = NULL;
126: ds->pmode = DS_PARALLEL_REDUNDANT;
128: for (i=0;i<DS_NUM_MAT;i++) ds->omat[i] = NULL;
129: ds->perm = NULL;
130: ds->data = NULL;
131: ds->scset = PETSC_FALSE;
132: ds->work = NULL;
133: ds->rwork = NULL;
134: ds->iwork = NULL;
135: ds->lwork = 0;
136: ds->lrwork = 0;
137: ds->liwork = 0;
139: *newds = ds;
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
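/*
   Illustrative sketch (not part of the original source): a minimal life cycle of a
   DS object as an advanced user would drive it. The chosen type (DSHEP) and the
   leading dimension are arbitrary examples.

     DS ds;
     PetscCall(DSCreate(PETSC_COMM_SELF,&ds));
     PetscCall(DSSetType(ds,DSHEP));
     PetscCall(DSAllocate(ds,32));              // maximum problem size (leading dimension)
     // ... set dimensions, fill matrices, DSSolve(), DSVectors() ...
     PetscCall(DSDestroy(&ds));
*/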
143: /*@
144: DSSetOptionsPrefix - Sets the prefix used for searching for all
145: DS options in the database.
147: Logically Collective
149: Input Parameters:
150: + ds - the direct solver context
151: - prefix - the prefix string to prepend to all DS option requests
153: Notes:
154: A hyphen (-) must NOT be given at the beginning of the prefix name.
155: The first character of all runtime options is AUTOMATICALLY the
156: hyphen.
158: Level: advanced
160: .seealso: DSAppendOptionsPrefix()
161: @*/
162: PetscErrorCode DSSetOptionsPrefix(DS ds,const char *prefix)
163: {
164: PetscFunctionBegin;
166: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)ds,prefix));
167: PetscFunctionReturn(PETSC_SUCCESS);
168: }
170: /*@
171: DSAppendOptionsPrefix - Appends to the prefix used for searching for all
172: DS options in the database.
174: Logically Collective
176: Input Parameters:
177: + ds - the direct solver context
178: - prefix - the prefix string to prepend to all DS option requests
180: Notes:
181: A hyphen (-) must NOT be given at the beginning of the prefix name.
182: The first character of all runtime options is AUTOMATICALLY the hyphen.
184: Level: advanced
186: .seealso: DSSetOptionsPrefix()
187: @*/
188: PetscErrorCode DSAppendOptionsPrefix(DS ds,const char *prefix)
189: {
190: PetscFunctionBegin;
192: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)ds,prefix));
193: PetscFunctionReturn(PETSC_SUCCESS);
194: }
196: /*@
197: DSGetOptionsPrefix - Gets the prefix used for searching for all
198: DS options in the database.
200: Not Collective
202: Input Parameter:
203: . ds - the direct solver context
205: Output Parameter:
206: . prefix - pointer to the prefix string used
208: Note:
209: On the Fortran side, the user should pass in a string 'prefix' of
210: sufficient length to hold the prefix.
212: Level: advanced
214: .seealso: DSSetOptionsPrefix(), DSAppendOptionsPrefix()
215: @*/
216: PetscErrorCode DSGetOptionsPrefix(DS ds,const char *prefix[])
217: {
218: PetscFunctionBegin;
220: PetscAssertPointer(prefix,2);
221: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ds,prefix));
222: PetscFunctionReturn(PETSC_SUCCESS);
223: }
225: /*@
226: DSSetType - Selects the type for the DS object.
228: Logically Collective
230: Input Parameters:
231: + ds - the direct solver context
232: - type - a known type
234: Level: intermediate
236: .seealso: DSGetType()
237: @*/
238: PetscErrorCode DSSetType(DS ds,DSType type)
239: {
240: PetscErrorCode (*r)(DS);
241: PetscBool match;
243: PetscFunctionBegin;
245: PetscAssertPointer(type,2);
247: PetscCall(PetscObjectTypeCompare((PetscObject)ds,type,&match));
248: if (match) PetscFunctionReturn(PETSC_SUCCESS);
250: PetscCall(PetscFunctionListFind(DSList,type,&r));
251: PetscCheck(r,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested DS type %s",type);
253: PetscTryTypeMethod(ds,destroy);
254: PetscCall(DSReset(ds));
255: PetscCall(PetscMemzero(ds->ops,sizeof(struct _DSOps)));
257: PetscCall(PetscObjectChangeTypeName((PetscObject)ds,type));
258: PetscCall((*r)(ds));
259: PetscFunctionReturn(PETSC_SUCCESS);
260: }
262: /*@
263: DSGetType - Gets the DS type name (as a string) from the DS context.
265: Not Collective
267: Input Parameter:
268: . ds - the direct solver context
270: Output Parameter:
271: . type - name of the direct solver
273: Level: intermediate
275: .seealso: DSSetType()
276: @*/
277: PetscErrorCode DSGetType(DS ds,DSType *type)
278: {
279: PetscFunctionBegin;
281: PetscAssertPointer(type,2);
282: *type = ((PetscObject)ds)->type_name;
283: PetscFunctionReturn(PETSC_SUCCESS);
284: }
286: /*@
287: DSDuplicate - Creates a new direct solver object with the same options as
288: an existing one.
290: Collective
292: Input Parameter:
293: . ds - direct solver context
295: Output Parameter:
296: . dsnew - location to put the new DS
298: Notes:
299: DSDuplicate() DOES NOT COPY the matrices, and the new DS does not even have
300: internal arrays allocated. Call DSAllocate() before using the new DS.
302: The sorting criterion options are not copied, see DSSetSlepcSC().
304: Level: intermediate
306: .seealso: DSCreate(), DSAllocate(), DSSetSlepcSC()
307: @*/
308: PetscErrorCode DSDuplicate(DS ds,DS *dsnew)
309: {
310: PetscFunctionBegin;
312: PetscAssertPointer(dsnew,2);
313: PetscCall(DSCreate(PetscObjectComm((PetscObject)ds),dsnew));
314: if (((PetscObject)ds)->type_name) PetscCall(DSSetType(*dsnew,((PetscObject)ds)->type_name));
315: (*dsnew)->method = ds->method;
316: (*dsnew)->compact = ds->compact;
317: (*dsnew)->refined = ds->refined;
318: (*dsnew)->extrarow = ds->extrarow;
319: (*dsnew)->bs = ds->bs;
320: (*dsnew)->pmode = ds->pmode;
321: PetscFunctionReturn(PETSC_SUCCESS);
322: }
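/*
   Illustrative sketch (not part of the original source): a duplicated DS inherits
   the type and flags but no data, so it must be allocated before use; ld stands for
   whatever leading dimension is appropriate.

     DS dsnew;
     PetscCall(DSDuplicate(ds,&dsnew));
     PetscCall(DSAllocate(dsnew,ld));
*/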
324: /*@
325: DSSetMethod - Selects the method to be used to solve the problem.
327: Logically Collective
329: Input Parameters:
330: + ds - the direct solver context
331: - meth - an index identifying the method
333: Options Database Key:
334: . -ds_method <meth> - Sets the method
336: Level: intermediate
338: .seealso: DSGetMethod()
339: @*/
340: PetscErrorCode DSSetMethod(DS ds,PetscInt meth)
341: {
342: PetscFunctionBegin;
345: PetscCheck(meth>=0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The method must be a non-negative integer");
346: PetscCheck(meth<=DS_MAX_SOLVE,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too large value for the method");
347: ds->method = meth;
348: PetscFunctionReturn(PETSC_SUCCESS);
349: }
351: /*@
352: DSGetMethod - Gets the method currently used in the DS.
354: Not Collective
356: Input Parameter:
357: . ds - the direct solver context
359: Output Parameter:
360: . meth - identifier of the method
362: Level: intermediate
364: .seealso: DSSetMethod()
365: @*/
366: PetscErrorCode DSGetMethod(DS ds,PetscInt *meth)
367: {
368: PetscFunctionBegin;
370: PetscAssertPointer(meth,2);
371: *meth = ds->method;
372: PetscFunctionReturn(PETSC_SUCCESS);
373: }
375: /*@
376: DSSetParallel - Selects the mode of operation in parallel runs.
378: Logically Collective
380: Input Parameters:
381: + ds - the direct solver context
382: - pmode - the parallel mode
384: Options Database Key:
385: . -ds_parallel <mode> - Sets the parallel mode, 'redundant', 'synchronized'
386: or 'distributed'
388: Notes:
389: In the 'redundant' parallel mode, all processes will make the computation
390: redundantly, starting from the same data, and producing the same result.
391: This result may be slightly different across processes if using a
392: multithreaded BLAS library, which may cause issues in ill-conditioned problems.
394: In the 'synchronized' parallel mode, only the first MPI process performs the
395: computation and then the computed quantities are broadcast to the other
396: processes in the communicator. This communication is not done automatically;
397: an explicit call to DSSynchronize() is required.
399: The 'distributed' parallel mode can be used in some DS types only, such
400: as the contour integral method of DSNEP. In this case, every MPI process
401: will be in charge of part of the computation.
403: Level: advanced
405: .seealso: DSSynchronize(), DSGetParallel()
406: @*/
407: PetscErrorCode DSSetParallel(DS ds,DSParallelType pmode)
408: {
409: PetscFunctionBegin;
412: ds->pmode = pmode;
413: PetscFunctionReturn(PETSC_SUCCESS);
414: }
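/*
   Illustrative sketch (not part of the original source): in synchronized mode the
   quantities computed by the first process must be propagated with an explicit call,
   as stated in the notes above; eigr/eigi are caller-provided arrays.

     PetscCall(DSSetParallel(ds,DS_PARALLEL_SYNCHRONIZED));
     PetscCall(DSSolve(ds,eigr,eigi));          // computation effectively done by rank 0
     PetscCall(DSSynchronize(ds,eigr,eigi));    // broadcast results to the other ranks
*/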
416: /*@
417: DSGetParallel - Gets the mode of operation in parallel runs.
419: Not Collective
421: Input Parameter:
422: . ds - the direct solver context
424: Output Parameter:
425: . pmode - the parallel mode
427: Level: advanced
429: .seealso: DSSetParallel()
430: @*/
431: PetscErrorCode DSGetParallel(DS ds,DSParallelType *pmode)
432: {
433: PetscFunctionBegin;
435: PetscAssertPointer(pmode,2);
436: *pmode = ds->pmode;
437: PetscFunctionReturn(PETSC_SUCCESS);
438: }
440: /*@
441: DSSetCompact - Switch to compact storage of matrices.
443: Logically Collective
445: Input Parameters:
446: + ds - the direct solver context
447: - comp - a boolean flag
449: Notes:
450: Compact storage is used in some DS types such as DSHEP when the matrix
451: is tridiagonal. This flag can be used to indicate whether the user
452: provides the matrix entries via the compact form (the tridiagonal DS_MAT_T)
453: or the non-compact one (DS_MAT_A).
455: The default is PETSC_FALSE.
457: Level: advanced
459: .seealso: DSGetCompact()
460: @*/
461: PetscErrorCode DSSetCompact(DS ds,PetscBool comp)
462: {
463: PetscFunctionBegin;
466: if (ds->compact != comp && ds->ld) PetscTryTypeMethod(ds,setcompact,comp);
467: ds->compact = comp;
468: PetscFunctionReturn(PETSC_SUCCESS);
469: }
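/*
   Illustrative sketch (not part of the original source): with compact storage a
   tridiagonal problem is passed through DS_MAT_T instead of DS_MAT_A. The use of
   DSGetArrayReal()/DSRestoreArrayReal() and the layout of T (diagonal first, then
   off-diagonal) are assumptions of this sketch.

     PetscReal *T;
     PetscCall(DSSetCompact(ds,PETSC_TRUE));
     PetscCall(DSGetArrayReal(ds,DS_MAT_T,&T));
     // fill T[0..n-1] with the diagonal and T[ld..ld+n-2] with the off-diagonal
     PetscCall(DSRestoreArrayReal(ds,DS_MAT_T,&T));
*/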
471: /*@
472: DSGetCompact - Gets the compact storage flag.
474: Not Collective
476: Input Parameter:
477: . ds - the direct solver context
479: Output Parameter:
480: . comp - the flag
482: Level: advanced
484: .seealso: DSSetCompact()
485: @*/
486: PetscErrorCode DSGetCompact(DS ds,PetscBool *comp)
487: {
488: PetscFunctionBegin;
490: PetscAssertPointer(comp,2);
491: *comp = ds->compact;
492: PetscFunctionReturn(PETSC_SUCCESS);
493: }
495: /*@
496: DSSetExtraRow - Sets a flag to indicate that the matrix has one extra
497: row.
499: Logically Collective
501: Input Parameters:
502: + ds - the direct solver context
503: - ext - a boolean flag
505: Notes:
506: In Krylov methods it is useful that the matrix representing the direct solver
507: has one extra row, i.e., has dimension (n+1) x n. If this flag is activated, all
508: transformations applied to the right of the matrix also affect this additional
509: row. In that case, (n+1) must be less than or equal to the leading dimension.
511: The default is PETSC_FALSE.
513: Level: advanced
515: .seealso: DSSolve(), DSAllocate(), DSGetExtraRow()
516: @*/
517: PetscErrorCode DSSetExtraRow(DS ds,PetscBool ext)
518: {
519: PetscFunctionBegin;
522: PetscCheck(!ext || ds->n==0 || ds->n!=ds->ld,PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"Cannot set extra row after setting n=ld");
523: ds->extrarow = ext;
524: PetscFunctionReturn(PETSC_SUCCESS);
525: }
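/*
   Illustrative sketch (not part of the original source): the extra row is requested
   before the active dimension is set, and n+1 must fit within the leading dimension;
   DSSetDimensions(ds,n,l,k) is assumed to be the standard calling sequence.

     PetscCall(DSSetExtraRow(ds,PETSC_TRUE));
     PetscCall(DSSetDimensions(ds,n,0,0));      // requires n+1 <= ld
*/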
527: /*@
528: DSGetExtraRow - Gets the extra row flag.
530: Not Collective
532: Input Parameter:
533: . ds - the direct solver context
535: Output Parameter:
536: . ext - the flag
538: Level: advanced
540: .seealso: DSSetExtraRow()
541: @*/
542: PetscErrorCode DSGetExtraRow(DS ds,PetscBool *ext)
543: {
544: PetscFunctionBegin;
546: PetscAssertPointer(ext,2);
547: *ext = ds->extrarow;
548: PetscFunctionReturn(PETSC_SUCCESS);
549: }
551: /*@
552: DSSetRefined - Sets a flag to indicate that refined vectors must be
553: computed.
555: Logically Collective
557: Input Parameters:
558: + ds - the direct solver context
559: - ref - a boolean flag
561: Notes:
562: Normally the vectors returned in DS_MAT_X are eigenvectors of the
563: projected matrix. With this flag activated, DSVectors() will return
564: the right singular vector of the smallest singular value of matrix
565: \tilde{A}-theta*I, where \tilde{A} is the extended (n+1)xn matrix
566: and theta is the Ritz value. This is used in the refined Ritz
567: approximation.
569: The default is PETSC_FALSE.
571: Level: advanced
573: .seealso: DSVectors(), DSGetRefined()
574: @*/
575: PetscErrorCode DSSetRefined(DS ds,PetscBool ref)
576: {
577: PetscFunctionBegin;
580: ds->refined = ref;
581: PetscFunctionReturn(PETSC_SUCCESS);
582: }
584: /*@
585: DSGetRefined - Gets the refined vectors flag.
587: Not Collective
589: Input Parameter:
590: . ds - the direct solver context
592: Output Parameter:
593: . ref - the flag
595: Level: advanced
597: .seealso: DSSetRefined()
598: @*/
599: PetscErrorCode DSGetRefined(DS ds,PetscBool *ref)
600: {
601: PetscFunctionBegin;
603: PetscAssertPointer(ref,2);
604: *ref = ds->refined;
605: PetscFunctionReturn(PETSC_SUCCESS);
606: }
608: /*@
609: DSSetBlockSize - Sets the block size.
611: Logically Collective
613: Input Parameters:
614: + ds - the direct solver context
615: - bs - the block size
617: Options Database Key:
618: . -ds_block_size <bs> - Sets the block size
620: Level: intermediate
622: .seealso: DSGetBlockSize()
623: @*/
624: PetscErrorCode DSSetBlockSize(DS ds,PetscInt bs)
625: {
626: PetscFunctionBegin;
629: PetscCheck(bs>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The block size must be at least one");
630: ds->bs = bs;
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: /*@
635: DSGetBlockSize - Gets the block size.
637: Not Collective
639: Input Parameter:
640: . ds - the direct solver context
642: Output Parameter:
643: . bs - block size
645: Level: intermediate
647: .seealso: DSSetBlockSize()
648: @*/
649: PetscErrorCode DSGetBlockSize(DS ds,PetscInt *bs)
650: {
651: PetscFunctionBegin;
653: PetscAssertPointer(bs,2);
654: *bs = ds->bs;
655: PetscFunctionReturn(PETSC_SUCCESS);
656: }
658: /*@C
659: DSSetSlepcSC - Sets the sorting criterion context.
661: Logically Collective
663: Input Parameters:
664: + ds - the direct solver context
665: - sc - a pointer to the sorting criterion context
667: Note:
668: Not available in Fortran.
670: Level: developer
672: .seealso: DSGetSlepcSC(), DSSort()
673: @*/
674: PetscErrorCode DSSetSlepcSC(DS ds,SlepcSC sc)
675: {
676: PetscFunctionBegin;
678: PetscAssertPointer(sc,2);
679: if (ds->sc && !ds->scset) PetscCall(PetscFree(ds->sc));
680: ds->sc = sc;
681: ds->scset = PETSC_TRUE;
682: PetscFunctionReturn(PETSC_SUCCESS);
683: }
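/*
   Illustrative sketch (not part of the original source): the sorting criterion is
   usually obtained from the DS and its comparison callback set directly;
   SlepcCompareLargestMagnitude is one of the comparison routines provided by SLEPc.

     SlepcSC sc;
     PetscCall(DSGetSlepcSC(ds,&sc));
     sc->comparison    = SlepcCompareLargestMagnitude;
     sc->comparisonctx = NULL;
*/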
685: /*@C
686: DSGetSlepcSC - Gets the sorting criterion context.
688: Not Collective
690: Input Parameter:
691: . ds - the direct solver context
693: Output Parameter:
694: . sc - a pointer to the sorting criterion context
696: Note:
697: Not available in Fortran.
699: Level: developer
701: .seealso: DSSetSlepcSC(), DSSort()
702: @*/
703: PetscErrorCode DSGetSlepcSC(DS ds,SlepcSC *sc)
704: {
705: PetscFunctionBegin;
707: PetscAssertPointer(sc,2);
708: if (!ds->sc) PetscCall(PetscNew(&ds->sc));
709: *sc = ds->sc;
710: PetscFunctionReturn(PETSC_SUCCESS);
711: }
713: /*@
714: DSSetFromOptions - Sets DS options from the options database.
716: Collective
718: Input Parameter:
719: . ds - the direct solver context
721: Notes:
722: To see all options, run your program with the -help option.
724: Level: beginner
726: .seealso: DSSetOptionsPrefix()
727: @*/
728: PetscErrorCode DSSetFromOptions(DS ds)
729: {
730: PetscInt bs,meth;
731: PetscBool flag;
732: DSParallelType pmode;
734: PetscFunctionBegin;
736: PetscCall(DSRegisterAll());
737: /* Set default type (we do not allow changing it with -ds_type) */
738: if (!((PetscObject)ds)->type_name) PetscCall(DSSetType(ds,DSNHEP));
739: PetscObjectOptionsBegin((PetscObject)ds);
741: PetscCall(PetscOptionsInt("-ds_block_size","Block size for the dense system solver","DSSetBlockSize",ds->bs,&bs,&flag));
742: if (flag) PetscCall(DSSetBlockSize(ds,bs));
744: PetscCall(PetscOptionsInt("-ds_method","Method to be used for the dense system","DSSetMethod",ds->method,&meth,&flag));
745: if (flag) PetscCall(DSSetMethod(ds,meth));
747: PetscCall(PetscOptionsEnum("-ds_parallel","Operation mode in parallel runs","DSSetParallel",DSParallelTypes,(PetscEnum)ds->pmode,(PetscEnum*)&pmode,&flag));
748: if (flag) PetscCall(DSSetParallel(ds,pmode));
750: PetscTryTypeMethod(ds,setfromoptions,PetscOptionsObject);
751: PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)ds,PetscOptionsObject));
752: PetscOptionsEnd();
753: PetscFunctionReturn(PETSC_SUCCESS);
754: }
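/*
   Illustrative sketch (not part of the original source): the options processed above
   can be given on the command line of the calling program (./program is a placeholder):

     ./program -ds_block_size 2 -ds_method 1 -ds_parallel synchronized
*/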
756: /*@
757: DSView - Prints the DS data structure.
759: Collective
761: Input Parameters:
762: + ds - the direct solver context
763: - viewer - optional visualization context
765: Note:
766: The available visualization contexts include
767: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
768: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
769: output where only the first processor opens
770: the file. All other processors send their
771: data to the first processor to print.
773: The user can open an alternative visualization context with
774: PetscViewerASCIIOpen() - output to a specified file.
776: Level: beginner
778: .seealso: DSViewMat()
779: @*/
780: PetscErrorCode DSView(DS ds,PetscViewer viewer)
781: {
782: PetscBool isascii;
783: PetscViewerFormat format;
784: PetscMPIInt size;
786: PetscFunctionBegin;
788: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)ds),&viewer));
790: PetscCheckSameComm(ds,1,viewer,2);
791: PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii));
792: if (isascii) {
793: PetscCall(PetscViewerGetFormat(viewer,&format));
794: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)ds,viewer));
795: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)ds),&size));
796: if (size>1) PetscCall(PetscViewerASCIIPrintf(viewer," parallel operation mode: %s\n",DSParallelTypes[ds->pmode]));
797: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
798: PetscCall(PetscViewerASCIIPrintf(viewer," current state: %s\n",DSStateTypes[ds->state]));
799: PetscCall(PetscViewerASCIIPrintf(viewer," dimensions: ld=%" PetscInt_FMT ", n=%" PetscInt_FMT ", l=%" PetscInt_FMT ", k=%" PetscInt_FMT,ds->ld,ds->n,ds->l,ds->k));
800: if (ds->state==DS_STATE_TRUNCATED) PetscCall(PetscViewerASCIIPrintf(viewer,", t=%" PetscInt_FMT "\n",ds->t));
801: else PetscCall(PetscViewerASCIIPrintf(viewer,"\n"));
802: PetscCall(PetscViewerASCIIPrintf(viewer," flags:%s%s%s\n",ds->compact?" compact":"",ds->extrarow?" extrarow":"",ds->refined?" refined":""));
803: }
804: PetscCall(PetscViewerASCIIPushTab(viewer));
805: PetscTryTypeMethod(ds,view,viewer);
806: PetscCall(PetscViewerASCIIPopTab(viewer));
807: }
808: PetscFunctionReturn(PETSC_SUCCESS);
809: }
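/*
   Illustrative sketch (not part of the original source): printing the detailed
   information handled by the PETSC_VIEWER_ASCII_INFO_DETAIL branch above.

     PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD;
     PetscCall(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO_DETAIL));
     PetscCall(DSView(ds,viewer));
     PetscCall(PetscViewerPopFormat(viewer));
*/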
811: /*@
812: DSViewFromOptions - View from options
814: Collective
816: Input Parameters:
817: + ds - the direct solver context
818: . obj - optional object
819: - name - command line option
821: Level: intermediate
823: .seealso: DSView(), DSCreate()
824: @*/
825: PetscErrorCode DSViewFromOptions(DS ds,PetscObject obj,const char name[])
826: {
827: PetscFunctionBegin;
829: PetscCall(PetscObjectViewFromOptions((PetscObject)ds,obj,name));
830: PetscFunctionReturn(PETSC_SUCCESS);
831: }
833: /*@
834: DSAllocate - Allocates memory for internal storage or matrices in DS.
836: Logically Collective
838: Input Parameters:
839: + ds - the direct solver context
840: - ld - leading dimension (maximum allowed dimension for the matrices, including
841: the extra row if present)
843: Note:
844: If the leading dimension is different from a previously set value, then
845: all matrices are destroyed with DSReset().
847: Level: intermediate
849: .seealso: DSGetLeadingDimension(), DSSetDimensions(), DSSetExtraRow(), DSReset(), DSReallocate()
850: @*/
851: PetscErrorCode DSAllocate(DS ds,PetscInt ld)
852: {
853: PetscFunctionBegin;
857: PetscCheck(ld>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Leading dimension should be at least one");
858: if (ld!=ds->ld) {
859: PetscCall(PetscInfo(ds,"Allocating memory with leading dimension=%" PetscInt_FMT "\n",ld));
860: PetscCall(DSReset(ds));
861: ds->ld = ld;
862: PetscUseTypeMethod(ds,allocate,ld);
863: }
864: PetscFunctionReturn(PETSC_SUCCESS);
865: }
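/*
   Illustrative sketch (not part of the original source): allocation followed by the
   usual fill/solve sequence; DSSetDimensions(), DSGetMat()/DSRestoreMat() and
   DSSolve() are assumed to follow their standard calling sequences.

     Mat A;
     PetscCall(DSAllocate(ds,ld));
     PetscCall(DSSetDimensions(ds,n,0,0));
     PetscCall(DSGetMat(ds,DS_MAT_A,&A));
     // ... fill the leading n x n block of A ...
     PetscCall(DSRestoreMat(ds,DS_MAT_A,&A));
     PetscCall(DSSolve(ds,eigr,eigi));
*/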
867: /*@
868: DSReallocate - Reallocates memory for internal storage or matrices in DS,
869: keeping the previously set data.
871: Logically Collective
873: Input Parameters:
874: + ds - the direct solver context
875: - ld - new leading dimension
877: Notes:
878: The new leading dimension must be larger than the previous one. The relevant
879: data previously set is copied over to the new data structures.
881: This operation is not available in all DS types.
883: Level: developer
885: .seealso: DSAllocate()
886: @*/
887: PetscErrorCode DSReallocate(DS ds,PetscInt ld)
888: {
889: PetscFunctionBegin;
893: PetscCheck(ds->ld,PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"DSAllocate() must be called first");
894: PetscCheck(ld>ds->ld,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"New leading dimension %" PetscInt_FMT " must be larger than the previous one %" PetscInt_FMT,ld,ds->ld);
895: PetscCall(PetscInfo(ds,"Reallocating memory with new leading dimension=%" PetscInt_FMT "\n",ld));
896: PetscUseTypeMethod(ds,reallocate,ld);
897: ds->ld = ld;
898: PetscFunctionReturn(PETSC_SUCCESS);
899: }
901: /*@
902: DSReset - Resets the DS context to the initial state.
904: Collective
906: Input Parameter:
907: . ds - the direct solver context
909: Note:
910: All data structures with size depending on the leading dimension
911: of DSAllocate() are released.
913: Level: advanced
915: .seealso: DSDestroy(), DSAllocate()
916: @*/
917: PetscErrorCode DSReset(DS ds)
918: {
919: PetscInt i;
921: PetscFunctionBegin;
923: if (!ds) PetscFunctionReturn(PETSC_SUCCESS);
924: ds->state = DS_STATE_RAW;
925: ds->ld = 0;
926: ds->l = 0;
927: ds->n = 0;
928: ds->k = 0;
929: for (i=0;i<DS_NUM_MAT;i++) PetscCall(MatDestroy(&ds->omat[i]));
930: PetscCall(PetscFree(ds->perm));
931: PetscFunctionReturn(PETSC_SUCCESS);
932: }
934: /*@
935: DSDestroy - Destroys DS context that was created with DSCreate().
937: Collective
939: Input Parameter:
940: . ds - the direct solver context
942: Level: beginner
944: .seealso: DSCreate()
945: @*/
946: PetscErrorCode DSDestroy(DS *ds)
947: {
948: PetscFunctionBegin;
949: if (!*ds) PetscFunctionReturn(PETSC_SUCCESS);
951: if (--((PetscObject)*ds)->refct > 0) { *ds = NULL; PetscFunctionReturn(PETSC_SUCCESS); }
952: PetscCall(DSReset(*ds));
953: PetscTryTypeMethod(*ds,destroy);
954: PetscCall(PetscFree((*ds)->work));
955: PetscCall(PetscFree((*ds)->rwork));
956: PetscCall(PetscFree((*ds)->iwork));
957: if (!(*ds)->scset) PetscCall(PetscFree((*ds)->sc));
958: PetscCall(PetscHeaderDestroy(ds));
959: PetscFunctionReturn(PETSC_SUCCESS);
960: }
962: /*@C
963: DSRegister - Adds a direct solver to the DS package.
965: Not Collective
967: Input Parameters:
968: + name - name of a new user-defined DS
969: - function - routine to create context
971: Note:
972: DSRegister() may be called multiple times to add several user-defined
973: direct solvers.
975: Level: advanced
977: .seealso: DSRegisterAll()
978: @*/
979: PetscErrorCode DSRegister(const char *name,PetscErrorCode (*function)(DS))
980: {
981: PetscFunctionBegin;
982: PetscCall(DSInitializePackage());
983: PetscCall(PetscFunctionListAdd(&DSList,name,function));
984: PetscFunctionReturn(PETSC_SUCCESS);
985: }
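/*
   Illustrative sketch (not part of the original source): registering a user-defined
   type; MyDSCreate is a hypothetical constructor supplied by the user.

     PetscCall(DSRegister("myds",MyDSCreate));
     PetscCall(DSSetType(ds,"myds"));
*/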
987: SLEPC_EXTERN PetscErrorCode DSCreate_HEP(DS);
988: SLEPC_EXTERN PetscErrorCode DSCreate_NHEP(DS);
989: SLEPC_EXTERN PetscErrorCode DSCreate_GHEP(DS);
990: SLEPC_EXTERN PetscErrorCode DSCreate_GHIEP(DS);
991: SLEPC_EXTERN PetscErrorCode DSCreate_GNHEP(DS);
992: SLEPC_EXTERN PetscErrorCode DSCreate_NHEPTS(DS);
993: SLEPC_EXTERN PetscErrorCode DSCreate_SVD(DS);
994: SLEPC_EXTERN PetscErrorCode DSCreate_HSVD(DS);
995: SLEPC_EXTERN PetscErrorCode DSCreate_GSVD(DS);
996: SLEPC_EXTERN PetscErrorCode DSCreate_PEP(DS);
997: SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS);
999: /*@C
1000: DSRegisterAll - Registers all of the direct solvers in the DS package.
1002: Not Collective
1004: Level: advanced
1006: .seealso: DSRegister()
1007: @*/
1008: PetscErrorCode DSRegisterAll(void)
1009: {
1010: PetscFunctionBegin;
1011: if (DSRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS);
1012: DSRegisterAllCalled = PETSC_TRUE;
1013: PetscCall(DSRegister(DSHEP,DSCreate_HEP));
1014: PetscCall(DSRegister(DSNHEP,DSCreate_NHEP));
1015: PetscCall(DSRegister(DSGHEP,DSCreate_GHEP));
1016: PetscCall(DSRegister(DSGHIEP,DSCreate_GHIEP));
1017: PetscCall(DSRegister(DSGNHEP,DSCreate_GNHEP));
1018: PetscCall(DSRegister(DSNHEPTS,DSCreate_NHEPTS));
1019: PetscCall(DSRegister(DSSVD,DSCreate_SVD));
1020: PetscCall(DSRegister(DSHSVD,DSCreate_HSVD));
1021: PetscCall(DSRegister(DSGSVD,DSCreate_GSVD));
1022: PetscCall(DSRegister(DSPEP,DSCreate_PEP));
1023: PetscCall(DSRegister(DSNEP,DSCreate_NEP));
1024: PetscFunctionReturn(PETSC_SUCCESS);
1025: }