Actual source code: dsnep.c
slepc-3.22.1 2024-10-28
1: /*
2: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3: SLEPc - Scalable Library for Eigenvalue Problem Computations
4: Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain
6: This file is part of SLEPc.
7: SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9: */
11: #include <slepc/private/dsimpl.h>
12: #include <slepcblaslapack.h>
14: typedef struct {
15: PetscInt nf; /* number of functions in f[] */
16: FN f[DS_NUM_EXTRA]; /* functions defining the nonlinear operator */
17: PetscInt max_mid; /* maximum minimality index */
18: PetscInt nnod; /* number of nodes for quadrature rules */
19: PetscInt spls; /* number of sampling columns for quadrature rules */
20: PetscInt Nit; /* number of refinement iterations */
21: PetscReal rtol; /* tolerance of Newton refinement */
22: RG rg; /* region for contour integral */
23: PetscLayout map; /* used to distribute work among MPI processes */
24: void *computematrixctx;
25: DSNEPMatrixFunctionFn *computematrix;
26: } DS_NEP;
28: /*
29: DSNEPComputeMatrix - Build the matrix associated with a nonlinear operator
30: T(lambda) or its derivative T'(lambda), given the parameter lambda, where
31: T(lambda) = sum_i E_i*f_i(lambda). The result is written in mat.
32: */
33: static PetscErrorCode DSNEPComputeMatrix(DS ds,PetscScalar lambda,PetscBool deriv,DSMatType mat)
34: {
35: DS_NEP *ctx = (DS_NEP*)ds->data;
36: PetscScalar *T,alpha;
37: const PetscScalar *E;
38: PetscInt i,ld,n;
39: PetscBLASInt k,inc=1;
41: PetscFunctionBegin;
42: PetscCall(PetscLogEventBegin(DS_Other,ds,0,0,0));
43: if (ctx->computematrix) PetscCall((*ctx->computematrix)(ds,lambda,deriv,mat,ctx->computematrixctx));
44: else {
45: PetscCall(DSGetDimensions(ds,&n,NULL,NULL,NULL));
46: PetscCall(DSGetLeadingDimension(ds,&ld));
47: PetscCall(PetscBLASIntCast(ld*n,&k));
48: PetscCall(MatDenseGetArray(ds->omat[mat],&T));
49: PetscCall(PetscArrayzero(T,k));
50: for (i=0;i<ctx->nf;i++) {
51: if (deriv) PetscCall(FNEvaluateDerivative(ctx->f[i],lambda,&alpha));
52: else PetscCall(FNEvaluateFunction(ctx->f[i],lambda,&alpha));
53: PetscCall(MatDenseGetArrayRead(ds->omat[DSMatExtra[i]],&E));
54: PetscCallBLAS("BLASaxpy",BLASaxpy_(&k,&alpha,E,&inc,T,&inc));
55: PetscCall(MatDenseRestoreArrayRead(ds->omat[DSMatExtra[i]],&E));
56: }
57: PetscCall(MatDenseRestoreArray(ds->omat[mat],&T));
58: }
59: PetscCall(PetscLogEventEnd(DS_Other,ds,0,0,0));
60: PetscFunctionReturn(PETSC_SUCCESS);
61: }
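/*
   For illustration, two common instances of the split form used above: a quadratic
   problem T(lambda) = K + lambda*C + lambda^2*M corresponds to f_0(lambda)=1,
   f_1(lambda)=lambda, f_2(lambda)=lambda^2 with E_0=K, E_1=C, E_2=M, while a
   delay problem T(lambda) = -lambda*I + A + exp(-tau*lambda)*B corresponds to
   f_0(lambda)=-lambda, f_1(lambda)=1, f_2(lambda)=exp(-tau*lambda).
*/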
63: static PetscErrorCode DSAllocate_NEP(DS ds,PetscInt ld)
64: {
65: DS_NEP *ctx = (DS_NEP*)ds->data;
66: PetscInt i;
68: PetscFunctionBegin;
69: PetscCall(DSAllocateMat_Private(ds,DS_MAT_X));
70: for (i=0;i<ctx->nf;i++) PetscCall(DSAllocateMat_Private(ds,DSMatExtra[i]));
71: PetscCall(PetscFree(ds->perm));
72: PetscCall(PetscMalloc1(ld*ctx->max_mid,&ds->perm));
73: PetscFunctionReturn(PETSC_SUCCESS);
74: }
76: static PetscErrorCode DSView_NEP(DS ds,PetscViewer viewer)
77: {
78: DS_NEP *ctx = (DS_NEP*)ds->data;
79: PetscViewerFormat format;
80: PetscInt i;
81: const char *methodname[] = {
82: "Successive Linear Problems",
83: "Contour Integral"
84: };
85: const int nmeth=PETSC_STATIC_ARRAY_LENGTH(methodname);
87: PetscFunctionBegin;
88: PetscCall(PetscViewerGetFormat(viewer,&format));
89: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
90: if (ds->method<nmeth) PetscCall(PetscViewerASCIIPrintf(viewer,"solving the problem with: %s\n",methodname[ds->method]));
91: #if defined(PETSC_USE_COMPLEX)
92: if (ds->method==1) { /* contour integral method */
93: PetscCall(PetscViewerASCIIPrintf(viewer,"number of integration points: %" PetscInt_FMT "\n",ctx->nnod));
94: PetscCall(PetscViewerASCIIPrintf(viewer,"maximum minimality index: %" PetscInt_FMT "\n",ctx->max_mid));
95: if (ctx->spls) PetscCall(PetscViewerASCIIPrintf(viewer,"number of sampling columns for quadrature: %" PetscInt_FMT "\n",ctx->spls));
96: if (ctx->Nit) PetscCall(PetscViewerASCIIPrintf(viewer,"doing iterative refinement (%" PetscInt_FMT " its, tolerance %g)\n",ctx->Nit,(double)ctx->rtol));
97: PetscCall(RGView(ctx->rg,viewer));
98: }
99: #endif
100: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPrintf(viewer,"number of functions: %" PetscInt_FMT "\n",ctx->nf));
101: PetscFunctionReturn(PETSC_SUCCESS);
102: }
103: for (i=0;i<ctx->nf;i++) {
104: PetscCall(FNView(ctx->f[i],viewer));
105: PetscCall(DSViewMat(ds,viewer,DSMatExtra[i]));
106: }
107: if (ds->state>DS_STATE_INTERMEDIATE) PetscCall(DSViewMat(ds,viewer,DS_MAT_X));
108: PetscFunctionReturn(PETSC_SUCCESS);
109: }
111: static PetscErrorCode DSVectors_NEP(DS ds,DSMatType mat,PetscInt *j,PetscReal *rnorm)
112: {
113: PetscFunctionBegin;
114: PetscCheck(!rnorm,PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
115: switch (mat) {
116: case DS_MAT_X:
117: break;
118: case DS_MAT_Y:
119: SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
120: default:
121: SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Invalid mat parameter");
122: }
123: PetscFunctionReturn(PETSC_SUCCESS);
124: }
126: static PetscErrorCode DSSort_NEP(DS ds,PetscScalar *wr,PetscScalar *wi,PetscScalar *rr,PetscScalar *ri,PetscInt *dummy)
127: {
128: DS_NEP *ctx = (DS_NEP*)ds->data;
129: PetscInt n,l,i,*perm,lds;
130: PetscScalar *Q;
132: PetscFunctionBegin;
133: if (!ds->sc) PetscFunctionReturn(PETSC_SUCCESS);
134: if (!ds->method) PetscFunctionReturn(PETSC_SUCCESS); /* SLP computes just one eigenvalue */
135: n = ds->n*ctx->max_mid;
136: lds = ds->ld*ctx->max_mid;
137: l = ds->l;
138: perm = ds->perm;
139: for (i=0;i<n;i++) perm[i] = i;
140: if (rr) PetscCall(DSSortEigenvalues_Private(ds,rr,ri,perm,PETSC_FALSE));
141: else PetscCall(DSSortEigenvalues_Private(ds,wr,NULL,perm,PETSC_FALSE));
142: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_Q],&Q));
143: for (i=l;i<ds->t;i++) Q[i+i*lds] = wr[perm[i]];
144: for (i=l;i<ds->t;i++) wr[i] = Q[i+i*lds];
145: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_Q],&Q));
146: /* n != ds->n */
147: PetscCall(DSPermuteColumns_Private(ds,0,ds->t,ds->n,DS_MAT_X,perm));
148: PetscFunctionReturn(PETSC_SUCCESS);
149: }
151: #if defined(SLEPC_MISSING_LAPACK_GGEV3)
152: #define LAPGEEV "ggev"
153: #else
154: #define LAPGEEV "ggev3"
155: #endif
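/*
   Outline of DSSolve_NEP_SLP below (Successive Linear Problems, method 0):
   starting from the target sigma, each iteration solves the linearized
   generalized eigenproblem

      T(lambda_k)*u = mu*T'(lambda_k)*u

   with LAPACK ggev/ggev3, picks the eigenvalue mu of smallest magnitude, and
   updates lambda_{k+1} = lambda_k - mu. The iteration stops when the relative
   residual norm(T(lambda_k)*x)/|lambda_k| falls below a tolerance of order
   n*eps^(3/4), or fails after maxit=100 iterations. A single eigenpair is
   returned (ds->t = 1).
*/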
157: static PetscErrorCode DSSolve_NEP_SLP(DS ds,PetscScalar *wr,PetscScalar *wi)
158: {
159: PetscScalar *A,*B,*W,*X,*work,*alpha,*beta,a;
160: PetscScalar sigma,lambda,mu,re,re2,sone=1.0,szero=0.0;
161: PetscBLASInt info,n,ld,lwork,one=1,zero=0;
162: PetscInt it,pos,j,maxit=100,result;
163: PetscReal norm,tol,done=1.0;
164: #if !defined(PETSC_USE_COMPLEX)
165: PetscReal *alphai,im,im2;
166: #endif
168: PetscFunctionBegin;
169: PetscCall(PetscBLASIntCast(ds->n,&n));
170: PetscCall(PetscBLASIntCast(ds->ld,&ld));
171: PetscCall(DSAllocateMat_Private(ds,DS_MAT_A));
172: PetscCall(DSAllocateMat_Private(ds,DS_MAT_B));
173: PetscCall(DSAllocateMat_Private(ds,DS_MAT_W));
174: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_A],&A));
175: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_B],&B));
176: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_W],&W));
177: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_X],&X));
179: /* workspace query and memory allocation */
180: lwork = -1;
181: #if defined(PETSC_USE_COMPLEX)
182: PetscCallBLAS("LAPACK" LAPGEEV,LAPACKggevalt_("N","V",&n,A,&ld,B,&ld,NULL,NULL,NULL,&ld,W,&ld,&a,&lwork,NULL,&info));
183: PetscCall(PetscBLASIntCast((PetscInt)PetscRealPart(a),&lwork));
184: PetscCall(DSAllocateWork_Private(ds,lwork+2*ds->n,8*ds->n,0));
185: alpha = ds->work;
186: beta = ds->work + ds->n;
187: work = ds->work + 2*ds->n;
188: #else
189: PetscCallBLAS("LAPACK" LAPGEEV,LAPACKggevalt_("N","V",&n,A,&ld,B,&ld,NULL,NULL,NULL,NULL,&ld,W,&ld,&a,&lwork,&info));
190: PetscCall(PetscBLASIntCast((PetscInt)a,&lwork));
191: PetscCall(DSAllocateWork_Private(ds,lwork+3*ds->n,0,0));
192: alpha = ds->work;
193: beta = ds->work + ds->n;
194: alphai = ds->work + 2*ds->n;
195: work = ds->work + 3*ds->n;
196: #endif
198: sigma = 0.0;
199: if (ds->sc->comparison==SlepcCompareTargetMagnitude || ds->sc->comparison==SlepcCompareTargetReal) sigma = *(PetscScalar*)ds->sc->comparisonctx;
200: lambda = sigma;
201: tol = n*PETSC_MACHINE_EPSILON/PetscSqrtReal(PETSC_SQRT_MACHINE_EPSILON);
203: for (it=0;it<maxit;it++) {
205: /* evaluate T and T' */
206: PetscCall(DSNEPComputeMatrix(ds,lambda,PETSC_FALSE,DS_MAT_A));
207: if (it) {
208: PetscCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&sone,A,&ld,X,&one,&szero,X+ld,&one));
209: norm = BLASnrm2_(&n,X+ld,&one);
210: if (norm/PetscAbsScalar(lambda)<=tol) break;
211: }
212: PetscCall(DSNEPComputeMatrix(ds,lambda,PETSC_TRUE,DS_MAT_B));
214: /* compute eigenvalue correction mu and eigenvector u */
215: #if defined(PETSC_USE_COMPLEX)
216: PetscCallBLAS("LAPACK" LAPGEEV,LAPACKggevalt_("N","V",&n,A,&ld,B,&ld,alpha,beta,NULL,&ld,W,&ld,work,&lwork,ds->rwork,&info));
217: #else
218: PetscCallBLAS("LAPACK" LAPGEEV,LAPACKggevalt_("N","V",&n,A,&ld,B,&ld,alpha,alphai,beta,NULL,&ld,W,&ld,work,&lwork,&info));
219: #endif
220: SlepcCheckLapackInfo(LAPGEEV,info);
222: /* find smallest eigenvalue */
223: j = 0;
224: if (beta[j]==0.0) re = (PetscRealPart(alpha[j])>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
225: else re = alpha[j]/beta[j];
226: #if !defined(PETSC_USE_COMPLEX)
227: if (beta[j]==0.0) im = (alphai[j]>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
228: else im = alphai[j]/beta[j];
229: #endif
230: pos = 0;
231: for (j=1;j<n;j++) {
232: if (beta[j]==0.0) re2 = (PetscRealPart(alpha[j])>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
233: else re2 = alpha[j]/beta[j];
234: #if !defined(PETSC_USE_COMPLEX)
235: if (beta[j]==0.0) im2 = (alphai[j]>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
236: else im2 = alphai[j]/beta[j];
237: PetscCall(SlepcCompareSmallestMagnitude(re,im,re2,im2,&result,NULL));
238: #else
239: PetscCall(SlepcCompareSmallestMagnitude(re,0.0,re2,0.0,&result,NULL));
240: #endif
241: if (result > 0) {
242: re = re2;
243: #if !defined(PETSC_USE_COMPLEX)
244: im = im2;
245: #endif
246: pos = j;
247: }
248: }
250: #if !defined(PETSC_USE_COMPLEX)
251: PetscCheck(im==0.0,PETSC_COMM_SELF,PETSC_ERR_SUP,"DSNEP found a complex eigenvalue; try rerunning with complex scalars");
252: #endif
253: mu = alpha[pos]/beta[pos];
254: PetscCall(PetscArraycpy(X,W+pos*ld,n));
255: norm = BLASnrm2_(&n,X,&one);
256: PetscCallBLAS("LAPACKlascl",LAPACKlascl_("G",&zero,&zero,&norm,&done,&n,&one,X,&n,&info));
257: SlepcCheckLapackInfo("lascl",info);
259: /* correct eigenvalue approximation */
260: lambda = lambda - mu;
261: }
262: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_A],&A));
263: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_B],&B));
264: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_W],&W));
265: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_X],&X));
267: PetscCheck(it<maxit,PETSC_COMM_SELF,PETSC_ERR_CONV_FAILED,"DSNEP did not converge");
268: ds->t = 1;
269: wr[0] = lambda;
270: if (wi) wi[0] = 0.0;
271: PetscFunctionReturn(PETSC_SUCCESS);
272: }
274: #if defined(PETSC_USE_COMPLEX)
275: /*
276: Newton refinement for eigenpairs computed with contour integral.
277: k - number of eigenpairs to refine
278: wr - eigenvalues (eigenvectors are stored in DS_MAT_X)
279: */
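/*
   In matrix form, each Newton step below for an approximate eigenpair
   (lambda,x) solves the bordered (n+1) x (n+1) linear system

      [ T(lambda)   T'(lambda)*x ] [ dx      ]   [ T(lambda)*x ]
      [   x^H            0       ] [ dlambda ] = [      0      ]

   via LAPACK getrf/getrs, then updates lambda <- lambda - dlambda and
   x <- (x - dx)/norm(x - dx).
*/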
280: static PetscErrorCode DSNEPNewtonRefine(DS ds,PetscInt k,PetscScalar *wr)
281: {
282: DS_NEP *ctx = (DS_NEP*)ds->data;
283: PetscScalar *X,*W,*U,*R,sone=1.0,szero=0.0;
284: PetscReal norm;
285: PetscInt i,j,ii,nwu=0,*p,jstart=0,jend=k;
286: const PetscInt *range;
287: PetscBLASInt n,*perm,info,ld,one=1,n1;
288: PetscMPIInt len,size,root;
289: PetscLayout map;
291: PetscFunctionBegin;
292: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_X],&X));
293: PetscCall(PetscBLASIntCast(ds->n,&n));
294: PetscCall(PetscBLASIntCast(ds->ld,&ld));
295: n1 = n+1;
296: p = ds->perm;
297: PetscCall(PetscArrayzero(p,k));
298: PetscCall(DSAllocateWork_Private(ds,(n+2)*(n+1),0,n+1));
299: U = ds->work+nwu; nwu += (n+1)*(n+1);
300: R = ds->work+nwu; /*nwu += n+1;*/
301: perm = ds->iwork;
302: if (ds->pmode==DS_PARALLEL_DISTRIBUTED) {
303: PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)ds),PETSC_DECIDE,k,1,&map));
304: PetscCall(PetscLayoutGetRange(map,&jstart,&jend));
305: }
306: for (ii=0;ii<ctx->Nit;ii++) {
307: for (j=jstart;j<jend;j++) {
308: if (p[j]<2) {
309: PetscCall(DSNEPComputeMatrix(ds,wr[j],PETSC_FALSE,DS_MAT_W));
310: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_W],&W));
311: PetscCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&sone,W,&ld,X+ld*j,&one,&szero,R,&one));
312: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_W],&W));
313: norm = BLASnrm2_(&n,R,&one);
314: if (norm/PetscAbsScalar(wr[j]) > ctx->rtol) {
315: PetscCall(PetscInfo(NULL,"Refining eigenpair %" PetscInt_FMT ", residual=%g\n",j,(double)(norm/PetscAbsScalar(wr[j]))));
316: p[j] = 1;
317: R[n] = 0.0;
318: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_W],&W));
319: for (i=0;i<n;i++) {
320: PetscCall(PetscArraycpy(U+i*n1,W+i*ld,n));
321: U[n+i*n1] = PetscConj(X[j*ld+i]);
322: }
323: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_W],&W));
324: U[n+n*n1] = 0.0;
325: PetscCall(DSNEPComputeMatrix(ds,wr[j],PETSC_TRUE,DS_MAT_W));
326: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_W],&W));
327: PetscCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&sone,W,&ld,X+ld*j,&one,&szero,U+n*(n+1),&one));
328: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_W],&W));
329: /* solve system */
330: PetscCallBLAS("LAPACKgetrf",LAPACKgetrf_(&n1,&n1,U,&n1,perm,&info));
331: SlepcCheckLapackInfo("getrf",info);
332: PetscCallBLAS("LAPACKgetrs",LAPACKgetrs_("N",&n1,&one,U,&n1,perm,R,&n1,&info));
333: SlepcCheckLapackInfo("getrs",info);
334: wr[j] -= R[n];
335: for (i=0;i<n;i++) X[j*ld+i] -= R[i];
336: /* normalization */
337: norm = BLASnrm2_(&n,X+ld*j,&one);
338: for (i=0;i<n;i++) X[ld*j+i] /= norm;
339: } else p[j] = 2;
340: }
341: }
342: }
343: if (ds->pmode==DS_PARALLEL_DISTRIBUTED) { /* communicate results */
344: PetscCall(PetscMPIIntCast(k,&len));
345: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE,p,len,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)ds)));
346: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)ds),&size));
347: PetscCall(PetscLayoutGetRanges(map,&range));
348: for (j=0;j<k;j++) {
349: if (p[j]) { /* j-th eigenpair has been refined */
350: for (root=0;root<size;root++) if (range[root+1]>j) break;
351: PetscCall(PetscMPIIntCast(1,&len));
352: PetscCallMPI(MPI_Bcast(wr+j,len,MPIU_SCALAR,root,PetscObjectComm((PetscObject)ds)));
353: PetscCall(PetscMPIIntCast(n,&len));
354: PetscCallMPI(MPI_Bcast(X+ld*j,len,MPIU_SCALAR,root,PetscObjectComm((PetscObject)ds)));
355: }
356: }
357: PetscCall(PetscLayoutDestroy(&map));
358: }
359: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_X],&X));
360: PetscFunctionReturn(PETSC_SUCCESS);
361: }
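/*
   Outline of DSSolve_NEP_Contour below (contour integral, method 1, complex
   scalars only), a moment-based block-Hankel approach: for a random probing
   matrix Rc, quadrature approximations of the scaled moments

      S_s = sum_k w_k * zn_k^s * T(z_k)^{-1} * Rc,   s = 0,...,2*max_mid-1,

   are accumulated over the trapezoidal nodes z_k on the ellipse. Block Hankel
   matrices H0 = [S_{i+j}] and H1 = [S_{i+j+1}] are assembled, an SVD of H0
   reveals the numerical rank rk, and the reduced rk x rk pencil (U^H*H1*V, Sigma)
   is solved with ggev/ggev3. Eigenvalues are mapped back to the original region
   through center+radius*alpha/beta (times the RG scale), values lying outside
   the region are discarded, and eigenvectors are recovered from the left
   singular vectors. Optionally, Newton refinement is applied at the end.
*/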
363: PetscErrorCode DSSolve_NEP_Contour(DS ds,PetscScalar *wr,PetscScalar *wi)
364: {
365: DS_NEP *ctx = (DS_NEP*)ds->data;
366: PetscScalar *alpha,*beta,*Q,*Z,*X,*U,*V,*W,*work,*Rc,*R,*w,*z,*zn,*S;
367: PetscScalar sone=1.0,szero=0.0,center,a;
368: PetscReal *rwork,norm,radius,vscale,rgscale,*sigma;
369: PetscBLASInt info,n,*perm,p,pp,ld,lwork,k_,rk_,colA,rowA,one=1;
370: PetscInt mid,lds,nnod=ctx->nnod,k,i,ii,jj,j,s,off,rk,nwu=0,nw,lrwork,*inside,kstart=0,kend=nnod;
371: PetscMPIInt len;
372: PetscBool isellipse;
373: PetscRandom rand;
375: PetscFunctionBegin;
376: PetscCheck(ctx->rg,PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"The contour solver requires a region passed with DSNEPSetRG()");
377: /* Contour parameters */
378: PetscCall(PetscObjectTypeCompare((PetscObject)ctx->rg,RGELLIPSE,&isellipse));
379: PetscCheck(isellipse,PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Region must be Ellipse");
380: PetscCall(RGEllipseGetParameters(ctx->rg,&center,&radius,&vscale));
381: PetscCall(RGGetScale(ctx->rg,&rgscale));
382: if (ds->pmode==DS_PARALLEL_DISTRIBUTED) {
383: if (!ctx->map) PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)ds),PETSC_DECIDE,ctx->nnod,1,&ctx->map));
384: PetscCall(PetscLayoutGetRange(ctx->map,&kstart,&kend));
385: }
387: PetscCall(DSAllocateMat_Private(ds,DS_MAT_W)); /* size n */
388: PetscCall(DSAllocateMat_Private(ds,DS_MAT_Q)); /* size mid*n */
389: PetscCall(DSAllocateMat_Private(ds,DS_MAT_Z)); /* size mid*n */
390: PetscCall(DSAllocateMat_Private(ds,DS_MAT_U)); /* size mid*n */
391: PetscCall(DSAllocateMat_Private(ds,DS_MAT_V)); /* size mid*n */
392: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_Q],&Q));
393: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_Z],&Z));
394: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_U],&U));
395: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_V],&V));
396: mid = ctx->max_mid;
397: PetscCall(PetscBLASIntCast(ds->n,&n));
398: p = n; /* maximum number of columns for the probing matrix */
399: PetscCall(PetscBLASIntCast(ds->ld,&ld));
400: PetscCall(PetscBLASIntCast(mid*n,&rowA));
401: nw = 2*n*(p+mid)+3*nnod+2*mid*n*p;
402: lrwork = 9*mid*n;
404: /* workspace query and memory allocation */
405: lwork = -1;
406: PetscCallBLAS("LAPACK" LAPGEEV,LAPACKggevalt_("N","V",&rowA,Q,&rowA,Z,&rowA,NULL,NULL,NULL,&ld,V,&rowA,&a,&lwork,NULL,&info));
407: PetscCall(PetscBLASIntCast((PetscInt)PetscRealPart(a),&lwork));
408: PetscCall(DSAllocateWork_Private(ds,lwork+nw,lrwork,n+1));
410: sigma = ds->rwork;
411: rwork = ds->rwork+mid*n;
412: perm = ds->iwork;
413: z = ds->work+nwu; nwu += nnod; /* quadrature points */
414: zn = ds->work+nwu; nwu += nnod; /* normalized quadrature points */
415: w = ds->work+nwu; nwu += nnod; /* quadrature weights */
416: Rc = ds->work+nwu; nwu += n*p;
417: R = ds->work+nwu; nwu += n*p;
418: alpha = ds->work+nwu; nwu += mid*n;
419: beta = ds->work+nwu; nwu += mid*n;
420: S = ds->work+nwu; nwu += 2*mid*n*p;
421: work = ds->work+nwu;
423: /* Compute quadrature parameters */
424: PetscCall(RGComputeQuadrature(ctx->rg,RG_QUADRULE_TRAPEZOIDAL,nnod,z,zn,w));
426: /* Set random matrix */
427: PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)ds),&rand));
428: PetscCall(PetscRandomSetSeed(rand,0x12345678));
429: PetscCall(PetscRandomSeed(rand));
430: for (j=0;j<p;j++)
431: for (i=0;i<n;i++) PetscCall(PetscRandomGetValue(rand,Rc+i+j*n));
432: PetscCall(PetscArrayzero(S,2*mid*n*p));
433: /* Loop of integration points */
434: for (k=kstart;k<kend;k++) {
435: PetscCall(PetscInfo(NULL,"Solving integration point %" PetscInt_FMT "\n",k));
436: PetscCall(PetscArraycpy(R,Rc,p*n));
437: PetscCall(DSNEPComputeMatrix(ds,z[k],PETSC_FALSE,DS_MAT_W));
439: /* LU factorization */
440: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_W],&W));
441: PetscCallBLAS("LAPACKgetrf",LAPACKgetrf_(&n,&n,W,&ld,perm,&info));
442: SlepcCheckLapackInfo("getrf",info);
443: PetscCallBLAS("LAPACKgetrs",LAPACKgetrs_("N",&n,&p,W,&ld,perm,R,&n,&info));
444: SlepcCheckLapackInfo("getrs",info);
445: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_W],&W));
447: /* Moments computation */
448: for (s=0;s<2*ctx->max_mid;s++) {
449: off = s*n*p;
450: for (j=0;j<p;j++)
451: for (i=0;i<n;i++) S[off+i+j*n] += w[k]*R[j*n+i];
452: w[k] *= zn[k];
453: }
454: }
456: if (ds->pmode==DS_PARALLEL_DISTRIBUTED) { /* compute final S via reduction */
457: PetscCall(PetscMPIIntCast(2*mid*n*p,&len));
458: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE,S,len,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)ds)));
459: }
460: p = ctx->spls?PetscMin(ctx->spls,n):n;
461: pp = p;
462: do {
463: p = pp;
464: PetscCall(PetscBLASIntCast(mid*p,&colA));
466: PetscCall(PetscInfo(ds,"Computing SVD of size %" PetscBLASInt_FMT "x%" PetscBLASInt_FMT "\n",rowA,colA));
467: for (jj=0;jj<mid;jj++) {
468: for (ii=0;ii<mid;ii++) {
469: off = jj*p*rowA+ii*n;
470: for (j=0;j<p;j++)
471: for (i=0;i<n;i++) Q[off+j*rowA+i] = S[((jj+ii)*n+j)*n+i];
472: }
473: }
474: PetscCallBLAS("LAPACKgesvd",LAPACKgesvd_("S","S",&rowA,&colA,Q,&rowA,sigma,U,&rowA,V,&colA,work,&lwork,rwork,&info));
475: SlepcCheckLapackInfo("gesvd",info);
477: rk = colA;
478: for (i=1;i<colA;i++) if (sigma[i]/sigma[0]<PETSC_MACHINE_EPSILON*1e4) {rk = i; break;}
479: if (rk<colA || p==n) break;
480: pp *= 2;
481: } while (pp<=n);
482: PetscCall(PetscInfo(ds,"Solving generalized eigenproblem of size %" PetscInt_FMT "\n",rk));
483: for (jj=0;jj<mid;jj++) {
484: for (ii=0;ii<mid;ii++) {
485: off = jj*p*rowA+ii*n;
486: for (j=0;j<p;j++)
487: for (i=0;i<n;i++) Q[off+j*rowA+i] = S[((jj+ii+1)*n+j)*n+i];
488: }
489: }
490: PetscCall(PetscBLASIntCast(rk,&rk_));
491: PetscCallBLAS("BLASgemm",BLASgemm_("N","C",&rowA,&rk_,&colA,&sone,Q,&rowA,V,&colA,&szero,Z,&rowA));
492: PetscCallBLAS("BLASgemm",BLASgemm_("C","N",&rk_,&rk_,&rowA,&sone,U,&rowA,Z,&rowA,&szero,Q,&rk_));
493: PetscCall(PetscArrayzero(Z,n*mid*n*mid));
494: for (j=0;j<rk;j++) Z[j+j*rk_] = sigma[j];
495: PetscCallBLAS("LAPACK" LAPGEEV,LAPACKggevalt_("N","V",&rk_,Q,&rk_,Z,&rk_,alpha,beta,NULL,&ld,V,&rk_,work,&lwork,rwork,&info));
496: SlepcCheckLapackInfo(LAPGEEV,info);
497: for (i=0;i<rk;i++) wr[i] = (center+radius*alpha[i]/beta[i])*rgscale;
498: PetscCall(PetscMalloc1(rk,&inside));
499: PetscCall(RGCheckInside(ctx->rg,rk,wr,wi,inside));
500: k=0;
501: for (i=0;i<rk;i++)
502: if (inside[i]==1) inside[k++] = i;
503: /* Discard values outside region */
504: lds = ld*mid;
505: PetscCall(PetscArrayzero(Q,lds*lds));
506: PetscCall(PetscArrayzero(Z,lds*lds));
507: for (i=0;i<k;i++) Q[i+i*lds] = (center*beta[inside[i]]+radius*alpha[inside[i]])*rgscale;
508: for (i=0;i<k;i++) Z[i+i*lds] = beta[inside[i]];
509: for (i=0;i<k;i++) wr[i] = Q[i+i*lds]/Z[i+i*lds];
510: for (j=0;j<k;j++) for (i=0;i<rk;i++) V[j*rk+i] = sigma[i]*V[inside[j]*rk+i];
511: PetscCall(PetscBLASIntCast(k,&k_));
512: PetscCall(MatDenseGetArray(ds->omat[DS_MAT_X],&X));
513: PetscCallBLAS("BLASgemm",BLASgemm_("N","N",&n,&k_,&rk_,&sone,U,&rowA,V,&rk_,&szero,X,&ld));
514: /* Normalize */
515: for (j=0;j<k;j++) {
516: norm = BLASnrm2_(&n,X+ld*j,&one);
517: for (i=0;i<n;i++) X[ld*j+i] /= norm;
518: }
519: PetscCall(PetscFree(inside));
520: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_X],&X));
521: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_Q],&Q));
522: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_Z],&Z));
523: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_U],&U));
524: PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_V],&V));
526: /* Newton refinement */
527: if (ctx->Nit) PetscCall(DSNEPNewtonRefine(ds,k,wr));
528: ds->t = k;
529: PetscCall(PetscRandomDestroy(&rand));
530: PetscFunctionReturn(PETSC_SUCCESS);
531: }
532: #endif
534: #if !defined(PETSC_HAVE_MPIUNI)
535: static PetscErrorCode DSSynchronize_NEP(DS ds,PetscScalar eigr[],PetscScalar eigi[])
536: {
537: DS_NEP *ctx = (DS_NEP*)ds->data;
538: PetscInt ld=ds->ld,k=0;
539: PetscMPIInt n,n2,rank,size,off=0;
540: PetscScalar *X;
542: PetscFunctionBegin;
543: if (!ds->method) { /* SLP */
544: if (ds->state>=DS_STATE_CONDENSED) k += ds->n;
545: if (eigr) k += 1;
546: if (eigi) k += 1;
547: PetscCall(PetscMPIIntCast(1,&n));
548: PetscCall(PetscMPIIntCast(ds->n,&n2));
549: } else { /* Contour */
550: if (ds->state>=DS_STATE_CONDENSED) k += ctx->max_mid*ds->n*ld;
551: if (eigr) k += ctx->max_mid*ds->n;
552: if (eigi) k += ctx->max_mid*ds->n;
553: PetscCall(PetscMPIIntCast(ctx->max_mid*ds->n,&n));
554: PetscCall(PetscMPIIntCast(ctx->max_mid*ds->n*ld,&n2));
555: }
556: PetscCall(DSAllocateWork_Private(ds,k,0,0));
557: PetscCall(PetscMPIIntCast(k*sizeof(PetscScalar),&size));
558: if (ds->state>=DS_STATE_CONDENSED) PetscCall(MatDenseGetArray(ds->omat[DS_MAT_X],&X));
559: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ds),&rank));
560: if (!rank) {
561: if (ds->state>=DS_STATE_CONDENSED) PetscCallMPI(MPI_Pack(X,n2,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
562: if (eigr) PetscCallMPI(MPI_Pack(eigr,n,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
563: #if !defined(PETSC_USE_COMPLEX)
564: if (eigi) PetscCallMPI(MPI_Pack(eigi,n,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds)));
565: #endif
566: }
567: PetscCallMPI(MPI_Bcast(ds->work,size,MPI_BYTE,0,PetscObjectComm((PetscObject)ds)));
568: if (rank) {
569: if (ds->state>=DS_STATE_CONDENSED) PetscCallMPI(MPI_Unpack(ds->work,size,&off,X,n2,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
570: if (eigr) PetscCallMPI(MPI_Unpack(ds->work,size,&off,eigr,n,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
571: #if !defined(PETSC_USE_COMPLEX)
572: if (eigi) PetscCallMPI(MPI_Unpack(ds->work,size,&off,eigi,n,MPIU_SCALAR,PetscObjectComm((PetscObject)ds)));
573: #endif
574: }
575: if (ds->state>=DS_STATE_CONDENSED) PetscCall(MatDenseRestoreArray(ds->omat[DS_MAT_X],&X));
576: PetscFunctionReturn(PETSC_SUCCESS);
577: }
578: #endif
580: static PetscErrorCode DSNEPSetFN_NEP(DS ds,PetscInt n,FN fn[])
581: {
582: DS_NEP *ctx = (DS_NEP*)ds->data;
583: PetscInt i;
585: PetscFunctionBegin;
586: PetscCheck(n>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more functions, you have %" PetscInt_FMT,n);
587: PetscCheck(n<=DS_NUM_EXTRA,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too many functions, you specified %" PetscInt_FMT " but the limit is %d",n,DS_NUM_EXTRA);
588: if (ds->ld) PetscCall(PetscInfo(ds,"DSNEPSetFN() called after DSAllocate()\n"));
589: for (i=0;i<n;i++) PetscCall(PetscObjectReference((PetscObject)fn[i]));
590: for (i=0;i<ctx->nf;i++) PetscCall(FNDestroy(&ctx->f[i]));
591: for (i=0;i<n;i++) ctx->f[i] = fn[i];
592: ctx->nf = n;
593: PetscFunctionReturn(PETSC_SUCCESS);
594: }
596: /*@
597: DSNEPSetFN - Sets a number of functions that define the nonlinear
598: eigenproblem.
600: Collective
602: Input Parameters:
603: + ds - the direct solver context
604: . n - number of functions
605: - fn - array of functions
607: Notes:
608: The nonlinear eigenproblem is defined in terms of the split nonlinear
609: operator T(lambda) = sum_i A_i*f_i(lambda).
611: This function must be called before DSAllocate(). Then DSAllocate()
612: will allocate an extra matrix A_i for each function, which can be
613: filled in the usual way.
615: Level: advanced
617: .seealso: DSNEPGetFN(), DSAllocate()
618: @*/
619: PetscErrorCode DSNEPSetFN(DS ds,PetscInt n,FN fn[])
620: {
621: PetscInt i;
623: PetscFunctionBegin;
626: PetscAssertPointer(fn,3);
627: for (i=0;i<n;i++) {
629: PetscCheckSameComm(ds,1,fn[i],3);
630: }
631: PetscTryMethod(ds,"DSNEPSetFN_C",(DS,PetscInt,FN[]),(ds,n,fn));
632: PetscFunctionReturn(PETSC_SUCCESS);
633: }
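/*
   Minimal caller-side sketch (illustrative only; ds is an already created DS
   and f[] holds nf previously built FN objects):

      PetscCall(DSSetType(ds,DSNEP));
      PetscCall(DSNEPSetFN(ds,nf,f));            before DSAllocate()
      PetscCall(DSAllocate(ds,ld));
      PetscCall(DSSetDimensions(ds,n,0,0));
      PetscCall(DSGetArray(ds,DSMatExtra[0],&E));
      ... fill E[i+j*ld] with the entries of E_0, and similarly the other E_i ...
      PetscCall(DSRestoreArray(ds,DSMatExtra[0],&E));
*/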
635: static PetscErrorCode DSNEPGetFN_NEP(DS ds,PetscInt k,FN *fn)
636: {
637: DS_NEP *ctx = (DS_NEP*)ds->data;
639: PetscFunctionBegin;
640: PetscCheck(k>=0 && k<ctx->nf,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"k must be between 0 and %" PetscInt_FMT,ctx->nf-1);
641: *fn = ctx->f[k];
642: PetscFunctionReturn(PETSC_SUCCESS);
643: }
645: /*@
646: DSNEPGetFN - Gets the functions associated with the nonlinear DS.
648: Not Collective
650: Input Parameters:
651: + ds - the direct solver context
652: - k - the index of the requested function (starting in 0)
654: Output Parameter:
655: . fn - the function
657: Level: advanced
659: .seealso: DSNEPSetFN()
660: @*/
661: PetscErrorCode DSNEPGetFN(DS ds,PetscInt k,FN *fn)
662: {
663: PetscFunctionBegin;
665: PetscAssertPointer(fn,3);
666: PetscUseMethod(ds,"DSNEPGetFN_C",(DS,PetscInt,FN*),(ds,k,fn));
667: PetscFunctionReturn(PETSC_SUCCESS);
668: }
670: static PetscErrorCode DSNEPGetNumFN_NEP(DS ds,PetscInt *n)
671: {
672: DS_NEP *ctx = (DS_NEP*)ds->data;
674: PetscFunctionBegin;
675: *n = ctx->nf;
676: PetscFunctionReturn(PETSC_SUCCESS);
677: }
679: /*@
680: DSNEPGetNumFN - Returns the number of functions stored internally by
681: the DS.
683: Not Collective
685: Input Parameter:
686: . ds - the direct solver context
688: Output Parameters:
689: . n - the number of functions passed in DSNEPSetFN()
691: Level: advanced
693: .seealso: DSNEPSetFN()
694: @*/
695: PetscErrorCode DSNEPGetNumFN(DS ds,PetscInt *n)
696: {
697: PetscFunctionBegin;
699: PetscAssertPointer(n,2);
700: PetscUseMethod(ds,"DSNEPGetNumFN_C",(DS,PetscInt*),(ds,n));
701: PetscFunctionReturn(PETSC_SUCCESS);
702: }
704: static PetscErrorCode DSNEPSetMinimality_NEP(DS ds,PetscInt n)
705: {
706: DS_NEP *ctx = (DS_NEP*)ds->data;
708: PetscFunctionBegin;
709: if (n == PETSC_DECIDE || n == PETSC_DEFAULT) ctx->max_mid = 4;
710: else {
711: PetscCheck(n>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The minimality value must be > 0");
712: ctx->max_mid = n;
713: }
714: PetscFunctionReturn(PETSC_SUCCESS);
715: }
717: /*@
718: DSNEPSetMinimality - Sets the maximum minimality index used internally by
719: the DSNEP.
721: Logically Collective
723: Input Parameters:
724: + ds - the direct solver context
725: - n - the maximum minimality index
727: Options Database Key:
728: . -ds_nep_minimality <n> - sets the maximum minimality index
730: Notes:
731: The maximum minimality index is used only in the contour integral method,
732: and is related to the highest moments used in the method. The default
733: value is 4; a larger value might give better accuracy in some cases, but
734: at a higher cost.
736: Level: advanced
738: .seealso: DSNEPGetMinimality()
739: @*/
740: PetscErrorCode DSNEPSetMinimality(DS ds,PetscInt n)
741: {
742: PetscFunctionBegin;
745: PetscTryMethod(ds,"DSNEPSetMinimality_C",(DS,PetscInt),(ds,n));
746: PetscFunctionReturn(PETSC_SUCCESS);
747: }
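/*
   With maximum minimality index m, the contour solver accumulates 2*m moments
   and works with block Hankel matrices having m*n rows, so up to m*n eigenvalues
   inside the region can be recovered; this is also why several work matrices are
   sized with m*n rows in DSMatGetSize_NEP().
*/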
749: static PetscErrorCode DSNEPGetMinimality_NEP(DS ds,PetscInt *n)
750: {
751: DS_NEP *ctx = (DS_NEP*)ds->data;
753: PetscFunctionBegin;
754: *n = ctx->max_mid;
755: PetscFunctionReturn(PETSC_SUCCESS);
756: }
758: /*@
759: DSNEPGetMinimality - Returns the maximum minimality index used internally by
760: the DSNEP.
762: Not Collective
764: Input Parameter:
765: . ds - the direct solver context
767: Output Parameters:
768: . n - the maximum minimality index passed in DSNEPSetMinimality()
770: Level: advanced
772: .seealso: DSNEPSetMinimality()
773: @*/
774: PetscErrorCode DSNEPGetMinimality(DS ds,PetscInt *n)
775: {
776: PetscFunctionBegin;
778: PetscAssertPointer(n,2);
779: PetscUseMethod(ds,"DSNEPGetMinimality_C",(DS,PetscInt*),(ds,n));
780: PetscFunctionReturn(PETSC_SUCCESS);
781: }
783: static PetscErrorCode DSNEPSetRefine_NEP(DS ds,PetscReal tol,PetscInt its)
784: {
785: DS_NEP *ctx = (DS_NEP*)ds->data;
787: PetscFunctionBegin;
788: if (tol == (PetscReal)PETSC_DETERMINE) {
789: ctx->rtol = PETSC_MACHINE_EPSILON/PetscSqrtReal(PETSC_SQRT_MACHINE_EPSILON);
790: } else if (tol != (PetscReal)PETSC_CURRENT) {
791: PetscCheck(tol>0.0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The tolerance must be > 0");
792: ctx->rtol = tol;
793: }
794: if (its == PETSC_DETERMINE) {
795: ctx->Nit = 3;
796: } else if (its != PETSC_CURRENT) {
797: PetscCheck(its>=0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The number of iterations must be >= 0");
798: ctx->Nit = its;
799: }
800: PetscFunctionReturn(PETSC_SUCCESS);
801: }
803: /*@
804: DSNEPSetRefine - Sets the tolerance and the number of iterations of Newton iterative
805: refinement for eigenpairs.
807: Logically Collective
809: Input Parameters:
810: + ds - the direct solver context
811: . tol - the tolerance
812: - its - the number of iterations
814: Options Database Key:
815: + -ds_nep_refine_tol <tol> - sets the tolerance
816: - -ds_nep_refine_its <its> - sets the number of Newton iterations
818: Notes:
819: Iterative refinement of eigenpairs is currently used only in the contour
820: integral method.
822: Use PETSC_CURRENT to retain the current value of any of the parameters.
823: Use PETSC_DETERMINE for either argument to assign a default value computed
824: internally.
826: Level: advanced
828: .seealso: DSNEPGetRefine()
829: @*/
830: PetscErrorCode DSNEPSetRefine(DS ds,PetscReal tol,PetscInt its)
831: {
832: PetscFunctionBegin;
836: PetscTryMethod(ds,"DSNEPSetRefine_C",(DS,PetscReal,PetscInt),(ds,tol,its));
837: PetscFunctionReturn(PETSC_SUCCESS);
838: }
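/*
   For example, DSNEPSetRefine(ds,1e-10,2) requests at most 2 Newton steps per
   eigenpair with relative residual tolerance 1e-10, whereas
   DSNEPSetRefine(ds,PETSC_CURRENT,0) disables refinement while keeping the
   stored tolerance unchanged.
*/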
840: static PetscErrorCode DSNEPGetRefine_NEP(DS ds,PetscReal *tol,PetscInt *its)
841: {
842: DS_NEP *ctx = (DS_NEP*)ds->data;
844: PetscFunctionBegin;
845: if (tol) *tol = ctx->rtol;
846: if (its) *its = ctx->Nit;
847: PetscFunctionReturn(PETSC_SUCCESS);
848: }
850: /*@
851: DSNEPGetRefine - Returns the tolerance and the number of iterations of Newton iterative
852: refinement for eigenpairs.
854: Not Collective
856: Input Parameter:
857: . ds - the direct solver context
859: Output Parameters:
860: + tol - the tolerance
861: - its - the number of iterations
863: Level: advanced
865: .seealso: DSNEPSetRefine()
866: @*/
867: PetscErrorCode DSNEPGetRefine(DS ds,PetscReal *tol,PetscInt *its)
868: {
869: PetscFunctionBegin;
871: PetscUseMethod(ds,"DSNEPGetRefine_C",(DS,PetscReal*,PetscInt*),(ds,tol,its));
872: PetscFunctionReturn(PETSC_SUCCESS);
873: }
875: static PetscErrorCode DSNEPSetIntegrationPoints_NEP(DS ds,PetscInt ip)
876: {
877: DS_NEP *ctx = (DS_NEP*)ds->data;
879: PetscFunctionBegin;
880: if (ip == PETSC_DECIDE || ip == PETSC_DEFAULT) ctx->nnod = 64;
881: else {
882: PetscCheck(ip>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The number of integration points must be > 0");
883: ctx->nnod = ip;
884: }
885: PetscCall(PetscLayoutDestroy(&ctx->map)); /* need to redistribute at next solve */
886: PetscFunctionReturn(PETSC_SUCCESS);
887: }
889: /*@
890: DSNEPSetIntegrationPoints - Sets the number of integration points to be
891: used in the contour integral method.
893: Logically Collective
895: Input Parameters:
896: + ds - the direct solver context
897: - ip - the number of integration points
899: Options Database Key:
900: . -ds_nep_integration_points <ip> - sets the number of integration points
902: Notes:
903: This parameter is relevant only in the contour integral method.
905: Level: advanced
907: .seealso: DSNEPGetIntegrationPoints()
908: @*/
909: PetscErrorCode DSNEPSetIntegrationPoints(DS ds,PetscInt ip)
910: {
911: PetscFunctionBegin;
914: PetscTryMethod(ds,"DSNEPSetIntegrationPoints_C",(DS,PetscInt),(ds,ip));
915: PetscFunctionReturn(PETSC_SUCCESS);
916: }
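/*
   The ip quadrature nodes z_k and weights w_k are generated by
   RGComputeQuadrature() with the trapezoidal rule on the ellipse, and with
   DS_PARALLEL_DISTRIBUTED the nodes are split among the MPI processes through
   the PetscLayout ctx->map, which is why the layout is destroyed above whenever
   the number of points changes.
*/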
918: static PetscErrorCode DSNEPGetIntegrationPoints_NEP(DS ds,PetscInt *ip)
919: {
920: DS_NEP *ctx = (DS_NEP*)ds->data;
922: PetscFunctionBegin;
923: *ip = ctx->nnod;
924: PetscFunctionReturn(PETSC_SUCCESS);
925: }
927: /*@
928: DSNEPGetIntegrationPoints - Returns the number of integration points used
929: in the contour integral method.
931: Not Collective
933: Input Parameter:
934: . ds - the direct solver context
936: Output Parameters:
937: . ip - the number of integration points
939: Level: advanced
941: .seealso: DSNEPSetIntegrationPoints()
942: @*/
943: PetscErrorCode DSNEPGetIntegrationPoints(DS ds,PetscInt *ip)
944: {
945: PetscFunctionBegin;
947: PetscAssertPointer(ip,2);
948: PetscUseMethod(ds,"DSNEPGetIntegrationPoints_C",(DS,PetscInt*),(ds,ip));
949: PetscFunctionReturn(PETSC_SUCCESS);
950: }
952: static PetscErrorCode DSNEPSetSamplingSize_NEP(DS ds,PetscInt p)
953: {
954: DS_NEP *ctx = (DS_NEP*)ds->data;
956: PetscFunctionBegin;
957: if (p == PETSC_DECIDE || p == PETSC_DEFAULT) ctx->spls = 0;
958: else {
959: PetscCheck(p>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The sample size must be > 0");
960: PetscCheck(p>=20,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The sample size cannot be smaller than 20");
961: ctx->spls = p;
962: }
963: PetscFunctionReturn(PETSC_SUCCESS);
964: }
966: /*@
967: DSNEPSetSamplingSize - Sets the number of sampling columns to be
968: used in the contour integral method.
970: Logically Collective
972: Input Parameters:
973: + ds - the direct solver context
974: - p - the number of columns for the sampling matrix
976: Options Database Key:
977: . -ds_nep_sampling_size <p> - sets the number of sampling columns
979: Notes:
980: This parameter is relevant only in the contour integral method.
982: Level: advanced
984: .seealso: DSNEPGetSamplingSize()
985: @*/
986: PetscErrorCode DSNEPSetSamplingSize(DS ds,PetscInt p)
987: {
988: PetscFunctionBegin;
991: PetscTryMethod(ds,"DSNEPSetSamplingSize_C",(DS,PetscInt),(ds,p));
992: PetscFunctionReturn(PETSC_SUCCESS);
993: }
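/*
   If no sampling size is set, the contour solver probes with p = n columns;
   otherwise it starts with p = min(spls,n) columns of the random probing matrix
   and doubles p until the singular values of the Hankel matrix reveal a rank
   smaller than max_mid*p, or p reaches n (see the do-while loop in
   DSSolve_NEP_Contour).
*/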
995: static PetscErrorCode DSNEPGetSamplingSize_NEP(DS ds,PetscInt *p)
996: {
997: DS_NEP *ctx = (DS_NEP*)ds->data;
999: PetscFunctionBegin;
1000: *p = ctx->spls;
1001: PetscFunctionReturn(PETSC_SUCCESS);
1002: }
1004: /*@
1005: DSNEPGetSamplingSize - Returns the number of sampling columns used
1006: in the contour integral method.
1008: Not Collective
1010: Input Parameter:
1011: . ds - the direct solver context
1013: Output Parameters:
1014: . p - the number of columns for the sampling matrix
1016: Level: advanced
1018: .seealso: DSNEPSetSamplingSize()
1019: @*/
1020: PetscErrorCode DSNEPGetSamplingSize(DS ds,PetscInt *p)
1021: {
1022: PetscFunctionBegin;
1024: PetscAssertPointer(p,2);
1025: PetscUseMethod(ds,"DSNEPGetSamplingSize_C",(DS,PetscInt*),(ds,p));
1026: PetscFunctionReturn(PETSC_SUCCESS);
1027: }
1029: static PetscErrorCode DSNEPSetComputeMatrixFunction_NEP(DS ds,DSNEPMatrixFunctionFn *fun,void *ctx)
1030: {
1031: DS_NEP *dsctx = (DS_NEP*)ds->data;
1033: PetscFunctionBegin;
1034: dsctx->computematrix = fun;
1035: dsctx->computematrixctx = ctx;
1036: PetscFunctionReturn(PETSC_SUCCESS);
1037: }
1039: /*@C
1040: DSNEPSetComputeMatrixFunction - Sets a user-provided subroutine to compute
1041: the matrices T(lambda) or T'(lambda).
1043: Logically Collective
1045: Input Parameters:
1046: + ds - the direct solver context
1047: . fun - matrix function evaluation routine, see DSNEPMatrixFunctionFn for the calling sequence
1048: - ctx - a context pointer (the last parameter to the user function)
1050: Note:
1051: The result is computed as T(lambda) = sum_i E_i*f_i(lambda), and similarly
1052: for the derivative.
1054: Level: developer
1056: .seealso: DSNEPGetComputeMatrixFunction()
1057: @*/
1058: PetscErrorCode DSNEPSetComputeMatrixFunction(DS ds,DSNEPMatrixFunctionFn *fun,void *ctx)
1059: {
1060: PetscFunctionBegin;
1062: PetscTryMethod(ds,"DSNEPSetComputeMatrixFunction_C",(DS,DSNEPMatrixFunctionFn*,void*),(ds,fun,ctx));
1063: PetscFunctionReturn(PETSC_SUCCESS);
1064: }
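/*
   Illustrative sketch of a user callback with the DSNEPMatrixFunctionFn calling
   sequence, for a hypothetical problem T(lambda) = A + exp(-tau*lambda)*B whose
   dense arrays are carried in a user-defined context (the names MyCtx, MyComputeT
   and myctx are made up for the example):

      typedef struct { PetscInt n,lda; PetscScalar *A,*B; PetscReal tau; } MyCtx;

      static PetscErrorCode MyComputeT(DS ds,PetscScalar lambda,PetscBool deriv,DSMatType mat,void *ptr)
      {
        MyCtx       *uc = (MyCtx*)ptr;
        Mat         M;
        PetscScalar *T,c;
        PetscInt    i,j,ld;

        PetscFunctionBegin;
        c = PetscExpScalar(-uc->tau*lambda);
        if (deriv) c = -uc->tau*c;                derivative of exp(-tau*lambda)
        PetscCall(DSGetMat(ds,mat,&M));
        PetscCall(MatDenseGetLDA(M,&ld));
        PetscCall(MatDenseGetArray(M,&T));
        for (j=0;j<uc->n;j++) for (i=0;i<uc->n;i++) {
          T[i+j*ld] = c*uc->B[i+j*uc->lda];
          if (!deriv) T[i+j*ld] += uc->A[i+j*uc->lda];
        }
        PetscCall(MatDenseRestoreArray(M,&T));
        PetscCall(DSRestoreMat(ds,mat,&M));
        PetscFunctionReturn(PETSC_SUCCESS);
      }

   registered with DSNEPSetComputeMatrixFunction(ds,MyComputeT,&myctx).
*/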
1066: static PetscErrorCode DSNEPGetComputeMatrixFunction_NEP(DS ds,DSNEPMatrixFunctionFn **fun,void **ctx)
1067: {
1068: DS_NEP *dsctx = (DS_NEP*)ds->data;
1070: PetscFunctionBegin;
1071: if (fun) *fun = dsctx->computematrix;
1072: if (ctx) *ctx = dsctx->computematrixctx;
1073: PetscFunctionReturn(PETSC_SUCCESS);
1074: }
1076: /*@C
1077: DSNEPGetComputeMatrixFunction - Returns the user-provided callback function
1078: set in DSNEPSetComputeMatrixFunction().
1080: Not Collective
1082: Input Parameter:
1083: . ds - the direct solver context
1085: Output Parameters:
1086: + fun - the pointer to the user function
1087: - ctx - the context pointer
1089: Level: developer
1091: .seealso: DSNEPSetComputeMatrixFunction()
1092: @*/
1093: PetscErrorCode DSNEPGetComputeMatrixFunction(DS ds,DSNEPMatrixFunctionFn **fun,void **ctx)
1094: {
1095: PetscFunctionBegin;
1097: PetscUseMethod(ds,"DSNEPGetComputeMatrixFunction_C",(DS,DSNEPMatrixFunctionFn**,void**),(ds,fun,ctx));
1098: PetscFunctionReturn(PETSC_SUCCESS);
1099: }
1101: static PetscErrorCode DSNEPSetRG_NEP(DS ds,RG rg)
1102: {
1103: DS_NEP *dsctx = (DS_NEP*)ds->data;
1105: PetscFunctionBegin;
1106: PetscCall(PetscObjectReference((PetscObject)rg));
1107: PetscCall(RGDestroy(&dsctx->rg));
1108: dsctx->rg = rg;
1109: PetscFunctionReturn(PETSC_SUCCESS);
1110: }
1112: /*@
1113: DSNEPSetRG - Associates a region object to the DSNEP solver.
1115: Collective
1117: Input Parameters:
1118: + ds - the direct solver context
1119: - rg - the region context
1121: Notes:
1122: The region is used only in the contour integral method, and
1123: should enclose the wanted eigenvalues.
1125: Level: developer
1127: .seealso: DSNEPGetRG()
1128: @*/
1129: PetscErrorCode DSNEPSetRG(DS ds,RG rg)
1130: {
1131: PetscFunctionBegin;
1133: if (rg) {
1135: PetscCheckSameComm(ds,1,rg,2);
1136: }
1137: PetscTryMethod(ds,"DSNEPSetRG_C",(DS,RG),(ds,rg));
1138: PetscFunctionReturn(PETSC_SUCCESS);
1139: }
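/*
   Minimal sketch of attaching a region (the contour solver accepts only
   RGELLIPSE); the numerical parameters are arbitrary example values:

      RG rg;
      PetscCall(DSNEPGetRG(ds,&rg));                      or create one and call DSNEPSetRG()
      PetscCall(RGSetType(rg,RGELLIPSE));
      PetscCall(RGEllipseSetParameters(rg,1.0,0.5,1.0));  center, radius, vscale
      PetscCall(DSSetMethod(ds,1));                       select the contour method
*/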
1141: static PetscErrorCode DSNEPGetRG_NEP(DS ds,RG *rg)
1142: {
1143: DS_NEP *ctx = (DS_NEP*)ds->data;
1145: PetscFunctionBegin;
1146: if (!ctx->rg) {
1147: PetscCall(RGCreate(PetscObjectComm((PetscObject)ds),&ctx->rg));
1148: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->rg,(PetscObject)ds,1));
1149: PetscCall(RGSetOptionsPrefix(ctx->rg,((PetscObject)ds)->prefix));
1150: PetscCall(RGAppendOptionsPrefix(ctx->rg,"ds_nep_"));
1151: PetscCall(PetscObjectSetOptions((PetscObject)ctx->rg,((PetscObject)ds)->options));
1152: }
1153: *rg = ctx->rg;
1154: PetscFunctionReturn(PETSC_SUCCESS);
1155: }
1157: /*@
1158: DSNEPGetRG - Obtain the region object associated to the DSNEP solver.
1160: Collective
1162: Input Parameter:
1163: . ds - the direct solver context
1165: Output Parameter:
1166: . rg - the region context
1168: Level: developer
1170: .seealso: DSNEPSetRG()
1171: @*/
1172: PetscErrorCode DSNEPGetRG(DS ds,RG *rg)
1173: {
1174: PetscFunctionBegin;
1176: PetscAssertPointer(rg,2);
1177: PetscUseMethod(ds,"DSNEPGetRG_C",(DS,RG*),(ds,rg));
1178: PetscFunctionReturn(PETSC_SUCCESS);
1179: }
1181: static PetscErrorCode DSSetFromOptions_NEP(DS ds,PetscOptionItems *PetscOptionsObject)
1182: {
1183: PetscInt k;
1184: PetscBool flg;
1185: #if defined(PETSC_USE_COMPLEX)
1186: PetscReal r;
1187: PetscBool flg1;
1188: DS_NEP *ctx = (DS_NEP*)ds->data;
1189: #endif
1191: PetscFunctionBegin;
1192: PetscOptionsHeadBegin(PetscOptionsObject,"DS NEP Options");
1194: PetscCall(PetscOptionsInt("-ds_nep_minimality","Maximum minimality index","DSNEPSetMinimality",4,&k,&flg));
1195: if (flg) PetscCall(DSNEPSetMinimality(ds,k));
1197: PetscCall(PetscOptionsInt("-ds_nep_integration_points","Number of integration points","DSNEPSetIntegrationPoints",64,&k,&flg));
1198: if (flg) PetscCall(DSNEPSetIntegrationPoints(ds,k));
1200: PetscCall(PetscOptionsInt("-ds_nep_sampling_size","Number of sampling columns","DSNEPSetSamplingSize",0,&k,&flg));
1201: if (flg) PetscCall(DSNEPSetSamplingSize(ds,k));
1203: #if defined(PETSC_USE_COMPLEX)
1204: r = ctx->rtol;
1205: PetscCall(PetscOptionsReal("-ds_nep_refine_tol","Refinement tolerance","DSNEPSetRefine",ctx->rtol,&r,&flg1));
1206: k = ctx->Nit;
1207: PetscCall(PetscOptionsInt("-ds_nep_refine_its","Number of iterative refinement iterations","DSNEPSetRefine",ctx->Nit,&k,&flg));
1208: if (flg1||flg) PetscCall(DSNEPSetRefine(ds,r,k));
1210: if (ds->method==1) {
1211: if (!ctx->rg) PetscCall(DSNEPGetRG(ds,&ctx->rg));
1212: PetscCall(RGSetFromOptions(ctx->rg));
1213: }
1214: #endif
1216: PetscOptionsHeadEnd();
1217: PetscFunctionReturn(PETSC_SUCCESS);
1218: }
1220: static PetscErrorCode DSDestroy_NEP(DS ds)
1221: {
1222: DS_NEP *ctx = (DS_NEP*)ds->data;
1223: PetscInt i;
1225: PetscFunctionBegin;
1226: for (i=0;i<ctx->nf;i++) PetscCall(FNDestroy(&ctx->f[i]));
1227: PetscCall(RGDestroy(&ctx->rg));
1228: PetscCall(PetscLayoutDestroy(&ctx->map));
1229: PetscCall(PetscFree(ds->data));
1230: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetFN_C",NULL));
1231: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetFN_C",NULL));
1232: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetNumFN_C",NULL));
1233: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetMinimality_C",NULL));
1234: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetMinimality_C",NULL));
1235: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetRefine_C",NULL));
1236: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetRefine_C",NULL));
1237: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetIntegrationPoints_C",NULL));
1238: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetIntegrationPoints_C",NULL));
1239: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetSamplingSize_C",NULL));
1240: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetSamplingSize_C",NULL));
1241: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetRG_C",NULL));
1242: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetRG_C",NULL));
1243: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetComputeMatrixFunction_C",NULL));
1244: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetComputeMatrixFunction_C",NULL));
1245: PetscFunctionReturn(PETSC_SUCCESS);
1246: }
1248: static PetscErrorCode DSMatGetSize_NEP(DS ds,DSMatType t,PetscInt *rows,PetscInt *cols)
1249: {
1250: DS_NEP *ctx = (DS_NEP*)ds->data;
1252: PetscFunctionBegin;
1253: *rows = ds->n;
1254: if (t==DS_MAT_Q || t==DS_MAT_Z || t==DS_MAT_U || t==DS_MAT_V) *rows *= ctx->max_mid;
1255: *cols = ds->n;
1256: if (t==DS_MAT_Q || t==DS_MAT_Z || t==DS_MAT_U || t==DS_MAT_V || t==DS_MAT_X || t==DS_MAT_Y) *cols *= ctx->max_mid;
1257: PetscFunctionReturn(PETSC_SUCCESS);
1258: }
1260: /*MC
1261: DSNEP - Dense Nonlinear Eigenvalue Problem.
1263: Level: beginner
1265: Notes:
1266: The problem is expressed as T(lambda)*x = 0, where T(lambda) is a
1267: parameter-dependent matrix written as T(lambda) = sum_i E_i*f_i(lambda).
1268: The eigenvalues lambda are returned in the argument arrays of DSSolve().
1270: The coefficient matrices E_i are the extra matrices of the DS, and
1271: the scalar functions f_i are passed via DSNEPSetFN(). Optionally, a
1272: callback function to fill the E_i matrices can be set with
1273: DSNEPSetComputeMatrixFunction().
1275: Used DS matrices:
1276: + DS_MAT_Ex - coefficient matrices of the split form of T(lambda)
1277: . DS_MAT_X - eigenvectors
1278: . DS_MAT_A - (workspace) T(lambda) evaluated at a given lambda (SLP only)
1279: . DS_MAT_B - (workspace) T'(lambda) evaluated at a given lambda (SLP only)
1280: . DS_MAT_Q - (workspace) left Hankel matrix (contour only)
1281: . DS_MAT_Z - (workspace) right Hankel matrix (contour only)
1282: . DS_MAT_U - (workspace) left singular vectors (contour only)
1283: . DS_MAT_V - (workspace) right singular vectors (contour only)
1284: - DS_MAT_W - (workspace) auxiliary matrix of size nxn
1286: Implemented methods:
1287: + 0 - Successive Linear Problems (SLP), computes just one eigenpair
1288: - 1 - Contour integral, computes all eigenvalues inside a region
1290: .seealso: DSCreate(), DSSetType(), DSType, DSNEPSetFN(), DSNEPSetComputeMatrixFunction()
1291: M*/
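/*
   Putting the pieces together, a rough caller-side sequence for the contour
   method would be as follows (wr and wi are user arrays of length at least
   max_mid*n, and the FN and RG objects are set up as sketched earlier):

      PetscCall(DSCreate(comm,&ds));
      PetscCall(DSSetType(ds,DSNEP));
      PetscCall(DSSetMethod(ds,1));
      PetscCall(DSNEPSetFN(ds,nf,f));
      PetscCall(DSNEPSetRG(ds,rg));
      PetscCall(DSAllocate(ds,ld));
      PetscCall(DSSetDimensions(ds,n,0,0));
      ... fill the coefficient matrices E_i ...
      PetscCall(DSSolve(ds,wr,wi));
      PetscCall(DSGetMat(ds,DS_MAT_X,&X));       eigenvectors in the leading columns
      ... use X ...
      PetscCall(DSRestoreMat(ds,DS_MAT_X,&X));
      PetscCall(DSDestroy(&ds));
*/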
1292: SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS ds)
1293: {
1294: DS_NEP *ctx;
1296: PetscFunctionBegin;
1297: PetscCall(PetscNew(&ctx));
1298: ds->data = (void*)ctx;
1299: ctx->max_mid = 4;
1300: ctx->nnod = 64;
1301: ctx->Nit = 3;
1302: ctx->rtol = PETSC_MACHINE_EPSILON/PetscSqrtReal(PETSC_SQRT_MACHINE_EPSILON);
1304: ds->ops->allocate = DSAllocate_NEP;
1305: ds->ops->setfromoptions = DSSetFromOptions_NEP;
1306: ds->ops->view = DSView_NEP;
1307: ds->ops->vectors = DSVectors_NEP;
1308: ds->ops->solve[0] = DSSolve_NEP_SLP;
1309: #if defined(PETSC_USE_COMPLEX)
1310: ds->ops->solve[1] = DSSolve_NEP_Contour;
1311: #endif
1312: ds->ops->sort = DSSort_NEP;
1313: #if !defined(PETSC_HAVE_MPIUNI)
1314: ds->ops->synchronize = DSSynchronize_NEP;
1315: #endif
1316: ds->ops->destroy = DSDestroy_NEP;
1317: ds->ops->matgetsize = DSMatGetSize_NEP;
1319: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetFN_C",DSNEPSetFN_NEP));
1320: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetFN_C",DSNEPGetFN_NEP));
1321: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetNumFN_C",DSNEPGetNumFN_NEP));
1322: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetMinimality_C",DSNEPGetMinimality_NEP));
1323: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetMinimality_C",DSNEPSetMinimality_NEP));
1324: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetRefine_C",DSNEPGetRefine_NEP));
1325: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetRefine_C",DSNEPSetRefine_NEP));
1326: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetIntegrationPoints_C",DSNEPGetIntegrationPoints_NEP));
1327: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetIntegrationPoints_C",DSNEPSetIntegrationPoints_NEP));
1328: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetSamplingSize_C",DSNEPGetSamplingSize_NEP));
1329: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetSamplingSize_C",DSNEPSetSamplingSize_NEP));
1330: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetRG_C",DSNEPSetRG_NEP));
1331: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetRG_C",DSNEPGetRG_NEP));
1332: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetComputeMatrixFunction_C",DSNEPSetComputeMatrixFunction_NEP));
1333: PetscCall(PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetComputeMatrixFunction_C",DSNEPGetComputeMatrixFunction_NEP));
1334: PetscFunctionReturn(PETSC_SUCCESS);
1335: }