Actual source code: ex21.c
slepc-3.22.2 2024-12-02
1: /*
2: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3: SLEPc - Scalable Library for Eigenvalue Problem Computations
4: Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain
6: This file is part of SLEPc.
7: SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9: */
/* Help text printed by -help; also passed to SlepcInitialize() */
static char help[] = "Simple 1-D nonlinear eigenproblem (matrix-free version).\n\n"
  "The command line options are:\n"
  "  -n <n>, where <n> = number of grid subdivisions\n\n";
15: /*
16: Solve 1-D PDE
17: -u'' = lambda*u
18: on [0,1] subject to
19: u(0)=0, u'(1)=u(1)*lambda*kappa/(kappa-lambda)
20: */
22: #include <slepcnep.h>
24: /*
25: User-defined routines
26: */
27: PetscErrorCode FormFunction(NEP,PetscScalar,Mat,Mat,void*);
28: PetscErrorCode FormJacobian(NEP,PetscScalar,Mat,void*);
30: /*
31: Matrix operations and context
32: */
33: PetscErrorCode MatMult_Fun(Mat,Vec,Vec);
34: PetscErrorCode MatGetDiagonal_Fun(Mat,Vec);
35: PetscErrorCode MatDestroy_Fun(Mat);
36: PetscErrorCode MatDuplicate_Fun(Mat,MatDuplicateOption,Mat*);
37: PetscErrorCode MatMult_Jac(Mat,Vec,Vec);
38: PetscErrorCode MatGetDiagonal_Jac(Mat,Vec);
39: PetscErrorCode MatDestroy_Jac(Mat);
/*
   Context attached to each shell matrix: parameters defining T(lambda),
   plus the ranks of the neighboring processes used in the halo exchange
   performed by the MatMult callbacks
*/
typedef struct {
  PetscScalar lambda,kappa;   /* current scalar argument and spring/mass ratio */
  PetscReal   h;              /* mesh spacing */
  PetscMPIInt next,prev;      /* neighbor ranks; MPI_PROC_NULL at the chain ends */
} MatCtx;
47: /*
48: User-defined application context
49: */
/* Problem parameters shared by the Function and Jacobian shell contexts */
typedef struct {
  PetscScalar kappa;   /* ratio between stiffness of spring and attached mass */
  PetscReal   h;       /* mesh spacing */
} ApplicationCtx;
int main(int argc,char **argv)
{
  NEP            nep;             /* nonlinear eigensolver context */
  Mat            F,J;             /* Function and Jacobian matrices */
  ApplicationCtx ctx;             /* user-defined context */
  MatCtx         *ctxF,*ctxJ;     /* contexts for shell matrices */
  PetscInt       n=128,nev;
  KSP            ksp;
  PC             pc;
  PetscMPIInt    rank,size;
  PetscBool      terse;

  PetscFunctionBeginUser;
  PetscCall(SlepcInitialize(&argc,&argv,NULL,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\n1-D Nonlinear Eigenproblem, n=%" PetscInt_FMT "\n\n",n));
  ctx.h = 1.0/(PetscReal)n;   /* uniform mesh spacing on [0,1] */
  ctx.kappa = 1.0;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear eigensolver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(NEPCreate(PETSC_COMM_WORLD,&nep));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscNew(&ctxF));
  ctxF->h     = ctx.h;
  ctxF->kappa = ctx.kappa;
  /* neighbor ranks for the halo exchange in MatMult_Fun; the first/last
     process uses MPI_PROC_NULL so the same MPI_Sendrecv works at the ends */
  ctxF->next = rank==size-1? MPI_PROC_NULL: rank+1;
  ctxF->prev = rank==0? MPI_PROC_NULL: rank-1;

  /* F is a shell (matrix-free) matrix; its action is given by callbacks */
  PetscCall(MatCreateShell(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,(void*)ctxF,&F));
  PetscCall(MatShellSetOperation(F,MATOP_MULT,(void(*)(void))MatMult_Fun));
  PetscCall(MatShellSetOperation(F,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Fun));
  PetscCall(MatShellSetOperation(F,MATOP_DESTROY,(void(*)(void))MatDestroy_Fun));
  PetscCall(MatShellSetOperation(F,MATOP_DUPLICATE,(void(*)(void))MatDuplicate_Fun));

  /*
     Set Function matrix data structure and default Function evaluation
     routine
  */
  PetscCall(NEPSetFunction(nep,F,F,FormFunction,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Jacobian evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscNew(&ctxJ));
  ctxJ->h     = ctx.h;
  ctxJ->kappa = ctx.kappa;
  ctxJ->next = rank==size-1? MPI_PROC_NULL: rank+1;
  ctxJ->prev = rank==0? MPI_PROC_NULL: rank-1;

  PetscCall(MatCreateShell(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,(void*)ctxJ,&J));
  PetscCall(MatShellSetOperation(J,MATOP_MULT,(void(*)(void))MatMult_Jac));
  PetscCall(MatShellSetOperation(J,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Jac));
  PetscCall(MatShellSetOperation(J,MATOP_DESTROY,(void(*)(void))MatDestroy_Jac));

  /*
     Set Jacobian matrix data structure and default Jacobian evaluation
     routine
  */
  PetscCall(NEPSetJacobian(nep,J,FormJacobian,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* residual inverse iteration with Jacobi-preconditioned BiCGStab as the
     inner linear solver (the diagonal comes from MatGetDiagonal_Fun) */
  PetscCall(NEPSetType(nep,NEPRII));
  PetscCall(NEPRIISetLagPreconditioner(nep,0));
  PetscCall(NEPRIIGetKSP(nep,&ksp));
  PetscCall(KSPSetType(ksp,KSPBCGS));
  PetscCall(KSPGetPC(ksp,&pc));
  PetscCall(PCSetType(pc,PCJACOBI));

  /*
     Set solver parameters at runtime
  */
  PetscCall(NEPSetFromOptions(nep));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the eigensystem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(NEPSolve(nep));
  PetscCall(NEPGetDimensions(nep,&nev,NULL,NULL));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD," Number of requested eigenvalues: %" PetscInt_FMT "\n",nev));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Display solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* show detailed info unless -terse option is given by user */
  PetscCall(PetscOptionsHasName(NULL,NULL,"-terse",&terse));
  if (terse) PetscCall(NEPErrorView(nep,NEP_ERROR_RELATIVE,NULL));
  else {
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL));
    PetscCall(NEPConvergedReasonView(nep,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(NEPErrorView(nep,NEP_ERROR_RELATIVE,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
  }
  /* destroying the shell matrices triggers MatDestroy_Fun/MatDestroy_Jac,
     which free the PetscNew'd contexts */
  PetscCall(NEPDestroy(&nep));
  PetscCall(MatDestroy(&F));
  PetscCall(MatDestroy(&J));
  PetscCall(SlepcFinalize());
  return 0;
}
169: /* ------------------------------------------------------------------- */
170: /*
171: FormFunction - Computes Function matrix T(lambda)
173: Input Parameters:
174: . nep - the NEP context
175: . lambda - real part of the scalar argument
176: . ctx - optional user-defined context, as set by NEPSetFunction()
178: Output Parameters:
179: . fun - Function matrix
180: . B - optionally different preconditioning matrix
181: */
182: PetscErrorCode FormFunction(NEP nep,PetscScalar lambda,Mat fun,Mat B,void *ctx)
183: {
184: MatCtx *ctxF;
186: PetscFunctionBeginUser;
187: PetscCall(MatShellGetContext(fun,&ctxF));
188: ctxF->lambda = lambda;
189: PetscFunctionReturn(PETSC_SUCCESS);
190: }
192: /* ------------------------------------------------------------------- */
193: /*
194: FormJacobian - Computes Jacobian matrix T'(lambda)
196: Input Parameters:
197: . nep - the NEP context
198: . lambda - real part of the scalar argument
199: . ctx - optional user-defined context, as set by NEPSetJacobian()
201: Output Parameters:
202: . jac - Jacobian matrix
203: */
204: PetscErrorCode FormJacobian(NEP nep,PetscScalar lambda,Mat jac,void *ctx)
205: {
206: MatCtx *ctxJ;
208: PetscFunctionBeginUser;
209: PetscCall(MatShellGetContext(jac,&ctxJ));
210: ctxJ->lambda = lambda;
211: PetscFunctionReturn(PETSC_SUCCESS);
212: }
214: /* ------------------------------------------------------------------- */
/*
   MatMult_Fun - Computes y = T(lambda)*x for the tridiagonal finite-element
   discretization of the problem, without storing the matrix. Each process
   owns a contiguous slice of rows; the two entries needed from neighboring
   processes are obtained with MPI_Sendrecv.
*/
PetscErrorCode MatMult_Fun(Mat A,Vec x,Vec y)
{
  MatCtx            *ctx;
  PetscInt          i,n,N;
  const PetscScalar *px;
  PetscScalar       *py,c,d,de,oe,upper=0.0,lower=0.0;
  PetscReal         h;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A,&ctx));
  PetscCall(VecGetArrayRead(x,&px));
  PetscCall(VecGetArray(y,&py));
  PetscCall(VecGetSize(x,&N));
  PetscCall(VecGetLocalSize(x,&n));

  /* Halo exchange: send px[0] to prev and receive next's px[0] into 'lower';
     send px[n-1] to next and receive prev's px[n-1] into 'upper'.
     MPI_PROC_NULL at the chain ends makes these no-ops there, leaving the
     zero initialization in place. */
  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCallMPI(MPI_Sendrecv(px,1,MPIU_SCALAR,ctx->prev,0,&lower,1,MPIU_SCALAR,ctx->next,0,comm,MPI_STATUS_IGNORE));
  PetscCallMPI(MPI_Sendrecv(px+n-1,1,MPIU_SCALAR,ctx->next,0,&upper,1,MPIU_SCALAR,ctx->prev,0,comm,MPI_STATUS_IGNORE));

  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);   /* boundary-condition factor */
  d = N;                                     /* d = 1/h since h = 1/N */
  de = 2.0*(d-ctx->lambda*h/3.0); /* diagonal entry */
  oe = -d-ctx->lambda*h/6.0;      /* offdiagonal entry */
  py[0] = oe*upper + de*px[0] + oe*px[1];
  for (i=1;i<n-1;i++) py[i] = oe*px[i-1] +de*px[i] + oe*px[i+1];
  /* the u'(1) boundary condition modifies only the last row of the
     global matrix, i.e. the last local row of the last process */
  if (ctx->next==MPI_PROC_NULL) de = d-ctx->lambda*h/3.0+ctx->lambda*c; /* diagonal entry of last row */
  py[n-1] = oe*px[n-2] + de*px[n-1] + oe*lower;

  PetscCall(VecRestoreArrayRead(x,&px));
  PetscCall(VecRestoreArray(y,&py));
  PetscFunctionReturn(PETSC_SUCCESS);
}
250: /* ------------------------------------------------------------------- */
251: PetscErrorCode MatGetDiagonal_Fun(Mat A,Vec diag)
252: {
253: MatCtx *ctx;
254: PetscInt n,N;
255: PetscScalar *pd,c,d;
256: PetscReal h;
258: PetscFunctionBeginUser;
259: PetscCall(MatShellGetContext(A,&ctx));
260: PetscCall(VecGetSize(diag,&N));
261: PetscCall(VecGetLocalSize(diag,&n));
262: h = ctx->h;
263: c = ctx->kappa/(ctx->lambda-ctx->kappa);
264: d = N;
265: PetscCall(VecSet(diag,2.0*(d-ctx->lambda*h/3.0)));
266: PetscCall(VecGetArray(diag,&pd));
267: pd[n-1] = d-ctx->lambda*h/3.0+ctx->lambda*c;
268: PetscCall(VecRestoreArray(diag,&pd));
269: PetscFunctionReturn(PETSC_SUCCESS);
270: }
272: /* ------------------------------------------------------------------- */
273: PetscErrorCode MatDestroy_Fun(Mat A)
274: {
275: MatCtx *ctx;
277: PetscFunctionBegin;
278: PetscCall(MatShellGetContext(A,&ctx));
279: PetscCall(PetscFree(ctx));
280: PetscFunctionReturn(PETSC_SUCCESS);
281: }
283: /* ------------------------------------------------------------------- */
284: PetscErrorCode MatDuplicate_Fun(Mat A,MatDuplicateOption op,Mat *B)
285: {
286: MatCtx *actx,*bctx;
287: PetscInt m,n,M,N;
288: MPI_Comm comm;
290: PetscFunctionBegin;
291: PetscCall(MatShellGetContext(A,&actx));
292: PetscCall(MatGetSize(A,&M,&N));
293: PetscCall(MatGetLocalSize(A,&m,&n));
295: PetscCall(PetscNew(&bctx));
296: bctx->h = actx->h;
297: bctx->kappa = actx->kappa;
298: bctx->lambda = actx->lambda;
299: bctx->next = actx->next;
300: bctx->prev = actx->prev;
302: PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
303: PetscCall(MatCreateShell(comm,m,n,M,N,(void*)bctx,B));
304: PetscCall(MatShellSetOperation(*B,MATOP_MULT,(void(*)(void))MatMult_Fun));
305: PetscCall(MatShellSetOperation(*B,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Fun));
306: PetscCall(MatShellSetOperation(*B,MATOP_DESTROY,(void(*)(void))MatDestroy_Fun));
307: PetscCall(MatShellSetOperation(*B,MATOP_DUPLICATE,(void(*)(void))MatDuplicate_Fun));
308: PetscFunctionReturn(PETSC_SUCCESS);
309: }
311: /* ------------------------------------------------------------------- */
/*
   MatMult_Jac - Computes y = T'(lambda)*x, the derivative of the Function
   matrix with respect to lambda, matrix-free. Same tridiagonal structure
   and halo exchange as MatMult_Fun, with differentiated entries.
*/
PetscErrorCode MatMult_Jac(Mat A,Vec x,Vec y)
{
  MatCtx            *ctx;
  PetscInt          i,n;
  const PetscScalar *px;
  PetscScalar       *py,c,de,oe,upper=0.0,lower=0.0;
  PetscReal         h;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A,&ctx));
  PetscCall(VecGetArrayRead(x,&px));
  PetscCall(VecGetArray(y,&py));
  PetscCall(VecGetLocalSize(x,&n));

  /* Halo exchange: prev's last entry arrives in 'upper', next's first entry
     in 'lower'; MPI_PROC_NULL makes the end-of-chain exchanges no-ops */
  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCallMPI(MPI_Sendrecv(px,1,MPIU_SCALAR,ctx->prev,0,&lower,1,MPIU_SCALAR,ctx->next,0,comm,MPI_STATUS_IGNORE));
  PetscCallMPI(MPI_Sendrecv(px+n-1,1,MPIU_SCALAR,ctx->next,0,&upper,1,MPIU_SCALAR,ctx->prev,0,comm,MPI_STATUS_IGNORE));

  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  de = -2.0*h/3.0; /* diagonal entry */
  oe = -h/6.0;     /* offdiagonal entry */
  py[0] = oe*upper + de*px[0] + oe*px[1];
  for (i=1;i<n-1;i++) py[i] = oe*px[i-1] +de*px[i] + oe*px[i+1];
  /* last global row: -c*c is d/dlambda of the boundary term lambda*c */
  if (ctx->next==MPI_PROC_NULL) de = -h/3.0-c*c; /* diagonal entry of last row */
  py[n-1] = oe*px[n-2] + de*px[n-1] + oe*lower;

  PetscCall(VecRestoreArrayRead(x,&px));
  PetscCall(VecRestoreArray(y,&py));
  PetscFunctionReturn(PETSC_SUCCESS);
}
345: /* ------------------------------------------------------------------- */
346: PetscErrorCode MatGetDiagonal_Jac(Mat A,Vec diag)
347: {
348: MatCtx *ctx;
349: PetscInt n;
350: PetscScalar *pd,c;
351: PetscReal h;
353: PetscFunctionBeginUser;
354: PetscCall(MatShellGetContext(A,&ctx));
355: PetscCall(VecGetLocalSize(diag,&n));
356: h = ctx->h;
357: c = ctx->kappa/(ctx->lambda-ctx->kappa);
358: PetscCall(VecSet(diag,-2.0*h/3.0));
359: PetscCall(VecGetArray(diag,&pd));
360: pd[n-1] = -h/3.0-c*c;
361: PetscCall(VecRestoreArray(diag,&pd));
362: PetscFunctionReturn(PETSC_SUCCESS);
363: }
365: /* ------------------------------------------------------------------- */
366: PetscErrorCode MatDestroy_Jac(Mat A)
367: {
368: MatCtx *ctx;
370: PetscFunctionBegin;
371: PetscCall(MatShellGetContext(A,&ctx));
372: PetscCall(PetscFree(ctx));
373: PetscFunctionReturn(PETSC_SUCCESS);
374: }
376: /*TEST
378: testset:
379: nsize: {{1 2}}
380: args: -terse
381: requires: !single
382: output_file: output/ex21_1.out
383: filter: sed -e "s/[+-]0\.0*i//g" -e "s/+0i//g"
384: test:
385: suffix: 1_rii
386: args: -nep_type rii -nep_target 4
387: test:
388: suffix: 1_slp
389: args: -nep_type slp -nep_slp_pc_type jacobi -nep_slp_ksp_type bcgs -nep_target 10
391: TEST*/