Actual source code: ex21.c

slepc-3.17.2 2022-08-09
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

static char help[] = "Simple 1-D nonlinear eigenproblem (matrix-free version).\n\n"
  "The command line options are:\n"
  "  -n <n>, where <n> = number of grid subdivisions\n\n";

/*
   Solve 1-D PDE
            -u'' = lambda*u
   on [0,1] subject to
            u(0)=0, u'(1)=u(1)*lambda*kappa/(kappa-lambda)
*/
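
/*
   Note added for clarity: with piecewise-linear finite elements on a uniform
   mesh of n subintervals (h=1/n), this problem leads to T(lambda)*x = 0 with

      T(lambda) = A - lambda*B + lambda*kappa/(lambda-kappa) * e_n*e_n',

   where A = (1/h)*tridiag(-1,2,-1) is the stiffness matrix and
   B = (h/6)*tridiag(1,4,1) the mass matrix, both with halved entries in the
   last diagonal position. The shell matrices below apply T(lambda) and
   T'(lambda) row by row with exactly these entries.
*/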

#include <slepcnep.h>

/*
   User-defined routines
*/
PetscErrorCode FormFunction(NEP,PetscScalar,Mat,Mat,void*);
PetscErrorCode FormJacobian(NEP,PetscScalar,Mat,void*);

/*
   Matrix operations and context
*/
PetscErrorCode MatMult_Fun(Mat,Vec,Vec);
PetscErrorCode MatGetDiagonal_Fun(Mat,Vec);
PetscErrorCode MatDestroy_Fun(Mat);
PetscErrorCode MatDuplicate_Fun(Mat,MatDuplicateOption,Mat*);
PetscErrorCode MatMult_Jac(Mat,Vec,Vec);
PetscErrorCode MatGetDiagonal_Jac(Mat,Vec);
PetscErrorCode MatDestroy_Jac(Mat);

typedef struct {
  PetscScalar lambda,kappa;   /* current scalar argument and problem parameter */
  PetscReal   h;              /* mesh spacing */
  PetscMPIInt next,prev;      /* ranks of neighbouring processes (MPI_PROC_NULL at the ends) */
} MatCtx;

/*
   User-defined application context
*/
typedef struct {
  PetscScalar kappa;   /* ratio between stiffness of spring and attached mass */
  PetscReal   h;       /* mesh spacing */
} ApplicationCtx;

int main(int argc,char **argv)
{
  NEP            nep;             /* nonlinear eigensolver context */
  Mat            F,J;             /* Function and Jacobian matrices */
  ApplicationCtx ctx;             /* user-defined context */
  MatCtx         *ctxF,*ctxJ;     /* contexts for shell matrices */
  PetscInt       n=128,nev;
  KSP            ksp;
  PC             pc;
  PetscMPIInt    rank,size;
  PetscBool      terse;

  SlepcInitialize(&argc,&argv,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscPrintf(PETSC_COMM_WORLD,"\n1-D Nonlinear Eigenproblem, n=%" PetscInt_FMT "\n\n",n);
  ctx.h = 1.0/(PetscReal)n;
  ctx.kappa = 1.0;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear eigensolver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  NEPCreate(PETSC_COMM_WORLD,&nep);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscNew(&ctxF);
  ctxF->h = ctx.h;
  ctxF->kappa = ctx.kappa;
  ctxF->next = rank==size-1? MPI_PROC_NULL: rank+1;
  ctxF->prev = rank==0? MPI_PROC_NULL: rank-1;

  MatCreateShell(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,(void*)ctxF,&F);
  MatShellSetOperation(F,MATOP_MULT,(void(*)(void))MatMult_Fun);
  MatShellSetOperation(F,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Fun);
  MatShellSetOperation(F,MATOP_DESTROY,(void(*)(void))MatDestroy_Fun);
  MatShellSetOperation(F,MATOP_DUPLICATE,(void(*)(void))MatDuplicate_Fun);
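
  /*
     The solver may duplicate the Function matrix internally (e.g. to keep a
     work copy), so F also provides a DUPLICATE operation; note that no such
     operation is set for the Jacobian shell below.
  */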

  /*
     Set Function matrix data structure and default Function evaluation
     routine
  */
  NEPSetFunction(nep,F,F,FormFunction,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Jacobian evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscNew(&ctxJ);
  ctxJ->h = ctx.h;
  ctxJ->kappa = ctx.kappa;
  ctxJ->next = rank==size-1? MPI_PROC_NULL: rank+1;
  ctxJ->prev = rank==0? MPI_PROC_NULL: rank-1;

  MatCreateShell(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,(void*)ctxJ,&J);
  MatShellSetOperation(J,MATOP_MULT,(void(*)(void))MatMult_Jac);
  MatShellSetOperation(J,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Jac);
  MatShellSetOperation(J,MATOP_DESTROY,(void(*)(void))MatDestroy_Jac);

  /*
     Set Jacobian matrix data structure and default Jacobian evaluation
     routine
  */
  NEPSetJacobian(nep,J,FormJacobian,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  NEPSetType(nep,NEPRII);
  NEPRIISetLagPreconditioner(nep,0);
  NEPRIIGetKSP(nep,&ksp);
  KSPSetType(ksp,KSPBCGS);
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCJACOBI);
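
  /*
     These choices match what the shell matrices can do: KSPBCGS needs only
     MatMult, and PCJACOBI builds the preconditioner from MatGetDiagonal,
     both of which the shells implement.
  */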

  /*
     Set solver parameters at runtime
  */
  NEPSetFromOptions(nep);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the eigensystem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  NEPSolve(nep);
  NEPGetDimensions(nep,&nev,NULL,NULL);
  PetscPrintf(PETSC_COMM_WORLD," Number of requested eigenvalues: %" PetscInt_FMT "\n",nev);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                    Display solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* show detailed info unless -terse option is given by user */
  PetscOptionsHasName(NULL,NULL,"-terse",&terse);
  if (terse) NEPErrorView(nep,NEP_ERROR_RELATIVE,NULL);
  else {
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
    NEPConvergedReasonView(nep,PETSC_VIEWER_STDOUT_WORLD);
    NEPErrorView(nep,NEP_ERROR_RELATIVE,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
  }
  NEPDestroy(&nep);
  MatDestroy(&F);
  MatDestroy(&J);
  SlepcFinalize();
  return 0;
}
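
/*
   A typical invocation, mirroring the test cases at the end of this file
   (executable name and option values are illustrative):

      mpiexec -n 2 ./ex21 -nep_type rii -nep_target 4 -terse
*/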

/* ------------------------------------------------------------------- */
/*
   FormFunction - Computes Function matrix  T(lambda)

   Input Parameters:
.  nep    - the NEP context
.  lambda - the scalar argument at which T(.) is evaluated
.  ctx    - optional user-defined context, as set by NEPSetFunction()

   Output Parameters:
.  fun - Function matrix
.  B   - optionally different preconditioning matrix
*/
PetscErrorCode FormFunction(NEP nep,PetscScalar lambda,Mat fun,Mat B,void *ctx)
{
  MatCtx         *ctxF;

  PetscFunctionBeginUser;
  MatShellGetContext(fun,&ctxF);
  ctxF->lambda = lambda;   /* the shell applies T(lambda) on the fly, so only lambda is stored */
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
/*
   FormJacobian - Computes Jacobian matrix  T'(lambda)

   Input Parameters:
.  nep    - the NEP context
.  lambda - the scalar argument at which T'(.) is evaluated
.  ctx    - optional user-defined context, as set by NEPSetJacobian()

   Output Parameters:
.  jac - Jacobian matrix
*/
PetscErrorCode FormJacobian(NEP nep,PetscScalar lambda,Mat jac,void *ctx)
{
  MatCtx         *ctxJ;

  PetscFunctionBeginUser;
  MatShellGetContext(jac,&ctxJ);
  ctxJ->lambda = lambda;
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatMult_Fun(Mat A,Vec x,Vec y)
{
  MatCtx            *ctx;
  PetscInt          i,n,N;
  const PetscScalar *px;
  PetscScalar       *py,c,d,de,oe,upper=0.0,lower=0.0;
  PetscReal         h;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetArrayRead(x,&px);
  VecGetArray(y,&py);
  VecGetSize(x,&N);
  VecGetLocalSize(x,&n);
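
  /*
     Exchange endpoint values with the neighbouring processes: each rank sends
     its first local entry to prev and its last one to next, receiving the
     halo values upper (from prev) and lower (from next). MPI_PROC_NULL at
     the chain ends turns those transfers into no-ops.
  */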

  PetscObjectGetComm((PetscObject)A,&comm);
  MPI_Sendrecv(px,1,MPIU_SCALAR,ctx->prev,0,&lower,1,MPIU_SCALAR,ctx->next,0,comm,MPI_STATUS_IGNORE);
  MPI_Sendrecv(px+n-1,1,MPIU_SCALAR,ctx->next,0,&upper,1,MPIU_SCALAR,ctx->prev,0,comm,MPI_STATUS_IGNORE);

  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  d = N;                            /* d = N = 1/h */
  de = 2.0*(d-ctx->lambda*h/3.0);   /* diagonal entry */
  oe = -d-ctx->lambda*h/6.0;        /* off-diagonal entry */
  py[0] = oe*upper + de*px[0] + oe*px[1];
  for (i=1;i<n-1;i++) py[i] = oe*px[i-1] + de*px[i] + oe*px[i+1];
  if (ctx->next==MPI_PROC_NULL) de = d-ctx->lambda*h/3.0+ctx->lambda*c;   /* diagonal entry of last row */
  py[n-1] = oe*px[n-2] + de*px[n-1] + oe*lower;

  VecRestoreArrayRead(x,&px);
  VecRestoreArray(y,&py);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatGetDiagonal_Fun(Mat A,Vec diag)
{
  MatCtx         *ctx;
  PetscInt       n,N;
  PetscScalar    *pd,c,d;
  PetscReal      h;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetSize(diag,&N);
  VecGetLocalSize(diag,&n);
  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  d = N;
  VecSet(diag,2.0*(d-ctx->lambda*h/3.0));
  VecGetArray(diag,&pd);
  if (ctx->next==MPI_PROC_NULL) pd[n-1] = d-ctx->lambda*h/3.0+ctx->lambda*c;   /* fix last row only on the process that owns it */
  VecRestoreArray(diag,&pd);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatDestroy_Fun(Mat A)
{
  MatCtx         *ctx;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  PetscFree(ctx);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatDuplicate_Fun(Mat A,MatDuplicateOption op,Mat *B)
{
  MatCtx         *actx,*bctx;
  PetscInt       m,n,M,N;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&actx);
  MatGetSize(A,&M,&N);
  MatGetLocalSize(A,&m,&n);

  PetscNew(&bctx);
  bctx->h      = actx->h;
  bctx->kappa  = actx->kappa;
  bctx->lambda = actx->lambda;
  bctx->next   = actx->next;
  bctx->prev   = actx->prev;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreateShell(comm,m,n,M,N,(void*)bctx,B);
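  /* a newly created shell matrix has no user-defined operations, so install them on the duplicate as well */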
  MatShellSetOperation(*B,MATOP_MULT,(void(*)(void))MatMult_Fun);
  MatShellSetOperation(*B,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Fun);
  MatShellSetOperation(*B,MATOP_DESTROY,(void(*)(void))MatDestroy_Fun);
  MatShellSetOperation(*B,MATOP_DUPLICATE,(void(*)(void))MatDuplicate_Fun);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatMult_Jac(Mat A,Vec x,Vec y)
{
  MatCtx            *ctx;
  PetscInt          i,n;
  const PetscScalar *px;
  PetscScalar       *py,c,de,oe,upper=0.0,lower=0.0;
  PetscReal         h;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetArrayRead(x,&px);
  VecGetArray(y,&py);
  VecGetLocalSize(x,&n);

  PetscObjectGetComm((PetscObject)A,&comm);
  MPI_Sendrecv(px,1,MPIU_SCALAR,ctx->prev,0,&lower,1,MPIU_SCALAR,ctx->next,0,comm,MPI_STATUS_IGNORE);
  MPI_Sendrecv(px+n-1,1,MPIU_SCALAR,ctx->next,0,&upper,1,MPIU_SCALAR,ctx->prev,0,comm,MPI_STATUS_IGNORE);
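
  /*
     The entries below are the lambda-derivatives of those in MatMult_Fun; in
     particular, for the last row d/dlambda [lambda*kappa/(lambda-kappa)] = -c^2
     with c = kappa/(lambda-kappa).
  */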

  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  de = -2.0*h/3.0;    /* diagonal entry */
  oe = -h/6.0;        /* off-diagonal entry */
  py[0] = oe*upper + de*px[0] + oe*px[1];
  for (i=1;i<n-1;i++) py[i] = oe*px[i-1] + de*px[i] + oe*px[i+1];
  if (ctx->next==MPI_PROC_NULL) de = -h/3.0-c*c;    /* diagonal entry of last row */
  py[n-1] = oe*px[n-2] + de*px[n-1] + oe*lower;

  VecRestoreArrayRead(x,&px);
  VecRestoreArray(y,&py);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatGetDiagonal_Jac(Mat A,Vec diag)
{
  MatCtx         *ctx;
  PetscInt       n;
  PetscScalar    *pd,c;
  PetscReal      h;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetLocalSize(diag,&n);
  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  VecSet(diag,-2.0*h/3.0);
  VecGetArray(diag,&pd);
  if (ctx->next==MPI_PROC_NULL) pd[n-1] = -h/3.0-c*c;   /* fix last row only on the process that owns it */
  VecRestoreArray(diag,&pd);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatDestroy_Jac(Mat A)
{
  MatCtx         *ctx;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  PetscFree(ctx);
  PetscFunctionReturn(0);
}

/*TEST

   testset:
      nsize: {{1 2}}
      args: -terse
      requires: !single
      output_file: output/ex21_1.out
      filter: sed -e "s/[+-]0\.0*i//g" -e "s/+0i//g"
      test:
         suffix: 1_rii
         args: -nep_type rii -nep_target 4
      test:
         suffix: 1_slp
         args: -nep_type slp -nep_slp_pc_type jacobi -nep_slp_ksp_type bcgs -nep_target 10

TEST*/