Actual source code: test43.c

slepc-3.22.1 2024-10-28
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

static char help[] = "Solves a linear system using PCHPDDM.\n"
  "Modification of ${PETSC_DIR}/src/ksp/ksp/tutorials/ex76.c where concurrent EPS are instantiated explicitly by the user.\n\n";

#include <slepceps.h>

int main(int argc,char **args)
{
  Mat            A,aux,a,P,B,X;
  Vec            b;
  KSP            ksp;
  PC             pc;
  EPS            eps;
  ST             st;
  IS             is,sizes;
  const PetscInt *idx;
  PetscInt       m,rstart,rend,location,nev,nconv;
  PetscMPIInt    rank,size;
  PetscViewer    viewer;
  char           dir[PETSC_MAX_PATH_LEN],name[PETSC_MAX_PATH_LEN];
  PetscBool      flg;

  PetscFunctionBeginUser;
  PetscCall(SlepcInitialize(&argc,&args,NULL,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCheck(size == 4,PETSC_COMM_WORLD,PETSC_ERR_USER,"This example requires 4 processes");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatCreate(PETSC_COMM_SELF,&aux));
  PetscCall(ISCreate(PETSC_COMM_SELF,&is));
  PetscCall(PetscStrcpy(dir,"."));
  PetscCall(PetscOptionsGetString(NULL,NULL,"-load_dir",dir,sizeof(dir),NULL));
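  /* -load_dir points to the directory holding the binary data files for this test (defaults to the current directory) */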
  /* loading matrices */
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/sizes_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(ISCreate(PETSC_COMM_SELF,&sizes));
  PetscCall(ISLoad(sizes,viewer));
  PetscCall(ISGetIndices(sizes,&idx));
  PetscCall(MatSetSizes(A,idx[0],idx[1],idx[2],idx[3]));
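  /* the four entries of sizes_<rank>_<size>.dat give the dimensions of A for this rank: local rows, local columns, global rows, global columns */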
  PetscCall(ISRestoreIndices(sizes,&idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/A.dat",dir));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
  PetscCall(MatLoad(A,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/is_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(ISLoad(is,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(ISSetBlockSize(is,2));
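  /* is holds the global indices of this rank's overlapping subdomain; the matrix loaded next (aux) is the corresponding local Neumann matrix, as in ex76.c */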
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/Neumann_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(MatLoad(aux,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatSetBlockSizesFromMats(aux,A,A));
  PetscCall(MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE));
  PetscCall(MatSetOption(aux,MAT_SYMMETRIC,PETSC_TRUE));
  /* ready for testing */
  PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
  PetscCall(KSPSetOperators(ksp,A,A));
  PetscCall(KSPGetPC(ksp,&pc));
  PetscCall(PCSetType(pc,PCHPDDM));
  PetscCall(MatDuplicate(aux,MAT_DO_NOT_COPY_VALUES,&B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
  PetscCall(MatGetDiagonalBlock(A,&a));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(ISGetLocalSize(is,&m));
  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,rend-rstart,m,1,NULL,&P));
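  /* P is a boolean (one nonzero per row) restriction: local row i of the diagonal block of A is mapped to the position of global row rstart+i inside is, i.e., into the numbering of the overlapping subdomain */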
  for (m = rstart; m < rend; ++m) {
    PetscCall(ISLocate(is,m,&location));
    PetscCheck(location >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"IS of the auxiliary Mat does not include all local rows of A");
    PetscCall(MatSetValue(P,m-rstart,location,1.0,INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(P,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(P,MAT_FINAL_ASSEMBLY));
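  /* X = P^T * a * P: the diagonal block of A owned by this rank, expressed in the (larger) numbering of the overlapping subdomain; B then receives these values while keeping the nonzero pattern of aux */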
  PetscCall(MatPtAP(a,P,MAT_INITIAL_MATRIX,1.0,&X));
  PetscCall(MatDestroy(&P));
  PetscCall(MatAXPY(B,1.0,X,SUBSET_NONZERO_PATTERN));
  PetscCall(MatDestroy(&X));
  PetscCall(MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE));
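  /* concurrent generalized eigenproblem aux x = lambda B x, one sequential EPS per process, akin to what PCHPDDM would otherwise set up internally: shift-and-invert around the target 0 to retrieve the eigenpairs closest to zero */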
  PetscCall(EPSCreate(PETSC_COMM_SELF,&eps));
  PetscCall(EPSSetOperators(eps,aux,B));
  PetscCall(EPSSetProblemType(eps,EPS_GHEP));
  PetscCall(EPSSetTarget(eps,0.0));
  PetscCall(EPSSetWhichEigenpairs(eps,EPS_TARGET_MAGNITUDE));
  PetscCall(EPSGetST(eps,&st));
  PetscCall(STSetType(st,STSINVERT));
  PetscCall(EPSSetFromOptions(eps));
  PetscCall(EPSGetDimensions(eps,&nev,NULL,NULL));
  PetscCall(EPSSolve(eps));
  PetscCall(EPSGetConverged(eps,&nconv));
  nev = PetscMin(nev,nconv);
  PetscCall(ISGetLocalSize(is,&m));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,m,nev,NULL,&P));
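  /* gather the converged eigenvectors column by column into the dense matrix P, which will serve as the local deflation space */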
  for (m = 0; m < nev; ++m) {
    PetscCall(MatDenseGetColumnVecWrite(P,m,&b));
    PetscCall(EPSGetEigenvector(eps,m,b,NULL));
    PetscCall(MatDenseRestoreColumnVecWrite(P,m,&b));
  }
  PetscCall(EPSDestroy(&eps));
  PetscCall(MatDestroy(&B));
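  /* hand the user-computed deflation (coarse) space to PCHPDDM; is provides the mapping from the local rows of P to the global numbering of A */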
  PetscCall(PCHPDDMSetDeflationMat(pc,is,P));
  PetscCall(MatDestroy(&P));
  PetscCall(MatDestroy(&aux));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(ISDestroy(&is));
  PetscCall(MatCreateVecs(A,NULL,&b));
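  /* right-hand side of all ones; the solve is done in place, so the solution overwrites b */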
  PetscCall(VecSet(b,1.0));
  PetscCall(KSPSolve(ksp,b,b));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCHPDDM,&flg));
  if (flg) {
    PetscCall(PCHPDDMGetSTShareSubKSP(pc,&flg));
    /* since EPSSolve() is called outside PCSetUp_HPDDM() and there is no mechanism (yet) to pass the underlying ST, */
    /* PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE when using PCHPDDMSetDeflationMat()                    */
    PetscCheck(!flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE");
  }
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(SlepcFinalize());
  return 0;
}

/*TEST

   build:
      requires: hpddm

   test:
      requires: double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES)
      nsize: 4
      filter: grep -v "    total: nonzeros=" | grep -v "    rows=" | grep -v "       factor fill ratio given " | grep -v "      using I-node" | sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 5/Linear solve converged due to CONVERGED_RTOL iterations 4/g" -e "s/amount of overlap = 1/user-defined overlap/g"
      # similar output to ex76 with -pc_hpddm_levels_eps_nev val instead of just -eps_nev val
      args: -ksp_rtol 1e-3 -ksp_max_it 10 -ksp_error_if_not_converged -ksp_converged_reason -ksp_view -ksp_type hpddm -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_define_subdomains {{false true}shared output} -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -eps_nev 10 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -matload_block_size 2

TEST*/