/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

static char help[] = "Solves a linear system using PCHPDDM.\n"
                     "Modification of ${PETSC_DIR}/src/ksp/ksp/tutorials/ex76.c where concurrent EPS are instantiated explicitly by the user.\n\n";

#include <slepceps.h>

int main(int argc,char **args)
{
  Mat            A,aux,a,P,B,X;
  Vec            b;
  KSP            ksp;
  PC             pc;
  EPS            eps;
  ST             st;
  IS             is,sizes;
  const PetscInt *idx;
  PetscInt       m,rstart,rend,location,nev,nconv;
  PetscMPIInt    rank,size;
  PetscViewer    viewer;
  char           dir[PETSC_MAX_PATH_LEN],name[PETSC_MAX_PATH_LEN];
  PetscBool      flg;

  PetscFunctionBeginUser;
  PetscCall(SlepcInitialize(&argc,&args,NULL,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCheck(size == 4,PETSC_COMM_WORLD,PETSC_ERR_USER,"This example requires 4 processes");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatCreate(PETSC_COMM_SELF,&aux));
  PetscCall(ISCreate(PETSC_COMM_SELF,&is));
  PetscCall(PetscStrcpy(dir,"."));
  PetscCall(PetscOptionsGetString(NULL,NULL,"-load_dir",dir,sizeof(dir),NULL));
  /* loading matrices */
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/sizes_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(ISCreate(PETSC_COMM_SELF,&sizes));
  PetscCall(ISLoad(sizes,viewer));
  PetscCall(ISGetIndices(sizes,&idx));
  PetscCall(MatSetSizes(A,idx[0],idx[1],idx[2],idx[3]));
  PetscCall(ISRestoreIndices(sizes,&idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/A.dat",dir));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
  PetscCall(MatLoad(A,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/is_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(ISLoad(is,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(ISSetBlockSize(is,2));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/Neumann_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(MatLoad(aux,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatSetBlockSizesFromMats(aux,A,A));
  PetscCall(MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE));
  PetscCall(MatSetOption(aux,MAT_SYMMETRIC,PETSC_TRUE));
  /* ready for testing */
  PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
  PetscCall(KSPSetOperators(ksp,A,A));
  PetscCall(KSPGetPC(ksp,&pc));
  PetscCall(PCSetType(pc,PCHPDDM));
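  /* assemble B, the second operand of the local generalized eigenproblems: P is a boolean Mat mapping
     each locally owned row of A to its position in the overlapping index set is, so that P^T a P is the
     diagonal block of A expressed in the numbering of aux; B gets these values with the pattern of aux */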
  PetscCall(MatDuplicate(aux,MAT_DO_NOT_COPY_VALUES,&B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
  PetscCall(MatGetDiagonalBlock(A,&a));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(ISGetLocalSize(is,&m));
  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,rend-rstart,m,1,NULL,&P));
  for (m = rstart; m < rend; ++m) {
    PetscCall(ISLocate(is,m,&location));
    PetscCheck(location >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"IS of the auxiliary Mat does not include all local rows of A");
    PetscCall(MatSetValue(P,m-rstart,location,1.0,INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(P,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(P,MAT_FINAL_ASSEMBLY));
  PetscCall(MatPtAP(a,P,MAT_INITIAL_MATRIX,1.0,&X));
  PetscCall(MatDestroy(&P));
  PetscCall(MatAXPY(B,1.0,X,SUBSET_NONZERO_PATTERN));
  PetscCall(MatDestroy(&X));
  PetscCall(MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE));
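  /* each process solves its own sequential generalized eigenproblem with operators (aux,B), targeting the
     eigenvalues closest to zero with shift-and-invert, instead of letting PCHPDDM instantiate the EPS itself */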
  PetscCall(EPSCreate(PETSC_COMM_SELF,&eps));
  PetscCall(EPSSetOperators(eps,aux,B));
  PetscCall(EPSSetProblemType(eps,EPS_GHEP));
  PetscCall(EPSSetTarget(eps,0.0));
  PetscCall(EPSSetWhichEigenpairs(eps,EPS_TARGET_MAGNITUDE));
  PetscCall(EPSGetST(eps,&st));
  PetscCall(STSetType(st,STSINVERT));
  PetscCall(EPSSetFromOptions(eps));
  PetscCall(EPSGetDimensions(eps,&nev,NULL,NULL));
  PetscCall(EPSSolve(eps));
  PetscCall(EPSGetConverged(eps,&nconv));
  nev = PetscMin(nev,nconv);
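  /* gather the converged eigenvectors as the columns of a dense Mat holding the local deflation space */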
  PetscCall(ISGetLocalSize(is,&m));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,m,nev,NULL,&P));
  for (m = 0; m < nev; ++m) {
    PetscCall(MatDenseGetColumnVecWrite(P,m,&b));
    PetscCall(EPSGetEigenvector(eps,m,b,NULL));
    PetscCall(MatDenseRestoreColumnVecWrite(P,m,&b));
  }
  PetscCall(EPSDestroy(&eps));
  PetscCall(MatDestroy(&B));
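  /* supply the user-computed deflation space to PCHPDDM, which uses it to assemble its coarser operator */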
  PetscCall(PCHPDDMSetDeflationMat(pc,is,P));
  PetscCall(MatDestroy(&P));
  PetscCall(MatDestroy(&aux));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(ISDestroy(&is));
  PetscCall(MatCreateVecs(A,NULL,&b));
  PetscCall(VecSet(b,1.0));
  PetscCall(KSPSolve(ksp,b,b));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCHPDDM,&flg));
  if (flg) {
    PetscCall(PCHPDDMGetSTShareSubKSP(pc,&flg));
    /* since EPSSolve() is called outside PCSetUp_HPDDM() and there is no mechanism (yet) to pass the underlying ST, */
    /* PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE when using PCHPDDMSetDeflationMat() */
    PetscCheck(!flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE");
  }
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(SlepcFinalize());
  return 0;
}

/*TEST

   build:
      requires: hpddm

   test:
      requires: double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES)
      nsize: 4
      filter: grep -v " total: nonzeros=" | grep -v " rows=" | grep -v " factor fill ratio given " | grep -v " using I-node" | sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 5/Linear solve converged due to CONVERGED_RTOL iterations 4/g" -e "s/amount of overlap = 1/user-defined overlap/g"
      # similar output as ex76 with -pc_hpddm_levels_eps_nev val instead of just -eps_nev val
      args: -ksp_rtol 1e-3 -ksp_max_it 10 -ksp_error_if_not_converged -ksp_converged_reason -ksp_view -ksp_type hpddm -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_define_subdomains {{false true}shared output} -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -eps_nev 10 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -matload_block_size 2

TEST*/