Line data Source code
1 : /*
2 : - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3 : SLEPc - Scalable Library for Eigenvalue Problem Computations
4 : Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain
5 :
6 : This file is part of SLEPc.
7 : SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8 : - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9 : */
10 :
11 : static char help[] = "Computes a partial GSVD of two matrices from an IR Tools example.\n"
12 : "The command line options are:\n"
13 : " -n <n>, where <n> = number of grid subdivisions in x and y dimensions.\n\n";
14 :
15 : #include <slepcsvd.h>
16 :
17 : /* LookUp: returns an index i such that X(i) <= y < X(i+1), where X = linspace(0,1,N).
18 : Only elements start..end-1 are considered */
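  : /* Example: with N=5 the grid is X = [0, 0.25, 0.5, 0.75, 1], so LookUp(5,0,5,0.6,&i)
  :    takes the branch 0.5 <= 0.6 < 0.75 and returns i=2; the search is a recursive
  :    bisection over the index range start..end-1 */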
19 5130 : PetscErrorCode LookUp(PetscInt N,PetscInt start,PetscInt end,PetscReal y,PetscInt *i)
20 : {
21 5130 : PetscInt n=end-start,j=n/2;
22 5130 : PetscReal h=1.0/(N-1);
23 :
24 5130 : PetscFunctionBeginUser;
25 5130 : if (y<(start+j)*h) PetscCall(LookUp(N,start,start+j,y,i));
26 2955 : else if (y<(start+j+1)*h) *i = start+j;
27 1419 : else PetscCall(LookUp(N,start+j,end,y,i));
28 5130 : PetscFunctionReturn(PETSC_SUCCESS);
29 : }
30 :
31 : /*
32 : PermuteMatrices - Applies a symmetric permutation to A, computed with MatPartitioning,
33 : and permutes the columns of B in the same way.
34 : */
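  : /* Note: A receives the same permutation on rows and columns and B receives it on columns
  :    only, i.e. the unknowns are merely relabeled (plus a row reordering of A), so the
  :    generalized singular values of (A,B) are unchanged; the only purpose of the permutation
  :    is a better parallel distribution (enabled with -permute in main below) */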
35 2 : PetscErrorCode PermuteMatrices(Mat *A,Mat *B)
36 : {
37 2 : MatPartitioning part;
38 2 : IS isn,is,id;
39 2 : PetscInt *nlocal,Istart,Iend;
40 2 : PetscMPIInt size,rank;
41 2 : MPI_Comm comm;
42 2 : Mat in=*A,out;
43 :
44 2 : PetscFunctionBegin;
45 2 : PetscCall(PetscObjectGetComm((PetscObject)in,&comm));
46 2 : PetscCallMPI(MPI_Comm_size(comm,&size));
47 2 : PetscCallMPI(MPI_Comm_rank(comm,&rank));
48 2 : PetscCall(MatPartitioningCreate(comm,&part));
49 2 : PetscCall(MatPartitioningSetAdjacency(part,in));
50 2 : PetscCall(MatPartitioningSetFromOptions(part));
51 2 : PetscCall(MatPartitioningApply(part,&is)); /* get owner of each vertex */
52 2 : PetscCall(ISPartitioningToNumbering(is,&isn)); /* get new global number of each vertex */
53 2 : PetscCall(PetscMalloc1(size,&nlocal));
54 2 : PetscCall(ISPartitioningCount(is,size,nlocal)); /* count vertices assigned to each process */
55 2 : PetscCall(ISDestroy(&is));
56 :
57 : /* get old global number of each new global number */
58 2 : PetscCall(ISInvertPermutation(isn,nlocal[rank],&is));
59 2 : PetscCall(PetscFree(nlocal));
60 2 : PetscCall(ISDestroy(&isn));
61 2 : PetscCall(MatPartitioningDestroy(&part));
62 :
63 : /* symmetric permutation of A */
64 2 : PetscCall(MatCreateSubMatrix(in,is,is,MAT_INITIAL_MATRIX,&out));
65 2 : PetscCall(MatDestroy(A));
66 2 : *A = out;
67 :
68 : /* column permutation of B */
69 2 : PetscCall(MatGetOwnershipRange(*B,&Istart,&Iend));
70 2 : PetscCall(ISCreateStride(comm,Iend-Istart,Istart,1,&id));
71 2 : PetscCall(ISSetIdentity(id));
72 2 : PetscCall(MatCreateSubMatrix(*B,id,is,MAT_INITIAL_MATRIX,&out));
73 2 : PetscCall(MatDestroy(B));
74 2 : *B = out;
75 2 : PetscCall(ISDestroy(&id));
76 2 : PetscCall(ISDestroy(&is));
77 2 : PetscFunctionReturn(PETSC_SUCCESS);
78 : }
79 :
80 4 : int main(int argc,char **argv)
81 : {
82 4 : Mat A,B; /* operator matrices */
83 4 : SVD svd; /* singular value problem solver context */
84 4 : KSP ksp;
85 4 : PetscInt n=32,N,i,i2,j,k,xidx,yidx,bl,Istart,Iend,col[3];
86 4 : PetscScalar vals[] = { 1, -2, 1 },X,Y;
87 4 : PetscBool flg,terse,permute=PETSC_FALSE;
88 4 : PetscRandom rctx;
89 :
90 4 : PetscFunctionBeginUser;
91 4 : PetscCall(SlepcInitialize(&argc,&argv,NULL,help));
92 :
93 4 : PetscCall(PetscOptionsGetInt(NULL,NULL,"-n",&n,&flg));
94 4 : N = n*n;
95 4 : PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nGSVD of inverse interpolation problem, (%" PetscInt_FMT "+%" PetscInt_FMT ")x%" PetscInt_FMT "\n\n",N,2*N,N));
96 :
97 : /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
98 : Build the matrices
99 : - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
100 :
101 4 : PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rctx));
102 4 : PetscCall(PetscRandomSetInterval(rctx,0,1));
103 4 : PetscCall(PetscRandomSetFromOptions(rctx));
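  : /* the random values drawn below are the coordinates of the sample points of the inverse
  :    interpolation problem; the interval is set to the unit interval so that every point
  :    lies inside the unit square covered by the n x n grid */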
104 :
105 4 : PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
106 4 : PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,N,N));
107 4 : PetscCall(MatSetFromOptions(A));
108 4 : PetscCall(MatGetOwnershipRange(A,&Istart,&Iend));
109 :
110 : /* make sure that the matrix is the same irrespective of the number of MPI processes */
111 4 : PetscCall(PetscRandomSetSeed(rctx,0x12345678));
112 4 : PetscCall(PetscRandomSeed(rctx));
113 132 : for (k=0;k<Istart;k++) {
114 128 : PetscCall(PetscRandomGetValue(rctx,&X));
115 128 : PetscCall(PetscRandomGetValue(rctx,&Y));
116 : }
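  : /* each local row k of A corresponds to one random point (X,Y): LookUp locates its grid
  :    cell (xidx,yidx), X and Y are then rescaled to local coordinates within the cell, and
  :    the four entries added to the row are the bilinear interpolation weights of the
  :    surrounding grid nodes: (1-X)(1-Y)=1-X-Y+X*Y, X(1-Y), Y(1-X) and X*Y */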
117 :
118 772 : for (k=0;k<Iend-Istart;k++) {
119 768 : PetscCall(PetscRandomGetValue(rctx,&X));
120 768 : PetscCall(LookUp(n,0,n,PetscRealPart(X),&xidx));
121 768 : X = X*(n-1)-xidx; /* rescale to the offset within the cell, in [0,1) */
122 768 : PetscCall(PetscRandomGetValue(rctx,&Y));
123 768 : PetscCall(LookUp(n,0,n,PetscRealPart(Y),&yidx));
124 768 : Y = Y*(n-1)-yidx; /* rescale to the offset within the cell, in [0,1) */
125 13056 : for (j=0;j<n;j++) {
126 208896 : for (i=0;i<n;i++) {
127 196608 : if (i<n-1 && j<n-1 && xidx==j && yidx==i) PetscCall(MatSetValue(A,Istart+k,i+j*n,1.0-X-Y+X*Y,ADD_VALUES));
128 196608 : if (i<n-1 && j>0 && xidx==j-1 && yidx==i) PetscCall(MatSetValue(A,Istart+k,i+j*n,X-X*Y,ADD_VALUES));
129 196608 : if (i>0 && j<n-1 && xidx==j && yidx==i-1) PetscCall(MatSetValue(A,Istart+k,i+j*n,Y-X*Y,ADD_VALUES));
130 196608 : if (i>0 && j>0 && xidx==j-1 && yidx==i-1) PetscCall(MatSetValue(A,Istart+k,i+j*n,X*Y,ADD_VALUES));
131 : }
132 : }
133 : }
134 4 : PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
135 4 : PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
136 4 : PetscCall(PetscRandomDestroy(&rctx));
137 :
138 4 : PetscCall(MatCreate(PETSC_COMM_WORLD,&B));
139 4 : PetscCall(MatSetSizes(B,PETSC_DECIDE,PETSC_DECIDE,2*N,N));
140 4 : PetscCall(MatSetFromOptions(B));
141 :
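  : /* B stacks two regularization blocks, kron(speye(n),T1) in rows i+Istart and
  :    kron(T2,speye(n)) in rows i+Iend, with T1 and T2 tridiagonal; with several MPI
  :    processes the two blocks end up interleaved by owner, which is only a row permutation
  :    of the stacked operator and does not affect the generalized singular values */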
142 772 : for (i=Istart;i<Iend;i++) {
143 : /* upper block: kron(speye(n),T1) where T1 is tridiagonal */
144 768 : i2 = i+Istart;
145 768 : if (i%n==0) PetscCall(MatSetValue(B,i2,i,1.0,INSERT_VALUES));
146 720 : else if (i%n==n-1) {
147 48 : PetscCall(MatSetValue(B,i2,i-1,-1.0,INSERT_VALUES));
148 48 : PetscCall(MatSetValue(B,i2,i,1.0,INSERT_VALUES));
149 : } else {
150 672 : col[0]=i-1; col[1]=i; col[2]=i+1;
151 672 : PetscCall(MatSetValues(B,1,&i2,3,col,vals,INSERT_VALUES));
152 : }
153 : /* lower block: kron(T2,speye(n)) where T2 is tridiagonal */
154 768 : i2 = i+Iend;
155 768 : bl = i/n; /* index of block */
156 768 : j = i-bl*n; /* index within block */
157 768 : if (bl==0 || bl==n-1) PetscCall(MatSetValue(B,i2,i,1.0,INSERT_VALUES));
158 : else {
159 672 : col[0]=i-n; col[1]=i; col[2]=i+n;
160 768 : PetscCall(MatSetValues(B,1,&i2,3,col,vals,INSERT_VALUES));
161 : }
162 : }
163 4 : PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
164 4 : PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
165 :
166 4 : PetscCall(PetscOptionsGetBool(NULL,NULL,"-permute",&permute,NULL));
167 4 : if (permute) {
168 2 : PetscCall(PetscPrintf(PETSC_COMM_WORLD," Permuting to optimize parallel matvec\n"));
169 2 : PetscCall(PermuteMatrices(&A,&B));
170 : }
171 :
172 : /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173 : Create the singular value solver and set various options
174 : - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
175 :
176 4 : PetscCall(SVDCreate(PETSC_COMM_WORLD,&svd));
177 4 : PetscCall(SVDSetOperators(svd,A,B));
178 4 : PetscCall(SVDSetProblemType(svd,SVD_GENERALIZED));
179 :
180 4 : PetscCall(SVDSetType(svd,SVDTRLANCZOS));
181 4 : PetscCall(SVDSetDimensions(svd,6,PETSC_DETERMINE,PETSC_DETERMINE));
182 4 : PetscCall(SVDTRLanczosSetExplicitMatrix(svd,PETSC_TRUE));
183 4 : PetscCall(SVDTRLanczosSetScale(svd,-10));
184 4 : PetscCall(SVDTRLanczosGetKSP(svd,&ksp));
185 4 : PetscCall(KSPSetTolerances(ksp,1e-12,PETSC_CURRENT,PETSC_CURRENT,PETSC_CURRENT));
186 :
187 4 : PetscCall(SVDSetFromOptions(svd));
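  : /* SVDSetFromOptions is called last, so the hard-coded settings above can still be
  :    overridden at run time, e.g. (option names assumed to follow the usual SLEPc
  :    convention for the corresponding SVDXxxSet routines):
  :      mpiexec -n 2 ./ex51 -n 32 -svd_nsv 8 -permute -terse
  :    The KSP obtained with SVDTRLanczosGetKSP handles the inner solves of the GSVD
  :    bidiagonalization, which is why its relative tolerance is tightened to 1e-12 */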
188 :
189 : /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190 : Solve the problem and print solution
191 : - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
192 :
193 4 : PetscCall(SVDSolve(svd));
194 :
195 : /* show detailed info unless -terse option is given by user */
196 4 : PetscCall(PetscOptionsHasName(NULL,NULL,"-terse",&terse));
197 4 : if (terse) PetscCall(SVDErrorView(svd,SVD_ERROR_NORM,NULL));
198 : else {
199 0 : PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL));
200 0 : PetscCall(SVDConvergedReasonView(svd,PETSC_VIEWER_STDOUT_WORLD));
201 0 : PetscCall(SVDErrorView(svd,SVD_ERROR_NORM,PETSC_VIEWER_STDOUT_WORLD));
202 0 : PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
203 : }
204 4 : PetscCall(SVDDestroy(&svd));
205 4 : PetscCall(MatDestroy(&A));
206 4 : PetscCall(MatDestroy(&B));
207 4 : PetscCall(SlepcFinalize());
208 : return 0;
209 : }
210 :
211 : /*TEST
212 :
213 : testset:
214 : args: -n 16 -terse
215 : requires: double
216 : output_file: output/ex51_1.out
217 : test:
218 : args: -svd_trlanczos_gbidiag {{upper lower}} -svd_trlanczos_oneside
219 : suffix: 1
220 : test:
221 : suffix: 2
222 : nsize: 2
223 : args: -permute
224 : filter: grep -v Permuting
225 :
226 : TEST*/