/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/slepcimpl.h>            /*I "slepcsys.h" I*/

static PetscErrorCode MatCreateTile_Seq(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscInt          i,j,M1,M2,N1,N2,ncols,*scols;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,&M1,&N1));
  PetscCall(MatGetSize(D,&M2,&N2));

  PetscCall(PetscMalloc2(PetscMax(N1,N2),&buf,PetscMax(N1,N2),&scols));
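  /* G has the block layout [ a*A b*B; c*C d*D ]: rows coming from C and D are shifted
     by M1 and columns coming from B and D are shifted by N1 in the loops below */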
  /* Transfer A */
  if (a!=0.0) {
    for (i=0;i<M1;i++) {
      PetscCall(MatGetRow(A,i,&ncols,&cols,&vals));
      if (a!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*a;
      } else svals=(PetscScalar*)vals;
      PetscCall(MatSetValues(G,1,&i,ncols,cols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(A,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    for (i=0;i<M1;i++) {
      PetscCall(MatGetRow(B,i,&ncols,&cols,&vals));
      if (b!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*b;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      PetscCall(MatSetValues(G,1,&i,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    for (i=0;i<M2;i++) {
      PetscCall(MatGetRow(C,i,&ncols,&cols,&vals));
      if (c!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*c;
      } else svals=(PetscScalar*)vals;
      j = i+M1;
      PetscCall(MatSetValues(G,1,&j,ncols,cols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(C,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    for (i=0;i<M2;i++) {
      PetscCall(MatGetRow(D,i,&ncols,&cols,&vals));
      if (d!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*d;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      j = i+M1;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(D,i,&ncols,&cols,&vals));
    }
  }
  PetscCall(PetscFree2(buf,scols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateTile_MPI(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscMPIInt       np;
  PetscInt          p,i,j,N1,N2,m1,m2,*map1,*map2;
  PetscInt          ncols,*scols,start,gstart;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols,*mapptr1,*mapptr2;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N1));
  PetscCall(MatGetLocalSize(A,&m1,NULL));
  PetscCall(MatGetSize(D,NULL,&N2));
  PetscCall(MatGetLocalSize(D,&m2,NULL));

  /* Create mappings */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)G),&np));
  PetscCall(MatGetOwnershipRangesColumn(A,&mapptr1));
  PetscCall(MatGetOwnershipRangesColumn(B,&mapptr2));
  PetscCall(PetscMalloc4(PetscMax(N1,N2),&buf,PetscMax(N1,N2),&scols,N1,&map1,N2,&map2));
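  /* Column maps implementing the perfect shuffle: for a column i of A (or C) owned by
     process p, map1[i] = i+mapptr2[p] is its position in G; for a column i of B (or D)
     owned by p, map2[i] = i+mapptr1[p+1]. This keeps each process's columns of all four
     blocks contiguous in the local part of G */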
  for (p=0;p<np;p++) {
    for (i=mapptr1[p];i<mapptr1[p+1];i++) map1[i] = i+mapptr2[p];
  }
  for (p=0;p<np;p++) {
    for (i=mapptr2[p];i<mapptr2[p+1];i++) map2[i] = i+mapptr1[p+1];
  }
  PetscCall(MatGetOwnershipRange(G,&gstart,NULL));

  /* Transfer A */
  if (a!=0.0) {
    PetscCall(MatGetOwnershipRange(A,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(A,i+start,&ncols,&cols,&vals));
      if (a!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*a;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(A,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    PetscCall(MatGetOwnershipRange(B,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(B,i+start,&ncols,&cols,&vals));
      if (b!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*b;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    PetscCall(MatGetOwnershipRange(C,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(C,i+start,&ncols,&cols,&vals));
      if (c!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*c;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+m1+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(C,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    PetscCall(MatGetOwnershipRange(D,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(D,i+start,&ncols,&cols,&vals));
      if (d!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*d;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+m1+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(D,i+start,&ncols,&cols,&vals));
    }
  }
  PetscCall(PetscFree4(buf,scols,map1,map2));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatCreateTile - Explicitly build a matrix from four blocks, G = [ a*A b*B; c*C d*D ].

   Collective

   Input Parameters:
+  a - scaling factor for block A
.  A - matrix for top-left block
.  b - scaling factor for block B
.  B - matrix for top-right block
.  c - scaling factor for block C
.  C - matrix for bottom-left block
.  d - scaling factor for block D
-  D - matrix for bottom-right block

   Output Parameter:
.  G  - the resulting matrix

   Notes:
   In the case of a parallel matrix, a permuted version of G is returned. The permutation
   is a perfect shuffle such that the local parts of A, B, C, D remain in the local part of
   G for the same process.

   Matrix G must be destroyed by the user.

   The blocks can be of different type. They can be either ConstantDiagonal, or a standard
   type such as AIJ, or any other type provided that it supports the MatGetRow operation.
   The type of the output matrix will be the same as the first block that is not
   ConstantDiagonal (checked in the A,B,C,D order).

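   Example (a minimal sketch, assuming T and Id are previously assembled square matrices
   of the same dimensions, with Id an identity; builds G = [ 0 I; -T 2*T ], where the
   zero-scaled block is skipped):
.vb
   Mat G;
   PetscCall(MatCreateTile(0.0,Id,1.0,Id,-1.0,T,2.0,T,&G));
   /* ... use G ... */
   PetscCall(MatDestroy(&G));
.ve
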
   Level: developer

.seealso: MatCreateNest()
@*/
PetscErrorCode MatCreateTile(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat *G)
{
  PetscInt       i,k,M1,M2,N1,N2,M,N,m1,m2,n1,n2,m,n,bs;
  PetscBool      diag[4];
  Mat            block[4] = {A,B,C,D};
  MatType        type[4];
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,2);
  PetscValidHeaderSpecific(B,MAT_CLASSID,4);
  PetscValidHeaderSpecific(C,MAT_CLASSID,6);
  PetscValidHeaderSpecific(D,MAT_CLASSID,8);
  PetscCheckSameTypeAndComm(A,2,B,4);
  PetscCheckSameTypeAndComm(A,2,C,6);
  PetscCheckSameTypeAndComm(A,2,D,8);
  PetscValidLogicalCollectiveScalar(A,a,1);
  PetscValidLogicalCollectiveScalar(A,b,3);
  PetscValidLogicalCollectiveScalar(A,c,5);
  PetscValidLogicalCollectiveScalar(A,d,7);
  PetscAssertPointer(G,9);

  /* check row 1 */
  PetscCall(MatGetSize(A,&M1,NULL));
  PetscCall(MatGetLocalSize(A,&m1,NULL));
  PetscCall(MatGetSize(B,&M,NULL));
  PetscCall(MatGetLocalSize(B,&m,NULL));
  PetscCheck(M==M1 && m==m1,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check row 2 */
  PetscCall(MatGetSize(C,&M2,NULL));
  PetscCall(MatGetLocalSize(C,&m2,NULL));
  PetscCall(MatGetSize(D,&M,NULL));
  PetscCall(MatGetLocalSize(D,&m,NULL));
  PetscCheck(M==M2 && m==m2,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 1 */
  PetscCall(MatGetSize(A,NULL,&N1));
  PetscCall(MatGetLocalSize(A,NULL,&n1));
  PetscCall(MatGetSize(C,NULL,&N));
  PetscCall(MatGetLocalSize(C,NULL,&n));
  PetscCheck(N==N1 && n==n1,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 2 */
  PetscCall(MatGetSize(B,NULL,&N2));
  PetscCall(MatGetLocalSize(B,NULL,&n2));
  PetscCall(MatGetSize(D,NULL,&N));
  PetscCall(MatGetLocalSize(D,NULL,&n));
  PetscCheck(N==N2 && n==n2,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");

  /* check matrix types */
  for (i=0;i<4;i++) {
    PetscCall(MatGetType(block[i],&type[i]));
    PetscCall(PetscStrcmp(type[i],MATCONSTANTDIAGONAL,&diag[i]));
  }
  for (k=0;k<4;k++) if (!diag[k]) break;
  PetscCheck(k<4,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not implemented for 4 diagonal blocks");

  PetscCall(MatGetBlockSize(block[k],&bs));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)block[k]),G));
  PetscCall(MatSetSizes(*G,m1+m2,n1+n2,M1+M2,N1+N2));
  PetscCall(MatSetType(*G,type[k]));
  PetscCall(MatSetBlockSize(*G,bs));
  PetscCall(MatSetUp(*G));

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)*G),&size));
  if (size>1) PetscCall(MatCreateTile_MPI(a,A,b,B,c,C,d,D,*G));
  else PetscCall(MatCreateTile_Seq(a,A,b,B,c,C,d,D,*G));
  PetscCall(MatAssemblyBegin(*G,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*G,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatCreateVecsEmpty - Get vector(s) compatible with the matrix, i.e. with the same
   parallel layout, but without internal array.

   Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  right - (optional) vector that the matrix can be multiplied against
-  left - (optional) vector that the matrix vector product can be stored in

   Note:
   This is similar to MatCreateVecs(), but the new vectors do not have an internal
   array, so the intended usage is with VecPlaceArray().

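   Example (a minimal sketch, assuming array points to user-managed storage whose length
   matches the local column dimension of mat):
.vb
   Vec x;
   PetscCall(MatCreateVecsEmpty(mat,&x,NULL));
   PetscCall(VecPlaceArray(x,array));
   /* ... operate on x ... */
   PetscCall(VecResetArray(x));
   PetscCall(VecDestroy(&x));
.ve
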
   Level: developer

.seealso: VecDuplicateEmpty()
@*/
PetscErrorCode MatCreateVecsEmpty(Mat mat,Vec *right,Vec *left)
{
  PetscBool      standard,cuda=PETSC_FALSE,skip=PETSC_FALSE;
  PetscInt       M,N,mloc,nloc,rbs,cbs;
  PetscMPIInt    size;
  Vec            v;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);

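  /* For a matrix type other than the known standard (or CUDA) ones, create regular
     vectors and check their type: if it is not a standard (or CUDA) vector type either,
     keep them (skip the array-less creation below); otherwise destroy them and build
     array-less vectors with the same layout */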
  PetscCall(PetscObjectTypeCompareAny((PetscObject)mat,&standard,MATSEQAIJ,MATMPIAIJ,MATSEQBAIJ,MATMPIBAIJ,MATSEQSBAIJ,MATMPISBAIJ,MATSEQDENSE,MATMPIDENSE,""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)mat,&cuda,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,""));
  if (!standard && !cuda) {
    PetscCall(MatCreateVecs(mat,right,left));
    v = right? *right: *left;
    if (v) {
      PetscCall(PetscObjectTypeCompareAny((PetscObject)v,&standard,VECSEQ,VECMPI,""));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)v,&cuda,VECSEQCUDA,VECMPICUDA,""));
    }
    if (!standard && !cuda) skip = PETSC_TRUE;
    else {
      if (right) PetscCall(VecDestroy(right));
      if (left) PetscCall(VecDestroy(left));
    }
  }
  if (!skip) {
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
    PetscCall(MatGetLocalSize(mat,&mloc,&nloc));
    PetscCall(MatGetSize(mat,&M,&N));
    PetscCall(MatGetBlockSizes(mat,&rbs,&cbs));
    if (right) {
      if (cuda) {
#if defined(PETSC_HAVE_CUDA)
        if (size>1) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)mat),cbs,nloc,N,NULL,right));
        else PetscCall(VecCreateSeqCUDAWithArray(PetscObjectComm((PetscObject)mat),cbs,N,NULL,right));
#endif
      } else {
        if (size>1) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),cbs,nloc,N,NULL,right));
        else PetscCall(VecCreateSeqWithArray(PetscObjectComm((PetscObject)mat),cbs,N,NULL,right));
      }
    }
    if (left) {
      if (cuda) {
#if defined(PETSC_HAVE_CUDA)
        if (size>1) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)mat),rbs,mloc,M,NULL,left));
        else PetscCall(VecCreateSeqCUDAWithArray(PetscObjectComm((PetscObject)mat),rbs,M,NULL,left));
#endif
      } else {
        if (size>1) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),rbs,mloc,M,NULL,left));
        else PetscCall(VecCreateSeqWithArray(PetscObjectComm((PetscObject)mat),rbs,M,NULL,left));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatNormEstimate - Estimate the 2-norm of a matrix.

   Collective

   Input Parameters:
+  A   - the matrix
.  vrn - random vector with normally distributed entries (can be NULL)
-  w   - workspace vector (can be NULL)

   Output Parameter:
.  nrm - the norm estimate

   Notes:
   Does not need access to the matrix entries, just performs a matrix-vector product.
   Based on work by I. Ipsen and coworkers https://ipsen.math.ncsu.edu/ps/slides_ima.pdf

   The input vector vrn must have unit 2-norm.
   If vrn is NULL, then it is created internally and filled with VecSetRandomNormal().

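   Example (a minimal sketch; passing NULL lets the routine create the random and work
   vectors internally):
.vb
   PetscReal nrm;
   PetscCall(MatNormEstimate(A,NULL,NULL,&nrm));
.ve
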
   Level: developer

.seealso: VecSetRandomNormal()
@*/
PetscErrorCode MatNormEstimate(Mat A,Vec vrn,Vec w,PetscReal *nrm)
{
  PetscInt       n;
  Vec            vv=NULL,ww=NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  if (vrn) PetscValidHeaderSpecific(vrn,VEC_CLASSID,2);
  if (w) PetscValidHeaderSpecific(w,VEC_CLASSID,3);
  PetscAssertPointer(nrm,4);

  if (!vrn) {
    PetscCall(MatCreateVecs(A,&vv,NULL));
    vrn = vv;
    PetscCall(VecSetRandomNormal(vv,NULL,NULL,NULL));
    PetscCall(VecNormalize(vv,NULL));
  }
  if (!w) {
    PetscCall(MatCreateVecs(A,&ww,NULL));
    w = ww;
  }

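  /* One-sample estimate: for a random unit vector vrn, E[||A*vrn||^2] = ||A||_F^2/n,
     so sqrt(n)*||A*vrn|| approximates the Frobenius norm, taken here as a rough
     (typically overestimating) proxy for the 2-norm */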
  PetscCall(MatGetSize(A,&n,NULL));
  PetscCall(MatMult(A,vrn,w));
  PetscCall(VecNorm(w,NORM_2,nrm));
  *nrm *= PetscSqrtReal((PetscReal)n);

  PetscCall(VecDestroy(&vv));
  PetscCall(VecDestroy(&ww));
  PetscFunctionReturn(PETSC_SUCCESS);
}
