/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/slepcimpl.h>            /*I "slepcsys.h" I*/

static PetscErrorCode MatCreateTile_Seq(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscInt          i,j,M1,M2,N1,N2,ncols,*scols;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,&M1,&N1));
  PetscCall(MatGetSize(D,&M2,&N2));

  PetscCall(PetscMalloc2(PetscMax(N1,N2),&buf,PetscMax(N1,N2),&scols));
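  /* buf receives a scaled copy of a row when the block's factor is not 1;
     scols holds column indices shifted into the destination block of G */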
  /* Transfer A */
  if (a!=0.0) {
    for (i=0;i<M1;i++) {
      PetscCall(MatGetRow(A,i,&ncols,&cols,&vals));
      if (a!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*a;
      } else svals=(PetscScalar*)vals;
      PetscCall(MatSetValues(G,1,&i,ncols,cols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(A,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    for (i=0;i<M1;i++) {
      PetscCall(MatGetRow(B,i,&ncols,&cols,&vals));
      if (b!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*b;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      PetscCall(MatSetValues(G,1,&i,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    for (i=0;i<M2;i++) {
      PetscCall(MatGetRow(C,i,&ncols,&cols,&vals));
      if (c!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*c;
      } else svals=(PetscScalar*)vals;
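      /* C contributes to the lower block row of G: shift the row index by M1 */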
      j = i+M1;
      PetscCall(MatSetValues(G,1,&j,ncols,cols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(C,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    for (i=0;i<M2;i++) {
      PetscCall(MatGetRow(D,i,&ncols,&cols,&vals));
      if (d!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*d;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      j = i+M1;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(D,i,&ncols,&cols,&vals));
    }
  }
  PetscCall(PetscFree2(buf,scols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateTile_MPI(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscMPIInt       np;
  PetscInt          p,i,j,N1,N2,m1,m2,*map1,*map2;
  PetscInt          ncols,*scols,start,gstart;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols,*mapptr1,*mapptr2;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N1));
  PetscCall(MatGetLocalSize(A,&m1,NULL));
  PetscCall(MatGetSize(D,NULL,&N2));
  PetscCall(MatGetLocalSize(D,&m2,NULL));

  /* Create mappings */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)G),&np));
  PetscCall(MatGetOwnershipRangesColumn(A,&mapptr1));
  PetscCall(MatGetOwnershipRangesColumn(B,&mapptr2));
  PetscCall(PetscMalloc4(PetscMax(N1,N2),&buf,PetscMax(N1,N2),&scols,N1,&map1,N2,&map2));
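  /* Perfect shuffle: in G, each process owns its local columns of A (and C)
     followed by its local columns of B (and D). map1[i] is the global column
     of G for column i of A/C, shifted by the B columns of the preceding
     processes; map2[i] is the analogue for B/D, shifted by the A columns
     owned up to and including this process. */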
  for (p=0;p<np;p++) {
    for (i=mapptr1[p];i<mapptr1[p+1];i++) map1[i] = i+mapptr2[p];
  }
  for (p=0;p<np;p++) {
    for (i=mapptr2[p];i<mapptr2[p+1];i++) map2[i] = i+mapptr1[p+1];
  }
  PetscCall(MatGetOwnershipRange(G,&gstart,NULL));

  /* Transfer A */
  if (a!=0.0) {
    PetscCall(MatGetOwnershipRange(A,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(A,i+start,&ncols,&cols,&vals));
      if (a!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*a;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(A,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    PetscCall(MatGetOwnershipRange(B,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(B,i+start,&ncols,&cols,&vals));
      if (b!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*b;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    PetscCall(MatGetOwnershipRange(C,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(C,i+start,&ncols,&cols,&vals));
      if (c!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*c;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+m1+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(C,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    PetscCall(MatGetOwnershipRange(D,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(D,i+start,&ncols,&cols,&vals));
      if (d!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*d;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+m1+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(D,i+start,&ncols,&cols,&vals));
    }
  }
  PetscCall(PetscFree4(buf,scols,map1,map2));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatCreateTile - Explicitly build a matrix from four blocks, G = [ a*A b*B; c*C d*D ].

   Collective

   Input Parameters:
+  a - scaling factor for block A
.  A - matrix for top-left block
.  b - scaling factor for block B
.  B - matrix for top-right block
.  c - scaling factor for block C
.  C - matrix for bottom-left block
.  d - scaling factor for block D
-  D - matrix for bottom-right block

   Output Parameter:
.  G - the resulting matrix

   Notes:
   In the case of a parallel matrix, a permuted version of G is returned. The permutation
   is a perfect shuffle such that the local parts of A, B, C, D remain in the local part of
   G for the same process.

   Matrix G must be destroyed by the user.

   The blocks can be of different types. They can be either ConstantDiagonal, or a standard
   type such as AIJ, or any other type provided that it supports the MatGetRow operation.
   The type of the output matrix will be the same as the first block that is not
   ConstantDiagonal (checked in the A,B,C,D order).

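   Example:
   As an illustration (K, M, Id, L are placeholder names, not part of this interface),
   a linearization G = [ 0 I; -K -M ] of two square matrices K and M with the same
   layout could be built as sketched below; a zero scaling factor skips block A
   entirely, so the ConstantDiagonal matrix Id only provides dimensions there:
.vb
   Mat      Id,L;
   PetscInt m,N;

   PetscCall(MatGetLocalSize(K,&m,NULL));
   PetscCall(MatGetSize(K,&N,NULL));
   PetscCall(MatCreateConstantDiagonal(PetscObjectComm((PetscObject)K),m,m,N,N,1.0,&Id));
   PetscCall(MatCreateTile(0.0,Id,1.0,Id,-1.0,K,-1.0,M,&L));
   PetscCall(MatDestroy(&Id));
.ve
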
   Level: developer

.seealso: MatCreateNest()
@*/
PetscErrorCode MatCreateTile(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat *G)
{
  PetscInt       i,k,M1,M2,N1,N2,M,N,m1,m2,n1,n2,m,n,bs;
  PetscBool      diag[4];
  Mat            block[4] = {A,B,C,D};
  MatType        type[4];
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,2);
  PetscValidHeaderSpecific(B,MAT_CLASSID,4);
  PetscValidHeaderSpecific(C,MAT_CLASSID,6);
  PetscValidHeaderSpecific(D,MAT_CLASSID,8);
  PetscCheckSameComm(A,2,B,4);
  PetscCheckSameComm(A,2,C,6);
  PetscCheckSameComm(A,2,D,8);
  PetscValidLogicalCollectiveScalar(A,a,1);
  PetscValidLogicalCollectiveScalar(A,b,3);
  PetscValidLogicalCollectiveScalar(A,c,5);
  PetscValidLogicalCollectiveScalar(A,d,7);
  PetscAssertPointer(G,9);

  /* check row 1 */
  PetscCall(MatGetSize(A,&M1,NULL));
  PetscCall(MatGetLocalSize(A,&m1,NULL));
  PetscCall(MatGetSize(B,&M,NULL));
  PetscCall(MatGetLocalSize(B,&m,NULL));
  PetscCheck(M==M1 && m==m1,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check row 2 */
  PetscCall(MatGetSize(C,&M2,NULL));
  PetscCall(MatGetLocalSize(C,&m2,NULL));
  PetscCall(MatGetSize(D,&M,NULL));
  PetscCall(MatGetLocalSize(D,&m,NULL));
  PetscCheck(M==M2 && m==m2,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 1 */
  PetscCall(MatGetSize(A,NULL,&N1));
  PetscCall(MatGetLocalSize(A,NULL,&n1));
  PetscCall(MatGetSize(C,NULL,&N));
  PetscCall(MatGetLocalSize(C,NULL,&n));
  PetscCheck(N==N1 && n==n1,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 2 */
  PetscCall(MatGetSize(B,NULL,&N2));
  PetscCall(MatGetLocalSize(B,NULL,&n2));
  PetscCall(MatGetSize(D,NULL,&N));
  PetscCall(MatGetLocalSize(D,NULL,&n));
  PetscCheck(N==N2 && n==n2,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");

  /* check matrix types */
  for (i=0;i<4;i++) {
    PetscCall(MatGetType(block[i],&type[i]));
    PetscCall(PetscStrcmp(type[i],MATCONSTANTDIAGONAL,&diag[i]));
  }
  for (k=0;k<4;k++) if (!diag[k]) break;
  PetscCheck(k<4,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not implemented for 4 diagonal blocks");

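  /* G inherits its type and block size from the first block that is not ConstantDiagonal */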
  PetscCall(MatGetBlockSize(block[k],&bs));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)block[k]),G));
  PetscCall(MatSetSizes(*G,m1+m2,n1+n2,M1+M2,N1+N2));
  PetscCall(MatSetType(*G,type[k]));
  PetscCall(MatSetBlockSize(*G,bs));
  PetscCall(MatSetUp(*G));

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)*G),&size));
  if (size>1) PetscCall(MatCreateTile_MPI(a,A,b,B,c,C,d,D,*G));
  else PetscCall(MatCreateTile_Seq(a,A,b,B,c,C,d,D,*G));
  PetscCall(MatAssemblyBegin(*G,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*G,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatCreateVecsEmpty - Get vector(s) compatible with the matrix, i.e. with the same
   parallel layout, but without an internal array.

   Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  right - (optional) vector that the matrix can be multiplied against
-  left - (optional) vector that the matrix-vector product can be stored in

   Note:
   This is similar to MatCreateVecs(), but the new vectors do not have an internal
   array, so the intended usage is with VecPlaceArray().

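   Example:
   A typical usage sketch (array stands for a user-provided buffer with at least
   as many entries as local columns of the matrix; it is not part of this interface):
.vb
   Vec x;

   PetscCall(MatCreateVecsEmpty(mat,&x,NULL));
   PetscCall(VecPlaceArray(x,array));
   ... use x ...
   PetscCall(VecResetArray(x));
   PetscCall(VecDestroy(&x));
.ve
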
   Level: developer

.seealso: VecDuplicateEmpty()
@*/
PetscErrorCode MatCreateVecsEmpty(Mat mat,Vec *right,Vec *left)
{
  PetscBool      standard,cuda=PETSC_FALSE,skip=PETSC_FALSE;
  PetscInt       M,N,mloc,nloc,rbs,cbs;
  PetscMPIInt    size;
  Vec            v;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);

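  /* standard and CUDA matrix types are handled directly below; for any other type,
     fall back to MatCreateVecs() and keep its vectors unless they are of a standard
     type that can be destroyed and recreated here without an internal array */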
  PetscCall(PetscObjectTypeCompareAny((PetscObject)mat,&standard,MATSEQAIJ,MATMPIAIJ,MATSEQBAIJ,MATMPIBAIJ,MATSEQSBAIJ,MATMPISBAIJ,MATSEQDENSE,MATMPIDENSE,""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)mat,&cuda,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,""));
  if (!standard && !cuda) {
    PetscCall(MatCreateVecs(mat,right,left));
    v = right? *right: *left;
    if (v) {
      PetscCall(PetscObjectTypeCompareAny((PetscObject)v,&standard,VECSEQ,VECMPI,""));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)v,&cuda,VECSEQCUDA,VECMPICUDA,""));
    }
    if (!standard && !cuda) skip = PETSC_TRUE;
    else {
      if (right) PetscCall(VecDestroy(right));
      if (left) PetscCall(VecDestroy(left));
    }
  }
  if (!skip) {
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
    PetscCall(MatGetLocalSize(mat,&mloc,&nloc));
    PetscCall(MatGetSize(mat,&M,&N));
    PetscCall(MatGetBlockSizes(mat,&rbs,&cbs));
    if (right) {
      if (cuda) {
#if defined(PETSC_HAVE_CUDA)
        if (size>1) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)mat),cbs,nloc,N,NULL,right));
        else PetscCall(VecCreateSeqCUDAWithArray(PetscObjectComm((PetscObject)mat),cbs,N,NULL,right));
#endif
      } else {
        if (size>1) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),cbs,nloc,N,NULL,right));
        else PetscCall(VecCreateSeqWithArray(PetscObjectComm((PetscObject)mat),cbs,N,NULL,right));
      }
    }
    if (left) {
      if (cuda) {
#if defined(PETSC_HAVE_CUDA)
        if (size>1) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)mat),rbs,mloc,M,NULL,left));
        else PetscCall(VecCreateSeqCUDAWithArray(PetscObjectComm((PetscObject)mat),rbs,M,NULL,left));
#endif
      } else {
        if (size>1) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),rbs,mloc,M,NULL,left));
        else PetscCall(VecCreateSeqWithArray(PetscObjectComm((PetscObject)mat),rbs,M,NULL,left));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   MatNormEstimate - Estimate the 2-norm of a matrix.

   Collective

   Input Parameters:
+  A   - the matrix
.  vrn - random vector with normally distributed entries (can be NULL)
-  w   - workspace vector (can be NULL)

   Output Parameter:
.  nrm - the norm estimate

   Notes:
   Does not need access to the matrix entries; it only performs a matrix-vector product.
   Based on work by I. Ipsen and coworkers; see https://ipsen.math.ncsu.edu/ps/slides_ima.pdf

   The input vector vrn must have unit 2-norm.
   If vrn is NULL, then it is created internally and filled with VecSetRandomNormal().

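   Example:
   A minimal usage sketch, letting the routine create the random and work
   vectors internally:
.vb
   PetscReal nrm;

   PetscCall(MatNormEstimate(A,NULL,NULL,&nrm));
.ve
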
   Level: developer

.seealso: VecSetRandomNormal()
@*/
PetscErrorCode MatNormEstimate(Mat A,Vec vrn,Vec w,PetscReal *nrm)
{
  PetscInt       n;
  Vec            vv=NULL,ww=NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  if (vrn) PetscValidHeaderSpecific(vrn,VEC_CLASSID,2);
  if (w) PetscValidHeaderSpecific(w,VEC_CLASSID,3);
  PetscAssertPointer(nrm,4);

  if (!vrn) {
    PetscCall(MatCreateVecs(A,&vv,NULL));
    vrn = vv;
    PetscCall(VecSetRandomNormal(vv,NULL,NULL,NULL));
    PetscCall(VecNormalize(vv,NULL));
  }
  if (!w) {
    PetscCall(MatCreateVecs(A,&ww,NULL));
    w = ww;
  }

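  /* for a random unit-norm direction vrn, n*E[||A*vrn||^2] equals the squared
     Frobenius norm of A, so sqrt(n)*||A*vrn|| serves as a cheap estimate of ||A|| */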
  PetscCall(MatGetSize(A,&n,NULL));
  PetscCall(MatMult(A,vrn,w));
  PetscCall(VecNorm(w,NORM_2,nrm));
  *nrm *= PetscSqrtReal((PetscReal)n);

  PetscCall(VecDestroy(&vv));
  PetscCall(VecDestroy(&ww));
  PetscFunctionReturn(PETSC_SUCCESS);
}
