Commits

Mark Adams committed f9426fe

refactored residuals by adding a 'residual' method to Mat
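
This commit introduces MatResidual(Mat,Vec,Vec,Vec), which computes the residual r = b - A*x. A matrix type can supply a fused implementation through the new MATOP_RESIDUAL slot; when none is present, MatResidual() falls back to MatMult() followed by VecAYPX(). A minimal sketch of a call site, assuming a matrix A and compatible vectors b, x, and r (names illustrative, not from this commit):

    ierr = MatResidual(A,b,x,r);CHKERRQ(ierr);   /* r = b - A*x */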

Files changed (20)

include/finclude/petscmat.h

       PetscEnum MATOP_SET_VALUES_BATCH
       PetscEnum MATOP_SET_BLOCK_SIZES
       PetscEnum MATOP_AYPX
+      PetscEnum MATOP_RESIDUAL
 
       parameter(MATOP_SET_VALUES=0)
       parameter(MATOP_GET_ROW=1)
       parameter(MATOP_SET_VALUES_BATCH=129)
       parameter(MATOP_SET_BLOCK_SIZES=139)
       parameter(MATOP_AYPX=140)
+      parameter(MATOP_RESIDUAL=141)
 !
 !
 !

include/petsc-private/matimpl.h

   /*139*/
   PetscErrorCode (*setblocksizes)(Mat,PetscInt,PetscInt);
   PetscErrorCode (*aypx)(Mat,PetscScalar,Mat,MatStructure);
+  PetscErrorCode (*residual)(Mat,Vec,Vec,Vec);
 };
 /*
     If you add MatOps entries above also add them to the MATOP enum
 PETSC_EXTERN PetscLogEvent MAT_GetMultiProcBlock;
 PETSC_EXTERN PetscLogEvent MAT_CUSPCopyToGPU, MAT_CUSPARSECopyToGPU, MAT_SetValuesBatch, MAT_SetValuesBatchI, MAT_SetValuesBatchII, MAT_SetValuesBatchIII, MAT_SetValuesBatchIV;
 PETSC_EXTERN PetscLogEvent MAT_ViennaCLCopyToGPU;
-PETSC_EXTERN PetscLogEvent MAT_Merge;
+PETSC_EXTERN PetscLogEvent MAT_Merge,MAT_Residual;
 
 #endif
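
A matrix implementation opts in by filling the new function-pointer slot, either in its static MatOps table (position 141, directly after aypx) or in its constructor. A minimal sketch for a hypothetical type, with MatResidual_MyType standing in for a genuinely fused kernel:

    /* Hypothetical fused residual kernel for a custom matrix type */
    static PetscErrorCode MatResidual_MyType(Mat A,Vec b,Vec x,Vec r)
    {
      PetscErrorCode ierr;

      PetscFunctionBegin;
      /* a real implementation would compute r = b - A*x in a single pass */
      ierr = MatMult(A,x,r);CHKERRQ(ierr);
      ierr = VecAYPX(r,-1.0,b);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }

    /* in the type's creation routine */
    B->ops->residual = MatResidual_MyType;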

include/petscmat.h

 PETSC_EXTERN PetscErrorCode MatMultConstrained(Mat,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatMatSolve(Mat,Mat,Mat);
+PETSC_EXTERN PetscErrorCode MatResidual(Mat,Vec,Vec,Vec);
 
 /*E
     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
                MATOP_RART_SYMBOLIC=137,
                MATOP_RART_NUMERIC=138,
                MATOP_SET_BLOCK_SIZES=139,
-               MATOP_AYPX=140
+               MATOP_AYPX=140,
+               MATOP_RESIDUAL=141
              } MatOperation;
 PETSC_EXTERN PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *);
 PETSC_EXTERN PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void));
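
Because MATOP_RESIDUAL (141) lines up with the new residual slot in the MatOps struct (see matimpl.h above), shell-matrix users can also install a custom residual at runtime. A hypothetical example, where MyResidual is a user-provided routine of type PetscErrorCode (*)(Mat,Vec,Vec,Vec):

    ierr = MatShellSetOperation(A,MATOP_RESIDUAL,(void(*)(void))MyResidual);CHKERRQ(ierr);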

src/ksp/pc/impls/mg/mgfunc.c

 
 #include <../src/ksp/pc/impls/mg/mgimpl.h>       /*I "petscksp.h" I*/
 
+/* ---------------------------------------------------------------------------*/
 #undef __FUNCT__
 #define __FUNCT__ "PCMGResidualDefault"
 /*@C
   PetscErrorCode ierr;
 
   PetscFunctionBegin;
-  ierr = MatMult(mat,x,r);CHKERRQ(ierr);
-  ierr = VecAYPX(r,-1.0,b);CHKERRQ(ierr);
+  ierr = MatResidual(mat,b,x,r);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
-/* ---------------------------------------------------------------------------*/
-
 #undef __FUNCT__
 #define __FUNCT__ "PCMGGetCoarseSolve"
 /*@
   if (!mglevels[l]->residual) mglevels[l]->residual = PCMGResidualDefault;
   if (mat) {ierr = PetscObjectReference((PetscObject)mat);CHKERRQ(ierr);}
   ierr = MatDestroy(&mglevels[l]->A);CHKERRQ(ierr);
-
   mglevels[l]->A = mat;
   PetscFunctionReturn(0);
 }
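
With this change PCMGResidualDefault delegates to MatResidual(), so multigrid automatically picks up any type-specific fused residual. Per-level overrides still work through the existing PCMGSetResidual() interface; a hedged sketch (pc, l, and A are assumed to be set up elsewhere):

    /* pass NULL for the residual function to keep the default, or supply your own */
    ierr = PCMGSetResidual(pc,l,PCMGResidualDefault,A);CHKERRQ(ierr);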

src/mat/impls/adj/mpi/mpiadj.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/aij/mpi/mpiaij.c

 
     ierr = MatCreate(subcomm,&C);CHKERRQ(ierr);
 
-    /* get local size of redundant matrix 
+    /* get local size of redundant matrix
       - mloc_sub is chosen for PETSC_SUBCOMM_INTERLACED; works for other types, but may not be efficient! */
     ierr = MatGetOwnershipRanges(mat,&range);CHKERRQ(ierr);
-    rstart_sub = range[nsubcomm*subrank]; 
+    rstart_sub = range[nsubcomm*subrank];
     if (subrank+1 < subsize) { /* not the last proc in subcomm */
       rend_sub = range[nsubcomm*(subrank+1)];
     } else {
     }
     mloc_sub = rend_sub - rstart_sub;
 
-    if (M == N) { 
+    if (M == N) {
       ierr = MatSetSizes(C,mloc_sub,mloc_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
     } else { /* non-square matrix */
-      ierr = MatSetSizes(C,mloc_sub,PETSC_DECIDE,PETSC_DECIDE,mat->cmap->N);CHKERRQ(ierr); 
+      ierr = MatSetSizes(C,mloc_sub,PETSC_DECIDE,PETSC_DECIDE,mat->cmap->N);CHKERRQ(ierr);
     }
     ierr = MatSetBlockSizes(C,mat->rmap->bs,mat->cmap->bs);CHKERRQ(ierr);
     ierr = MatSetFromOptions(C);CHKERRQ(ierr);
   PetscMPIInt    size,subsize;
   PetscInt       mloc_sub,rstart,rend,M=mat->rmap->N,N=mat->cmap->N;
   Mat_Redundant  *redund=NULL;
-  PetscSubcomm   psubcomm=NULL; 
+  PetscSubcomm   psubcomm=NULL;
   MPI_Comm       subcomm_in=subcomm;
   Mat            *matseq;
   IS             isrow,iscol;
- 
+
   PetscFunctionBegin;
   if (subcomm_in == MPI_COMM_NULL) { /* user does not provide subcomm */
     if (reuse ==  MAT_INITIAL_MATRIX) {
       ierr = PetscSubcommSetFromOptions(psubcomm);CHKERRQ(ierr);
       subcomm = psubcomm->comm;
     } else { /* retrieve psubcomm and subcomm */
-      ierr = PetscObjectGetComm((PetscObject)(*matredundant),&subcomm);CHKERRQ(ierr);      
+      ierr = PetscObjectGetComm((PetscObject)(*matredundant),&subcomm);CHKERRQ(ierr);
       ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
       if (subsize == 1) {
         Mat_SeqAIJ *c = (Mat_SeqAIJ*)(*matredundant)->data;
       ierr = MatGetRedundantMatrix_MPIAIJ_interlaced(mat,nsubcomm,subcomm,reuse,matredundant);CHKERRQ(ierr);
       if (reuse ==  MAT_INITIAL_MATRIX) { /* psubcomm is created in this routine, free it in MatDestroy_MatRedundant() */
         ierr = MPI_Comm_size(psubcomm->comm,&subsize);CHKERRQ(ierr);
-        if (subsize == 1) {  
+        if (subsize == 1) {
           Mat_SeqAIJ *c = (Mat_SeqAIJ*)(*matredundant)->data;
           c->redundant->psubcomm = psubcomm;
         } else {
         }
       }
       PetscFunctionReturn(0);
-    }   
-  } 
+    }
+  }
 
   /* use MPI subcomm via MatGetSubMatrices(); use subcomm_in or psubcomm->comm (psubcomm->type != INTERLACED) */
   ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
   if (reuse == MAT_INITIAL_MATRIX) {
     /* create a local sequential matrix matseq[0] */
     mloc_sub = PETSC_DECIDE;
-    ierr = PetscSplitOwnership(subcomm,&mloc_sub,&M);CHKERRQ(ierr); 
+    ierr = PetscSplitOwnership(subcomm,&mloc_sub,&M);CHKERRQ(ierr);
     ierr = MPI_Scan(&mloc_sub,&rend,1,MPIU_INT,MPI_SUM,subcomm);CHKERRQ(ierr);
     rstart = rend - mloc_sub;
     ierr = ISCreateStride(PETSC_COMM_SELF,mloc_sub,rstart,1,&isrow);CHKERRQ(ierr);
       Mat_MPIAIJ *c = (Mat_MPIAIJ*)(*matredundant)->data;
       redund = c->redundant;
     }
-      
+
     isrow  = redund->isrow;
     iscol  = redund->iscol;
     matseq = redund->matseq;
   }
   ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,reuse,&matseq);CHKERRQ(ierr);
   ierr = MatCreateMPIAIJConcatenateSeqAIJ(subcomm,matseq[0],PETSC_DECIDE,reuse,matredundant);CHKERRQ(ierr);
-    
+
   if (reuse == MAT_INITIAL_MATRIX) {
     /* create a supporting struct and attach it to C for reuse */
     ierr = PetscNewLog(*matredundant,Mat_Redundant,&redund);CHKERRQ(ierr);
     redund->isrow    = isrow;
     redund->iscol    = iscol;
     redund->matseq   = matseq;
-    redund->psubcomm = psubcomm; 
+    redund->psubcomm = psubcomm;
     redund->Destroy               = (*matredundant)->ops->destroy;
     (*matredundant)->ops->destroy = MatDestroy_MatRedundant;
   }
                                        0,
                                        0,
                                 /*139*/0,
+                                       0,
                                        0
 };
 

src/mat/impls/aij/seq/aij.c

                                         MatRARtSymbolic_SeqAIJ_SeqAIJ,
                                         MatRARtNumeric_SeqAIJ_SeqAIJ,
                                  /*139*/0,
+                                        0,
                                         0
 };
 

src/mat/impls/baij/mpi/mpibaij.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/baij/seq/baij.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/blockmat/seq/blockmat.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/composite/mcomposite.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/dense/mpi/mpidense.c

                                         0,
                                         0,
                                 /*139*/ 0,
+                                        0,
                                         0
 };
 

src/mat/impls/dense/seq/dense.c

                                         0,
                                         0,
                                 /*139*/ 0,
+                                        0,
                                         0
 };
 

src/mat/impls/sbaij/mpi/mpisbaij.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/sbaij/seq/sbaij.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/scatter/mscatter.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/impls/shell/shell.c

                                        0,
                                        0,
                                /*139*/ 0,
+                                       0,
                                        0
 };
 

src/mat/interface/dlregismat.c

   ierr = PetscLogEventRegister("MatCopy",          MAT_CLASSID,&MAT_Copy);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("MatConvert",       MAT_CLASSID,&MAT_Convert);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("MatScale",         MAT_CLASSID,&MAT_Scale);CHKERRQ(ierr);
+  ierr = PetscLogEventRegister("MatResidual",      MAT_CLASSID,&MAT_Residual);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("MatAssemblyBegin", MAT_CLASSID,&MAT_AssemblyBegin);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("MatAssemblyEnd",   MAT_CLASSID,&MAT_AssemblyEnd);CHKERRQ(ierr);
   ierr = PetscLogEventRegister("MatSetValues",     MAT_CLASSID,&MAT_SetValues);CHKERRQ(ierr);

src/mat/interface/ftn-custom/zmatrixf.c

 {
   *__ierr = MatFactorInfoInitialize(info);
 }
-

src/mat/interface/matrix.c

 PetscLogEvent MAT_GetMultiProcBlock;
 PetscLogEvent MAT_CUSPCopyToGPU, MAT_CUSPARSECopyToGPU, MAT_SetValuesBatch, MAT_SetValuesBatchI, MAT_SetValuesBatchII, MAT_SetValuesBatchIII, MAT_SetValuesBatchIV;
 PetscLogEvent MAT_ViennaCLCopyToGPU;
-PetscLogEvent MAT_Merge;
+PetscLogEvent MAT_Merge,MAT_Residual;
 
 const char *const MatFactorTypes[] = {"NONE","LU","CHOLESKY","ILU","ICC","ILUDT","MatFactorType","MAT_FACTOR_",0};
 
 }
 
 #undef __FUNCT__
+#define __FUNCT__ "MatResidual"
+/*@
+   MatResidual - Default routine to calculate the residual r = b - A*x.
+
+   Collective on Mat and Vec
+
+   Input Parameters:
++  mat - the matrix
+.  b   - the right-hand side
+-  x   - the approximate solution
+
+   Output Parameter:
+.  r - location to store the residual
+
+   Level: developer
+
+.keywords: MG, default, multigrid, residual
+
+.seealso: PCMGSetResidual()
+@*/
+PetscErrorCode  MatResidual(Mat mat,Vec b,Vec x,Vec r)
+{
+  PetscErrorCode ierr;
+
+  PetscFunctionBegin;
+  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
+  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
+  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
+  PetscValidHeaderSpecific(r,VEC_CLASSID,4);
+  PetscValidType(mat,1);
+  MatCheckPreallocated(mat,1);
+  ierr = PetscLogEventBegin(MAT_Residual,mat,0,0,0);CHKERRQ(ierr);
+  if (!mat->ops->residual) {
+    ierr = MatMult(mat,x,r);CHKERRQ(ierr);
+    ierr = VecAYPX(r,-1.0,b);CHKERRQ(ierr);
+  } else {
+    ierr = (*mat->ops->residual)(mat,b,x,r);CHKERRQ(ierr);
+  }
+  ierr = PetscLogEventEnd(MAT_Residual,mat,0,0,0);CHKERRQ(ierr);
+  PetscFunctionReturn(0);
+}
+
+#undef __FUNCT__
 #define __FUNCT__ "MatGetRowIJ"
 /*@C
     MatGetRowIJ - Returns the compressed row storage i and j indices for sequential matrices.