Commits

BarryFSmith committed 3d2707b (merge with conflicts)

Merge branch 'master' into barry/nonlinearsolvertable

Conflicts:
include/petscsnes.h

  • Parent commits be95d8f, 6bfd642

Comments (0)

Files changed (398)

File bin/matlab/classes/matlabheader.h

 int KSPSetDM(KSP,DM);
 int KSPGetPC(KSP,PC*);
 int KSPSetFromOptions(KSP);
-int KSPSetOperators(KSP,Mat,Mat,MatStructure);
+int KSPSetOperators(KSP,Mat,Mat);
 int KSPSolve(KSP,Vec,Vec);
 int KSPSetUp(KSP);
 int KSPGetSolution(KSP,Vec*);
           grep -v "tentative definition of size" | \
           grep -v "Extra instructions" | \
           grep -v "Unused external reference" | \
+          grep -v "Warning: attribute unused is unsupported and will be skipped" | \
           grep -v "f90 continuing despite warning messages" | \
           grep -v "symbol if the" | \
           grep -v "ignoring symbol version info" | \

File config/BuildSystem/config/compilers.py

     for flag in ['-D', '-WF,-D']:
       if self.setCompilers.checkCompilerFlag(flag+'Testing', body = '#define dummy \n           dummy\n#ifndef Testing\n       fooey\n#endif'):
         self.FortranDefineCompilerOption = flag
+        self.framework.addMakeMacro('FC_DEFINE_FLAG',self.FortranDefineCompilerOption)
         self.setCompilers.popLanguage()
         self.logPrint('Fortran uses '+flag+' for defining macro', 3, 'compilers')
         return

File config/BuildSystem/config/packages/MPI.py

       if config.setCompilers.Configure.isCygwin() and not config.setCompilers.Configure.isGNU(self.setCompilers.CC):
        raise RuntimeError('Sorry, cannot download-install MPICH on Windows. Suggest installing the Windows version of MPICH manually')
       self.liblist      = [[]]
-      if config.setCompilers.Configure.isSolaris():
+      if config.setCompilers.Configure.isSolaris() or self.framework.argDB['with-gcov']:
         self.download         = self.download_mpich_sol
       else:
         self.download         = self.download_mpich

File config/BuildSystem/config/packages/netcdf.py

     config.package.GNUPackage.__init__(self, framework)
     self.downloadpath    = 'http://www.unidata.ucar.edu/downloads/netcdf/ftp/'
     self.downloadext     = 'tar.gz'
-    self.downloadversion = '4.2.1.1'
+    self.downloadversion = '4.3.1.1'
     self.functions       = ['nccreate']
     self.includes        = ['netcdf.h']
     self.liblist         = [['libnetcdf.a']]

File include/finclude/petscmat.h

       parameter (MAT_DO_NOT_COPY_VALUES=0,MAT_COPY_VALUES=1)
       parameter (MAT_SHARE_NONZERO_PATTERN=2)
 !
-!  Flags for PCSetOperators()
+!  Flags for MatCopy, MatAXPY
 !
       PetscEnum DIFFERENT_NONZERO_PATTERN
       PetscEnum SUBSET_NONZERO_PATTERN
       PetscEnum SAME_NONZERO_PATTERN
-      PetscEnum SAME_PRECONDITIONER
 
       parameter (DIFFERENT_NONZERO_PATTERN = 0,SUBSET_NONZERO_PATTERN=1)
-      parameter (SAME_NONZERO_PATTERN = 2,SAME_PRECONDITIONER = 3)
+      parameter (SAME_NONZERO_PATTERN = 2)
+
 #if !(PETSC_USE_FORTRAN_DATATYPES_)
 #include "finclude/petscmatinfosize.h"
 #endif

File include/finclude/petscsysdef.h

 #else
 #define PetscInt integer4
 #endif
+#define Petsc64bitInt integer8
+#define PetscObjectState Petsc64bitInt
 
 #if (PETSC_SIZEOF_INT == 4)
 #define PetscFortranInt integer4

File include/petsc-private/kspimpl.h

 typedef struct _p_DMKSP *DMKSP;
 typedef struct _DMKSPOps *DMKSPOps;
 struct _DMKSPOps {
-  PetscErrorCode (*computeoperators)(KSP,Mat,Mat,MatStructure*,void*);
+  PetscErrorCode (*computeoperators)(KSP,Mat,Mat,void*);
   PetscErrorCode (*computerhs)(KSP,Vec,void*);
   PetscErrorCode (*computeinitialguess)(KSP,Vec,void*);
   PetscErrorCode (*destroy)(DMKSP*);

File include/petsc-private/matimpl.h

   PetscErrorCode (*placeholder_73)(Mat,void*);
   /*74*/
   PetscErrorCode (*setvaluesadifor)(Mat,PetscInt,void*);
-  PetscErrorCode (*fdcoloringapply)(Mat,MatFDColoring,Vec,MatStructure*,void*);
+  PetscErrorCode (*fdcoloringapply)(Mat,MatFDColoring,Vec,void*);
   PetscErrorCode (*setfromoptions)(Mat);
   PetscErrorCode (*multconstrained)(Mat,Vec,Vec);
   PetscErrorCode (*multtransposeconstrained)(Mat,Vec,Vec);
   PetscErrorCode (*residual)(Mat,Vec,Vec,Vec);
   PetscErrorCode (*fdcoloringsetup)(Mat,ISColoring,MatFDColoring);
   PetscErrorCode (*findoffblockdiagonalentries)(Mat,IS*);
+  /*144*/
 };
 /*
     If you add MatOps entries above also add them to the MATOP enum
   PetscBool              assembled;        /* is the matrix assembled? */
   PetscBool              was_assembled;    /* new values inserted into assembled mat */
   PetscInt               num_ass;          /* number of times matrix has been assembled */
-  PetscBool              same_nonzero;     /* matrix has same nonzero pattern as previous */
+  PetscObjectState       nonzerostate;     /* each time new nonzeros locations are introduced into the matrix this is updated */
   MatInfo                info;             /* matrix information */
   InsertMode             insertmode;       /* have values been inserted in matrix or added? */
   MatStash               stash,bstash;     /* used for assembling off-proc mat emements */
   PetscCUSPFlag          valid_GPU_matrix; /* flag pointing to the matrix on the gpu*/
 #endif
 #if defined(PETSC_HAVE_VIENNACL)
-  PetscViennaCLFlag          valid_GPU_matrix; /* flag pointing to the matrix on the gpu*/
+  PetscViennaCLFlag      valid_GPU_matrix; /* flag pointing to the matrix on the gpu*/
 #endif
   void                   *spptr;          /* pointer for special library like SuperLU */
   MatSolverPackage       solvertype;
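
The nonzerostate field above replaces the old same_nonzero flag and is exposed publicly through MatGetNonzeroState(), declared in include/petscmat.h later in this changeset. A minimal sketch, assuming A is already assembled, of how calling code might track it (the helper name is hypothetical):

    #include <petscmat.h>

    /* Hedged sketch: detect whether new nonzero locations were introduced
       into A since the last call, using the state counter added above.     */
    static PetscErrorCode CheckNonzeroStructureChanged(Mat A,PetscObjectState *laststate,PetscBool *changed)
    {
      PetscErrorCode   ierr;
      PetscObjectState state;

      ierr       = MatGetNonzeroState(A,&state);CHKERRQ(ierr);
      *changed   = (PetscBool)(state != *laststate);
      *laststate = state;
      return 0;
    }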

File include/petsc-private/pcimpl.h

 */
 struct _p_PC {
   PETSCHEADER(struct _PCOps);
-  DM             dm;
-  PetscInt       setupcalled;
-  PetscInt       setfromoptionscalled;
-  MatStructure   flag;
-  Mat            mat,pmat;
-  Vec            diagonalscaleright,diagonalscaleleft; /* used for time integration scaling */
-  PetscBool      diagonalscale;
-  PetscBool      nonzero_guess; /* used by PCKSP, PCREDUNDANT */
-  PetscBool      useAmat; /* used by several PC that including applying the operator inside the preconditioner */
-  PetscErrorCode (*modifysubmatrices)(PC,PetscInt,const IS[],const IS[],Mat[],void*); /* user provided routine */
-  void           *modifysubmatricesP; /* context for user routine */
-  void           *data;
-  PetscInt       presolvedone;  /* has PCPreSolve() already been run */
-  void           *user;             /* optional user-defined context */
+  DM               dm;
+  PetscInt         setupcalled;
+  PetscObjectState matstate,matnonzerostate;          /* last known nonzero state of the pmat associated with this PC */
+  PetscReal        reusepreconditioner;
+  MatStructure     flag;                              /* reset each PCSetUp() to indicate to PC implementations if nonzero structure has changed */ 
+
+  PetscInt         setfromoptionscalled;
+
+  Mat              mat,pmat;
+  Vec              diagonalscaleright,diagonalscaleleft; /* used for time integration scaling */
+  PetscBool        diagonalscale;
+  PetscBool        nonzero_guess; /* used by PCKSP, PCREDUNDANT */
+  PetscBool        useAmat; /* used by several PC that including applying the operator inside the preconditioner */
+  PetscErrorCode   (*modifysubmatrices)(PC,PetscInt,const IS[],const IS[],Mat[],void*); /* user provided routine */
+  void             *modifysubmatricesP; /* context for user routine */
+  void             *data;
+  PetscInt         presolvedone;  /* has PCPreSolve() already been run */
+  void             *user;             /* optional user-defined context */
 };
 
 PETSC_EXTERN PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;

File include/petsc-private/snesimpl.h

   PetscErrorCode (*userdestroy)(void**);
   PetscErrorCode (*computevariablebounds)(SNES,Vec,Vec);        /* user provided routine to set box constrained variable bounds */
   PetscErrorCode (*computepfunction)(SNES,Vec,Vec,void*);
-  PetscErrorCode (*computepjacobian)(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
+  PetscErrorCode (*computepjacobian)(SNES,Vec,Mat,Mat,void*);
   PetscErrorCode (*load)(SNES,PetscViewer);
 };
 
 typedef struct _DMSNESOps *DMSNESOps;
 struct _DMSNESOps {
   PetscErrorCode (*computefunction)(SNES,Vec,Vec,void*);
-  PetscErrorCode (*computejacobian)(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
+  PetscErrorCode (*computejacobian)(SNES,Vec,Mat,Mat,void*);
 
   /* objective */
   PetscErrorCode (*computeobjective)(SNES,Vec,PetscReal*,void*);
 
   /* Picard iteration functions */
   PetscErrorCode (*computepfunction)(SNES,Vec,Vec,void*);
-  PetscErrorCode (*computepjacobian)(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
+  PetscErrorCode (*computepjacobian)(SNES,Vec,Mat,Mat,void*);
 
   /* User-defined smoother */
   PetscErrorCode (*computegs)(SNES,Vec,Vec,void*);

File include/petsc-private/taodmimpl.h

   PetscErrorCode (*computeobjectiveandgradient)(Tao,Vec,PetscReal*,Vec,void*);
   PetscErrorCode (*computeobjective)(Tao,Vec,PetscReal*,void*);
   PetscErrorCode (*computegradient)(Tao,Vec,Vec,void*);
-  PetscErrorCode (*computehessian)(Tao,Vec,Mat*,Mat*,MatStructure*,void*);
+  PetscErrorCode (*computehessian)(Tao,Vec,Mat,Mat,MatStructure*,void*);
   PetscErrorCode (*computebounds)(TaoDM, Vec, Vec);
   PetscErrorCode (*computeinitialguess)(TaoDM, Vec);
 };

File include/petsc-private/taoimpl.h

     PetscErrorCode (*computeobjective)(Tao, Vec, PetscReal*, void*);
     PetscErrorCode (*computeobjectiveandgradient)(Tao, Vec, PetscReal*, Vec, void*);
     PetscErrorCode (*computegradient)(Tao, Vec, Vec, void*);
-    PetscErrorCode (*computehessian)(Tao, Vec, Mat*, Mat*, MatStructure*, void*);
+    PetscErrorCode (*computehessian)(Tao, Vec, Mat, Mat,  void*);
     PetscErrorCode (*computeseparableobjective)(Tao, Vec, Vec, void*);
     PetscErrorCode (*computeconstraints)(Tao, Vec, Vec, void*);
     PetscErrorCode (*computeinequalityconstraints)(Tao, Vec, Vec, void*);
     PetscErrorCode (*computeequalityconstraints)(Tao, Vec, Vec, void*);
-    PetscErrorCode (*computejacobian)(Tao, Vec, Mat*, Mat*, MatStructure*, void*);
-    PetscErrorCode (*computejacobianstate)(Tao, Vec, Mat*, Mat*, Mat*, MatStructure*, void*);
-    PetscErrorCode (*computejacobiandesign)(Tao, Vec, Mat*, void*);
-    PetscErrorCode (*computejacobianinequality)(Tao, Vec, Mat*, Mat*, MatStructure*, void*);
-    PetscErrorCode (*computejacobianequality)(Tao, Vec, Mat*, Mat*, MatStructure*, void*);
+    PetscErrorCode (*computejacobian)(Tao, Vec, Mat, Mat,  void*);
+    PetscErrorCode (*computejacobianstate)(Tao, Vec, Mat, Mat, Mat,  void*);
+    PetscErrorCode (*computejacobiandesign)(Tao, Vec, Mat, void*);
+    PetscErrorCode (*computejacobianinequality)(Tao, Vec, Mat, Mat,  void*);
+    PetscErrorCode (*computejacobianequality)(Tao, Vec, Mat, Mat,  void*);
     PetscErrorCode (*computebounds)(Tao, Vec, Vec, void*);
 
     PetscErrorCode (*convergencetest)(Tao,void*);

File include/petsc-private/tsimpl.h

 
 struct _TSOps {
   PetscErrorCode (*snesfunction)(SNES,Vec,Vec,TS);
-  PetscErrorCode (*snesjacobian)(SNES,Vec,Mat*,Mat*,MatStructure*,TS);
+  PetscErrorCode (*snesjacobian)(SNES,Vec,Mat,Mat,TS);
   PetscErrorCode (*setup)(TS);
   PetscErrorCode (*step)(TS);
   PetscErrorCode (*solve)(TS);

File include/petscksp.h

 PETSC_EXTERN PetscErrorCode KSPSolveTranspose(KSP,Vec,Vec);
 PETSC_EXTERN PetscErrorCode KSPReset(KSP);
 PETSC_EXTERN PetscErrorCode KSPDestroy(KSP*);
+PETSC_EXTERN PetscErrorCode KSPSetReusePreconditioner(KSP,PetscBool);
 
 PETSC_EXTERN PetscFunctionList KSPList;
 PETSC_EXTERN PetscBool         KSPRegisterAllCalled;
 PETSC_EXTERN PetscErrorCode KSPUnwindPreconditioner(KSP,Vec,Vec);
 PETSC_EXTERN PetscErrorCode KSPInitialResidual(KSP,Vec,Vec,Vec,Vec,Vec);
 
-PETSC_EXTERN PetscErrorCode KSPSetOperators(KSP,Mat,Mat,MatStructure);
-PETSC_EXTERN PetscErrorCode KSPGetOperators(KSP,Mat*,Mat*,MatStructure*);
+PETSC_EXTERN PetscErrorCode KSPSetOperators(KSP,Mat,Mat);
+PETSC_EXTERN PetscErrorCode KSPGetOperators(KSP,Mat*,Mat*);
 PETSC_EXTERN PetscErrorCode KSPGetOperatorsSet(KSP,PetscBool *,PetscBool *);
 PETSC_EXTERN PetscErrorCode KSPSetOptionsPrefix(KSP,const char[]);
 PETSC_EXTERN PetscErrorCode KSPAppendOptionsPrefix(KSP,const char[]);
 PETSC_EXTERN PetscErrorCode MatSchurComplementGetKSP(Mat,KSP*);
 PETSC_EXTERN PetscErrorCode MatSchurComplementSetKSP(Mat,KSP);
 PETSC_EXTERN PetscErrorCode MatSchurComplementSetSubMatrices(Mat,Mat,Mat,Mat,Mat,Mat);
-PETSC_EXTERN PetscErrorCode MatSchurComplementUpdateSubMatrices(Mat,Mat,Mat,Mat,Mat,Mat,MatStructure);
+PETSC_EXTERN PetscErrorCode MatSchurComplementUpdateSubMatrices(Mat,Mat,Mat,Mat,Mat,Mat);
 PETSC_EXTERN PetscErrorCode MatSchurComplementGetSubMatrices(Mat,Mat*,Mat*,Mat*,Mat*,Mat*);
 PETSC_EXTERN PetscErrorCode MatSchurComplementSetAinvType(Mat,MatSchurComplementAinvType);
 PETSC_EXTERN PetscErrorCode MatSchurComplementGetAinvType(Mat,MatSchurComplementAinvType*);
 PETSC_EXTERN PetscErrorCode KSPSetApplicationContext(KSP,void*);
 PETSC_EXTERN PetscErrorCode KSPGetApplicationContext(KSP,void*);
 PETSC_EXTERN PetscErrorCode KSPSetComputeRHS(KSP,PetscErrorCode (*func)(KSP,Vec,void*),void *);
-PETSC_EXTERN PetscErrorCode KSPSetComputeOperators(KSP,PetscErrorCode(*)(KSP,Mat,Mat,MatStructure*,void*),void*);
+PETSC_EXTERN PetscErrorCode KSPSetComputeOperators(KSP,PetscErrorCode(*)(KSP,Mat,Mat,void*),void*);
 PETSC_EXTERN PetscErrorCode KSPSetComputeInitialGuess(KSP,PetscErrorCode(*)(KSP,Vec,void*),void*);
-PETSC_EXTERN PetscErrorCode DMKSPSetComputeOperators(DM,PetscErrorCode(*)(KSP,Mat,Mat,MatStructure*,void*),void*);
-PETSC_EXTERN PetscErrorCode DMKSPGetComputeOperators(DM,PetscErrorCode(**)(KSP,Mat,Mat,MatStructure*,void*),void*);
+PETSC_EXTERN PetscErrorCode DMKSPSetComputeOperators(DM,PetscErrorCode(*)(KSP,Mat,Mat,void*),void*);
+PETSC_EXTERN PetscErrorCode DMKSPGetComputeOperators(DM,PetscErrorCode(**)(KSP,Mat,Mat,void*),void*);
 PETSC_EXTERN PetscErrorCode DMKSPSetComputeRHS(DM,PetscErrorCode(*)(KSP,Vec,void*),void*);
 PETSC_EXTERN PetscErrorCode DMKSPGetComputeRHS(DM,PetscErrorCode(**)(KSP,Vec,void*),void*);
 PETSC_EXTERN PetscErrorCode DMKSPSetComputeInitialGuess(DM,PetscErrorCode(*)(KSP,Vec,void*),void*);
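
The KSPSetComputeOperators() and DMKSPSet/GetComputeOperators() callbacks above lose their MatStructure* argument. A minimal sketch of such a callback under the new (KSP,Mat,Mat,void*) signature, assuming a DM (e.g. a DMDA) has been attached with KSPSetDM() so that the matrices arrive created and preallocated; the 1-D Laplacian entries are purely illustrative:

    #include <petscksp.h>

    /* Hedged sketch of a compute-operators callback in the new form; P is
       assumed created/preallocated by the DM attached to the KSP. */
    static PetscErrorCode ComputeOperators(KSP ksp,Mat A,Mat P,void *ctx)
    {
      PetscErrorCode ierr;
      PetscInt       i,rstart,rend,N;

      ierr = MatGetSize(P,&N,NULL);CHKERRQ(ierr);
      ierr = MatGetOwnershipRange(P,&rstart,&rend);CHKERRQ(ierr);
      for (i=rstart; i<rend; i++) {
        if (i > 0)   {ierr = MatSetValue(P,i,i-1,-1.0,INSERT_VALUES);CHKERRQ(ierr);}
        if (i < N-1) {ierr = MatSetValue(P,i,i+1,-1.0,INSERT_VALUES);CHKERRQ(ierr);}
        ierr = MatSetValue(P,i,i,2.0,INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = MatAssemblyBegin(P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      return 0;
    }

    /* registration, unchanged apart from the callback type:
       ierr = KSPSetComputeOperators(ksp,ComputeOperators,NULL);CHKERRQ(ierr); */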

File include/petscmat.h

 PETSC_EXTERN PetscFunctionList MatCoarsenList;
 
 /*E
-    MatStructure - Indicates if the matrix has the same nonzero structure
+    MatStructure - Indicates if two matrices have the same nonzero structure
 
     Level: beginner
 
    Any additions/changes here MUST also be made in include/finclude/petscmat.h
 
-.seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
+.seealso: MatCopy(), MatAXPY()
 E*/
-typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
+typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN} MatStructure;
 
 PETSC_EXTERN PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
 PETSC_EXTERN PetscErrorCode MatCreateDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
 
 PETSC_EXTERN PetscErrorCode MatSetUp(Mat);
 PETSC_EXTERN PetscErrorCode MatDestroy(Mat*);
+PETSC_EXTERN PetscErrorCode MatGetNonzeroState(Mat,PetscObjectState*);
 
 PETSC_EXTERN PetscErrorCode MatConjugate(Mat);
 PETSC_EXTERN PetscErrorCode MatRealPart(Mat);
 PETSC_EXTERN PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
 PETSC_EXTERN PetscErrorCode MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
 PETSC_EXTERN PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
+PETSC_EXTERN PetscErrorCode MatGetLayouts(Mat,PetscLayout*,PetscLayout*);
 PETSC_EXTERN PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
 PETSC_EXTERN PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
 PETSC_EXTERN PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring);
-PETSC_EXTERN PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
+PETSC_EXTERN PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,void *);
 PETSC_EXTERN PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec);
 PETSC_EXTERN PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
 PETSC_EXTERN PetscErrorCode MatFDColoringSetUp(Mat,ISColoring,MatFDColoring);
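
With SAME_PRECONDITIONER gone, MatStructure now only describes the relationship between two matrices, as in MatCopy() and MatAXPY(). A minimal sketch, assuming A and B are assembled with identical nonzero patterns (the helper name is hypothetical):

    #include <petscmat.h>

    static PetscErrorCode CopyThenAddScaled(Mat A,Mat B)
    {
      PetscErrorCode ierr;

      ierr = MatCopy(A,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr);      /* B <- A       */
      ierr = MatAXPY(B,2.0,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);  /* B <- B + 2*A */
      return 0;
    }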

File include/petscpc.h

 PETSC_EXTERN PetscErrorCode PCApplyTranspose(PC,Vec,Vec);
 PETSC_EXTERN PetscErrorCode PCApplyTransposeExists(PC,PetscBool *);
 PETSC_EXTERN PetscErrorCode PCApplyBAorABTranspose(PC,PCSide,Vec,Vec,Vec);
+PETSC_EXTERN PetscErrorCode PCSetReusePreconditioner(PC,PetscBool);
 
 #define PC_FILE_CLASSID 1211222
 
 PETSC_EXTERN PetscErrorCode PCSetModifySubMatrices(PC,PetscErrorCode(*)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void*);
 PETSC_EXTERN PetscErrorCode PCModifySubMatrices(PC,PetscInt,const IS[],const IS[],Mat[],void*);
 
-PETSC_EXTERN PetscErrorCode PCSetOperators(PC,Mat,Mat,MatStructure);
-PETSC_EXTERN PetscErrorCode PCGetOperators(PC,Mat*,Mat*,MatStructure*);
+PETSC_EXTERN PetscErrorCode PCSetOperators(PC,Mat,Mat);
+PETSC_EXTERN PetscErrorCode PCGetOperators(PC,Mat*,Mat*);
 PETSC_EXTERN PetscErrorCode PCGetOperatorsSet(PC,PetscBool *,PetscBool *);
 
 PETSC_EXTERN PetscErrorCode PCView(PC,PetscViewer);
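
PCSetOperators() likewise drops the MatStructure argument, and PCSetReusePreconditioner() is the new way to keep an existing preconditioner when only the values of the operator change. A minimal sketch at the PC level, assuming A, b and x are set up elsewhere; PCILU is only illustrative (sequential):

    #include <petscpc.h>

    static PetscErrorCode ApplyOnce(Mat A,Vec b,Vec x)
    {
      PetscErrorCode ierr;
      PC             pc;

      ierr = PCCreate(PetscObjectComm((PetscObject)A),&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCILU);CHKERRQ(ierr);
      ierr = PCSetOperators(pc,A,A);CHKERRQ(ierr);                  /* no MatStructure argument */
      ierr = PCSetReusePreconditioner(pc,PETSC_TRUE);CHKERRQ(ierr); /* keep factors if only values change */
      ierr = PCSetUp(pc);CHKERRQ(ierr);
      ierr = PCApply(pc,b,x);CHKERRQ(ierr);
      ierr = PCDestroy(&pc);CHKERRQ(ierr);
      return 0;
    }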

File include/petscsnes.h

 PETSC_EXTERN PetscErrorCode SNESSetFromOptions(SNES);
 
 PETSC_EXTERN PetscErrorCode MatCreateSNESMF(SNES,Mat*);
-PETSC_EXTERN PetscErrorCode MatMFFDComputeJacobian(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN PetscErrorCode MatMFFDComputeJacobian(SNES,Vec,Mat,Mat,void*);
 
 PETSC_EXTERN PetscErrorCode MatDAADSetSNES(Mat,SNES);
 
 PETSC_EXTERN PetscErrorCode SNESGetFunction(SNES,Vec*,PetscErrorCode (**)(SNES,Vec,Vec,void*),void**);
 PETSC_EXTERN PetscErrorCode SNESComputeFunction(SNES,Vec,Vec);
 
-PETSC_EXTERN PetscErrorCode SNESSetJacobian(SNES,Mat,Mat,PetscErrorCode (*)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void*);
-PETSC_EXTERN PetscErrorCode SNESGetJacobian(SNES,Mat*,Mat*,PetscErrorCode (**)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void**);
+PETSC_EXTERN PetscErrorCode SNESSetJacobian(SNES,Mat,Mat,PetscErrorCode (*)(SNES,Vec,Mat,Mat,void*),void*);
+PETSC_EXTERN PetscErrorCode SNESGetJacobian(SNES,Mat*,Mat*,PetscErrorCode (**)(SNES,Vec,Mat,Mat,void*),void**);
 PETSC_EXTERN PetscErrorCode SNESObjectiveComputeFunctionDefaultFD(SNES,Vec,Vec,void*);
-PETSC_EXTERN PetscErrorCode SNESComputeJacobianDefault(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
-PETSC_EXTERN PetscErrorCode SNESComputeJacobianDefaultColor(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN PetscErrorCode SNESComputeJacobianDefault(SNES,Vec,Mat,Mat,void*);
+PETSC_EXTERN PetscErrorCode SNESComputeJacobianDefaultColor(SNES,Vec,Mat,Mat,void*);
 PETSC_EXTERN PetscErrorCode SNESSetComputeInitialGuess(SNES,PetscErrorCode (*)(SNES,Vec,void*),void*);
-PETSC_EXTERN PetscErrorCode SNESSetPicard(SNES,Vec,PetscErrorCode (*)(SNES,Vec,Vec,void*),Mat,Mat,PetscErrorCode (*SNESJacobianFunction)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void*);
-PETSC_EXTERN PetscErrorCode SNESGetPicard(SNES,Vec*,PetscErrorCode (**)(SNES,Vec,Vec,void*),Mat*,Mat*,PetscErrorCode (**SNESJacobianFunction)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void**);
+PETSC_EXTERN PetscErrorCode SNESSetPicard(SNES,Vec,PetscErrorCode (*)(SNES,Vec,Vec,void*),Mat,Mat,PetscErrorCode (*)(SNES,Vec,Mat,Mat,void*),void*);
+PETSC_EXTERN PetscErrorCode SNESGetPicard(SNES,Vec*,PetscErrorCode (**)(SNES,Vec,Vec,void*),Mat*,Mat*,PetscErrorCode (**)(SNES,Vec,Mat,Mat,void*),void**);
 PETSC_EXTERN PetscErrorCode SNESSetInitialFunction(SNES,Vec);
 
 PETSC_EXTERN PetscErrorCode SNESSetObjective(SNES,PetscErrorCode (*)(SNES,Vec,PetscReal *,void*),void*);
 PETSC_EXTERN PetscErrorCode SNESTestLocalMin(SNES);
 
 /* Should this routine be private? */
-PETSC_EXTERN PetscErrorCode SNESComputeJacobian(SNES,Vec,Mat*,Mat*,MatStructure*);
+PETSC_EXTERN PetscErrorCode SNESComputeJacobian(SNES,Vec,Mat,Mat);
 
 PETSC_EXTERN PetscErrorCode SNESSetDM(SNES,DM);
 PETSC_EXTERN PetscErrorCode SNESGetDM(SNES,DM*);
 PETSC_EXTERN PetscErrorCode DMSNESGetFunction(DM,PetscErrorCode(**)(SNES,Vec,Vec,void*),void**);
 PETSC_EXTERN PetscErrorCode DMSNESSetNGS(DM,PetscErrorCode(*)(SNES,Vec,Vec,void*),void*);
 PETSC_EXTERN PetscErrorCode DMSNESGetNGS(DM,PetscErrorCode(**)(SNES,Vec,Vec,void*),void**);
-PETSC_EXTERN PetscErrorCode DMSNESSetJacobian(DM,PetscErrorCode(*)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void*);
-PETSC_EXTERN PetscErrorCode DMSNESGetJacobian(DM,PetscErrorCode(**)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void**);
-PETSC_EXTERN PetscErrorCode DMSNESSetPicard(DM,PetscErrorCode(*)(SNES,Vec,Vec,void*),PetscErrorCode(*)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void*);
-PETSC_EXTERN PetscErrorCode DMSNESGetPicard(DM,PetscErrorCode(**)(SNES,Vec,Vec,void*),PetscErrorCode(**)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void**);
+PETSC_EXTERN PetscErrorCode DMSNESSetJacobian(DM,PetscErrorCode(*)(SNES,Vec,Mat,Mat,void*),void*);
+PETSC_EXTERN PetscErrorCode DMSNESGetJacobian(DM,PetscErrorCode(**)(SNES,Vec,Mat,Mat,void*),void**);
+PETSC_EXTERN PetscErrorCode DMSNESSetPicard(DM,PetscErrorCode(*)(SNES,Vec,Vec,void*),PetscErrorCode(*)(SNES,Vec,Mat,Mat,void*),void*);
+PETSC_EXTERN PetscErrorCode DMSNESGetPicard(DM,PetscErrorCode(**)(SNES,Vec,Vec,void*),PetscErrorCode(**)(SNES,Vec,Mat,Mat,void*),void**);
 PETSC_EXTERN PetscErrorCode DMSNESSetObjective(DM,PetscErrorCode (*)(SNES,Vec,PetscReal *,void*),void*);
 PETSC_EXTERN PetscErrorCode DMSNESGetObjective(DM,PetscErrorCode (**)(SNES,Vec,PetscReal *,void*),void**);
 
 PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDASNESFunction)(DMDALocalInfo*,void*,void*,void*);
-PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDASNESJacobian)(DMDALocalInfo*,void*,Mat,Mat,MatStructure*,void*);
+PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDASNESJacobian)(DMDALocalInfo*,void*,Mat,Mat,void*);
 PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDASNESObjective)(DMDALocalInfo*,void*,PetscReal*,void*);
 
 PETSC_EXTERN PetscErrorCode DMDASNESSetFunctionLocal(DM,InsertMode,DMDASNESFunction,void*);
 PETSC_EXTERN PetscErrorCode DMDASNESSetJacobianLocal(DM,DMDASNESJacobian,void*);
 PETSC_EXTERN PetscErrorCode DMDASNESSetObjectiveLocal(DM,DMDASNESObjective,void*);
-PETSC_EXTERN PetscErrorCode DMDASNESSetPicardLocal(DM,InsertMode,PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*),PetscErrorCode (*)(DMDALocalInfo*,void*,Mat,Mat,MatStructure*,void*),void*);
+PETSC_EXTERN PetscErrorCode DMDASNESSetPicardLocal(DM,InsertMode,PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*),PetscErrorCode (*)(DMDALocalInfo*,void*,Mat,Mat,void*),void*);
 
 PETSC_EXTERN PetscErrorCode DMSNESSetFunctionLocal(DM,PetscErrorCode (*)(DM,Vec,Vec,void*),void*);
-PETSC_EXTERN PetscErrorCode DMSNESSetJacobianLocal(DM,PetscErrorCode (*)(DM,Vec,Mat,Mat,MatStructure*,void*),void*);
+PETSC_EXTERN PetscErrorCode DMSNESSetJacobianLocal(DM,PetscErrorCode (*)(DM,Vec,Mat,Mat,void*),void*);
 
 /* Routines for Multiblock solver */
 PETSC_EXTERN PetscErrorCode SNESMultiblockSetFields(SNES, const char [], PetscInt, const PetscInt *);
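
The SNESSetJacobian()/DMSNESSetJacobian() callbacks above now receive the matrices by value and no longer take a MatStructure*. A minimal sketch of a Jacobian callback in the new form (the entry computation itself is elided):

    #include <petscsnes.h>

    static PetscErrorCode FormJacobian(SNES snes,Vec x,Mat J,Mat Jpre,void *ctx)
    {
      PetscErrorCode ierr;

      /* ... insert the values of dF/dx at x into Jpre ... */
      ierr = MatAssemblyBegin(Jpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(Jpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      if (J != Jpre) {
        ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      }
      return 0;
    }

    /* registration is unchanged apart from the callback type:
       ierr = SNESSetJacobian(snes,J,Jpre,FormJacobian,NULL);CHKERRQ(ierr); */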

File include/petscsys.h

 typedef int PetscInt;
 #define MPIU_INT MPI_INT
 #endif
+#if defined(PETSC_HAVE_MPI_INT64_T)
+#  define MPIU_INT64 MPI_INT64_T
+#else
+#  define MPIU_INT64 MPI_LONG_LONG_INT
+#endif
 
 
 /*MC

File include/petsctao.h

 PETSC_EXTERN PetscErrorCode TaoSetObjectiveRoutine(Tao, PetscErrorCode(*)(Tao, Vec, PetscReal*,void*), void*);
 PETSC_EXTERN PetscErrorCode TaoSetGradientRoutine(Tao, PetscErrorCode(*)(Tao, Vec, Vec, void*), void*);
 PETSC_EXTERN PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao, PetscErrorCode(*)(Tao, Vec, PetscReal*, Vec, void*), void*);
-PETSC_EXTERN PetscErrorCode TaoSetHessianRoutine(Tao,Mat,Mat, PetscErrorCode(*)(Tao,Vec, Mat*, Mat*, MatStructure*, void*), void*);
+PETSC_EXTERN PetscErrorCode TaoSetHessianRoutine(Tao,Mat,Mat,PetscErrorCode(*)(Tao,Vec, Mat, Mat, void*), void*);
 PETSC_EXTERN PetscErrorCode TaoSetSeparableObjectiveRoutine(Tao, Vec, PetscErrorCode(*)(Tao, Vec, Vec, void*), void*);
 PETSC_EXTERN PetscErrorCode TaoSetConstraintsRoutine(Tao, Vec, PetscErrorCode(*)(Tao, Vec, Vec, void*), void*);
 PETSC_EXTERN PetscErrorCode TaoSetInequalityConstraintsRoutine(Tao, Vec, PetscErrorCode(*)(Tao, Vec, Vec, void*), void*);
 PETSC_EXTERN PetscErrorCode TaoSetEqualityConstraintsRoutine(Tao, Vec, PetscErrorCode(*)(Tao, Vec, Vec, void*), void*);
-PETSC_EXTERN PetscErrorCode TaoSetJacobianRoutine(Tao,Mat,Mat, PetscErrorCode(*)(Tao,Vec, Mat*, Mat*, MatStructure*, void*), void*);
-PETSC_EXTERN PetscErrorCode TaoSetJacobianStateRoutine(Tao,Mat,Mat,Mat, PetscErrorCode(*)(Tao,Vec, Mat*, Mat*, Mat*, MatStructure*, void*), void*);
-PETSC_EXTERN PetscErrorCode TaoSetJacobianDesignRoutine(Tao,Mat,PetscErrorCode(*)(Tao,Vec, Mat*, void*), void*);
-PETSC_EXTERN PetscErrorCode TaoSetJacobianInequalityRoutine(Tao,Mat,Mat,PetscErrorCode(*)(Tao,Vec, Mat*, Mat*, MatStructure*, void*), void*);
-PETSC_EXTERN PetscErrorCode TaoSetJacobianEqualityRoutine(Tao,Mat,Mat,PetscErrorCode(*)(Tao,Vec, Mat*, Mat*, MatStructure*, void*), void*);
+PETSC_EXTERN PetscErrorCode TaoSetJacobianRoutine(Tao,Mat,Mat, PetscErrorCode(*)(Tao,Vec, Mat, Mat, void*), void*);
+PETSC_EXTERN PetscErrorCode TaoSetJacobianStateRoutine(Tao,Mat,Mat,Mat, PetscErrorCode(*)(Tao,Vec, Mat, Mat, Mat, void*), void*);
+PETSC_EXTERN PetscErrorCode TaoSetJacobianDesignRoutine(Tao,Mat,PetscErrorCode(*)(Tao,Vec, Mat, void*), void*);
+PETSC_EXTERN PetscErrorCode TaoSetJacobianInequalityRoutine(Tao,Mat,Mat,PetscErrorCode(*)(Tao,Vec, Mat, Mat, void*), void*);
+PETSC_EXTERN PetscErrorCode TaoSetJacobianEqualityRoutine(Tao,Mat,Mat,PetscErrorCode(*)(Tao,Vec, Mat, Mat, void*), void*);
 
 PETSC_EXTERN PetscErrorCode TaoSetStateDesignIS(Tao, IS, IS);
 
 PETSC_EXTERN PetscErrorCode TaoIsGradientDefined(Tao,PetscBool*);
 PETSC_EXTERN PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao,PetscBool*);
 
-PETSC_EXTERN PetscErrorCode TaoComputeHessian(Tao, Vec, Mat*, Mat*, MatStructure*);
-PETSC_EXTERN PetscErrorCode TaoComputeJacobian(Tao, Vec, Mat*, Mat*, MatStructure*);
-PETSC_EXTERN PetscErrorCode TaoComputeJacobianState(Tao, Vec, Mat*, Mat*, Mat*, MatStructure*);
-PETSC_EXTERN PetscErrorCode TaoComputeJacobianEquality(Tao, Vec, Mat*, Mat*, MatStructure*);
-PETSC_EXTERN PetscErrorCode TaoComputeJacobianInequality(Tao, Vec, Mat*, Mat*, MatStructure*);
-PETSC_EXTERN PetscErrorCode TaoComputeJacobianDesign(Tao, Vec, Mat*);
+PETSC_EXTERN PetscErrorCode TaoComputeHessian(Tao, Vec, Mat, Mat);
+PETSC_EXTERN PetscErrorCode TaoComputeJacobian(Tao, Vec, Mat, Mat);
+PETSC_EXTERN PetscErrorCode TaoComputeJacobianState(Tao, Vec, Mat, Mat, Mat);
+PETSC_EXTERN PetscErrorCode TaoComputeJacobianEquality(Tao, Vec, Mat, Mat);
+PETSC_EXTERN PetscErrorCode TaoComputeJacobianInequality(Tao, Vec, Mat, Mat);
+PETSC_EXTERN PetscErrorCode TaoComputeJacobianDesign(Tao, Vec, Mat);
 
-PETSC_EXTERN PetscErrorCode TaoDefaultComputeHessian(Tao, Vec, Mat*, Mat*, MatStructure*, void*);
-
-PETSC_EXTERN PetscErrorCode TaoDefaultComputeHessianColor(Tao, Vec, Mat*, Mat*, MatStructure*, void*);
+PETSC_EXTERN PetscErrorCode TaoDefaultComputeHessian(Tao, Vec, Mat, Mat, void*);
+PETSC_EXTERN PetscErrorCode TaoDefaultComputeHessianColor(Tao, Vec, Mat, Mat, void*);
 PETSC_EXTERN PetscErrorCode TaoComputeDualVariables(Tao, Vec, Vec);
 PETSC_EXTERN PetscErrorCode TaoComputeDualVariables(Tao, Vec, Vec);
 PETSC_EXTERN PetscErrorCode TaoSetVariableBounds(Tao, Vec, Vec);
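
The Tao callbacks follow the same pattern: Hessians and Jacobians are now passed as Mat, without the MatStructure* argument. A minimal sketch of a Hessian callback, assuming H and Hpre were created and assembled by the caller; the constant diagonal stands in for real second derivatives:

    #include <petsctao.h>

    static PetscErrorCode FormHessian(Tao tao,Vec x,Mat H,Mat Hpre,void *ctx)
    {
      PetscErrorCode ierr;

      ierr = MatZeroEntries(Hpre);CHKERRQ(ierr);
      ierr = MatShift(Hpre,2.0);CHKERRQ(ierr);                       /* Hpre = 2*I as a stand-in */
      ierr = MatAssemblyBegin(Hpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(Hpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      return 0;
    }

    /* ierr = TaoSetHessianRoutine(tao,H,Hpre,FormHessian,NULL);CHKERRQ(ierr); */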

File include/petscts.h

 PETSC_EXTERN PetscErrorCode TSSetTimeStep(TS,PetscReal);
 
 PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*TSRHSFunction)(TS,PetscReal,Vec,Vec,void*);
-PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*TSRHSJacobian)(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*TSRHSJacobian)(TS,PetscReal,Vec,Mat,Mat,void*);
 PETSC_EXTERN PetscErrorCode TSSetRHSFunction(TS,Vec,TSRHSFunction,void*);
 PETSC_EXTERN PetscErrorCode TSGetRHSFunction(TS,Vec*,TSRHSFunction*,void**);
 PETSC_EXTERN PetscErrorCode TSSetRHSJacobian(TS,Mat,Mat,TSRHSJacobian,void*);
 PETSC_EXTERN PetscErrorCode TSSetForcingFunction(TS,PetscErrorCode (*)(TS,PetscReal,Vec,void*),void*);
 
 PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*TSIFunction)(TS,PetscReal,Vec,Vec,Vec,void*);
-PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*TSIJacobian)(TS,PetscReal,Vec,Vec,PetscReal,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*TSIJacobian)(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);
 PETSC_EXTERN PetscErrorCode TSSetIFunction(TS,Vec,TSIFunction,void*);
 PETSC_EXTERN PetscErrorCode TSGetIFunction(TS,Vec*,TSIFunction*,void**);
 PETSC_EXTERN PetscErrorCode TSSetIJacobian(TS,Mat,Mat,TSIJacobian,void*);
 PETSC_EXTERN PetscErrorCode TSGetIJacobian(TS,Mat*,Mat*,TSIJacobian*,void**);
 
 PETSC_EXTERN PetscErrorCode TSComputeRHSFunctionLinear(TS,PetscReal,Vec,Vec,void*);
-PETSC_EXTERN PetscErrorCode TSComputeRHSJacobianConstant(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN PetscErrorCode TSComputeRHSJacobianConstant(TS,PetscReal,Vec,Mat,Mat,void*);
 PETSC_EXTERN PetscErrorCode TSComputeIFunctionLinear(TS,PetscReal,Vec,Vec,Vec,void*);
-PETSC_EXTERN PetscErrorCode TSComputeIJacobianConstant(TS,PetscReal,Vec,Vec,PetscReal,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN PetscErrorCode TSComputeIJacobianConstant(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);
 PETSC_EXTERN PetscErrorCode TSComputeSolutionFunction(TS,PetscReal,Vec);
 PETSC_EXTERN PetscErrorCode TSComputeForcingFunction(TS,PetscReal,Vec);
 
 PETSC_EXTERN PetscErrorCode TSPythonSetType(TS,const char[]);
 
 PETSC_EXTERN PetscErrorCode TSComputeRHSFunction(TS,PetscReal,Vec,Vec);
-PETSC_EXTERN PetscErrorCode TSComputeRHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*);
+PETSC_EXTERN PetscErrorCode TSComputeRHSJacobian(TS,PetscReal,Vec,Mat,Mat);
 PETSC_EXTERN PetscErrorCode TSComputeIFunction(TS,PetscReal,Vec,Vec,Vec,PetscBool);
-PETSC_EXTERN PetscErrorCode TSComputeIJacobian(TS,PetscReal,Vec,Vec,PetscReal,Mat*,Mat*,MatStructure*,PetscBool);
+PETSC_EXTERN PetscErrorCode TSComputeIJacobian(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,PetscBool);
 PETSC_EXTERN PetscErrorCode TSComputeLinearStability(TS,PetscReal,PetscReal,PetscReal*,PetscReal*);
 
 PETSC_EXTERN PetscErrorCode TSVISetVariableBounds(TS,Vec,Vec);
 PETSC_EXTERN PetscErrorCode DMTSSetIJacobianSerialize(DM,PetscErrorCode (*)(void*,PetscViewer),PetscErrorCode (*)(void**,PetscViewer));
 
 PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDATSRHSFunctionLocal)(DMDALocalInfo*,PetscReal,void*,void*,void*);
-PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDATSRHSJacobianLocal)(DMDALocalInfo*,PetscReal,void*,Mat,Mat,MatStructure*,void*);
+PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDATSRHSJacobianLocal)(DMDALocalInfo*,PetscReal,void*,Mat,Mat,void*);
 PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDATSIFunctionLocal)(DMDALocalInfo*,PetscReal,void*,void*,void*,void*);
-PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDATSIJacobianLocal)(DMDALocalInfo*,PetscReal,void*,void*,PetscReal,Mat,Mat,MatStructure*,void*);
+PETSC_EXTERN_TYPEDEF typedef PetscErrorCode (*DMDATSIJacobianLocal)(DMDALocalInfo*,PetscReal,void*,void*,PetscReal,Mat,Mat,void*);
 
 PETSC_EXTERN PetscErrorCode DMDATSSetRHSFunctionLocal(DM,InsertMode,PetscErrorCode (*)(DMDALocalInfo*,PetscReal,void*,void*,void*),void *);
-PETSC_EXTERN PetscErrorCode DMDATSSetRHSJacobianLocal(DM,PetscErrorCode (*)(DMDALocalInfo*,PetscReal,void*,Mat,Mat,MatStructure*,void*),void *);
+PETSC_EXTERN PetscErrorCode DMDATSSetRHSJacobianLocal(DM,PetscErrorCode (*)(DMDALocalInfo*,PetscReal,void*,Mat,Mat,void*),void *);
 PETSC_EXTERN PetscErrorCode DMDATSSetIFunctionLocal(DM,InsertMode,PetscErrorCode (*)(DMDALocalInfo*,PetscReal,void*,void*,void*,void*),void *);
-PETSC_EXTERN PetscErrorCode DMDATSSetIJacobianLocal(DM,PetscErrorCode (*)(DMDALocalInfo*,PetscReal,void*,void*,PetscReal,Mat,Mat,MatStructure*,void*),void *);
+PETSC_EXTERN PetscErrorCode DMDATSSetIJacobianLocal(DM,PetscErrorCode (*)(DMDALocalInfo*,PetscReal,void*,void*,PetscReal,Mat,Mat,void*),void *);
 
 typedef struct _n_TSMonitorLGCtx*  TSMonitorLGCtx;
 typedef struct {
 PETSC_EXTERN PetscErrorCode TSGetDM(TS,DM*);
 
 PETSC_EXTERN PetscErrorCode SNESTSFormFunction(SNES,Vec,Vec,void*);
-PETSC_EXTERN PetscErrorCode SNESTSFormJacobian(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
+PETSC_EXTERN PetscErrorCode SNESTSFormJacobian(SNES,Vec,Mat,Mat,void*);
 
 #endif
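
The TS Jacobian callbacks change the same way. A minimal sketch of a TSIJacobian callback under the new (TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*) signature, written for the scalar test problem F(t,u,udot) = udot + u = 0 so the Jacobian is (shift+1)*I; B is assumed created and assembled by the caller:

    #include <petscts.h>

    static PetscErrorCode FormIJacobian(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat A,Mat B,void *ctx)
    {
      PetscErrorCode ierr;

      ierr = MatZeroEntries(B);CHKERRQ(ierr);
      ierr = MatShift(B,shift+1.0);CHKERRQ(ierr);   /* shift*dF/dUdot + dF/dU */
      ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      if (A != B) {
        ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      }
      return 0;
    }

    /* ierr = TSSetIJacobian(ts,A,B,FormIJacobian,NULL);CHKERRQ(ierr); */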
 # usage make allrcslabel NEW_RCS_LABEL=v_2_0_28
 allrcslabel:
 	-@${OMAKE} PETSC_ARCH=${PETSC_ARCH} NEW_RCS_LABEL=${NEW_RCS_LABEL} ACTION=rcslabel  alltree
+
+########################
+#
+# Create the include dependency graph (requires graphviz to be available)
+#
+includegraph:
+	-@${PETSC_DIR}/src/contrib/style/include-graph.sh includegraph.pdf
+	-@echo Include dependency graph written to includegraph.pdf
+
 #
 # -------------------------------------------------------------------------------
 #
 	sed "s/_$$//" | sort > /tmp/countfortranfunctions
 
 countcfunctions:
-	-@grep extern ${PETSC_DIR}/include/*.h  | grep "(" | tr -s ' ' | \
+	-@grep PETSC_EXTERN ${PETSC_DIR}/include/*.h  | grep "(" | tr -s ' ' | \
 	cut -d'(' -f1 | cut -d' ' -f3 | grep -v "\*" | tr -s '\012' |  \
-	tr 'A-Z' 'a-z' |  sort > /tmp/countcfunctions
+	tr 'A-Z' 'a-z' |  sort | uniq > /tmp/countcfunctions
 
 difffortranfunctions: countfortranfunctions countcfunctions
 	-@echo -------------- Functions missing in the fortran interface ---------------------

File src/contrib/fun3d/comp/flow.c

   PetscBool PreLoading;
 } AppCtx;                                      /*============================*/
 
-PetscErrorCode FormJacobian(SNES,Vec,Mat*,Mat*,MatStructure*,void*),
+PetscErrorCode FormJacobian(SNES,Vec,Mat,Mat,void*),
     FormFunction(SNES,Vec,Vec,void*),
     FormInitialGuess(SNES, GRID*),
     Monitor(SNES,PetscInt,double,void*),
 /*---------------------------------------------------------------------*/
 /* --------------------  Evaluate Jacobian F'(x) -------------------- */
 
-int FormJacobian(SNES snes, Vec x, Mat *Jac, Mat *B,MatStructure *flag, void *dummy)
+int FormJacobian(SNES snes, Vec x, Mat Jac, Mat jac,void *dummy)
 /*---------------------------------------------------------------------*/
 {
   AppCtx         *user  = (AppCtx*) dummy;
   GRID           *grid  = user->grid;
   TstepCtx       *tsCtx = user->tsCtx;
-  Mat            jac    = *B;
   Vec            localX = grid->qnodeLoc;
   PetscScalar    *qnode;
   PetscErrorCode ierr;
   /*ierr = PetscFortranObjectToCObject(ijac, &jac);CHKERRQ(ierr);*/
   /*ierr = MatView(jac,VIEWER_STDOUT_SELF);CHKERRQ(ierr);*/
   ierr  = VecRestoreArray(localX,&qnode);CHKERRQ(ierr);
-  ierr  = MatAssemblyBegin(*Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  ierr  = MatAssemblyEnd(*Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  *flag = SAME_NONZERO_PATTERN;
+  ierr  = MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
+  ierr  = MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   return 0;
 }
 
   PetscInt         nsnodeLoc, nvnodeLoc, nfnodeLoc;
   PetscInt         nnbound, nvbound, nfbound;
   PetscInt         bs = 5;
-  PetscInt         fdes;
+  PetscInt         fdes = 0;
   off_t       currentPos  = 0, newPos = 0;
   PetscInt         grid_param  = 13;
   PetscInt         cross_edges = 0;
   if (!flg) {
     ierr = PetscSortIntWithPermutation(nedgeLoc,tmp,eperm);CHKERRQ(ierr);
   }
-  ierr = PetscMallocValidate(__LINE__,__FUNCT__,__FILE__,0);CHKERRQ(ierr);
+  ierr = PetscMallocValidate(__LINE__,__FUNCT__,__FILE__);CHKERRQ(ierr);
   k = 0;
   for (i = 0; i < nedgeLoc; i++) {
 #if defined(INTERLACING)

File src/contrib/fun3d/incomp/flow.c

   PetscBool PreLoading;
 } AppCtx;                                      /*============================*/
 
-extern int  FormJacobian(SNES,Vec,Mat*,Mat*,MatStructure*,void*),
+extern int  FormJacobian(SNES,Vec,Mat,Mat,void*),
             FormFunction(SNES,Vec,Vec,void*),
             FormInitialGuess(SNES,GRID*),
             Update(SNES,void*),
 
 #undef __FUNCT__
 #define __FUNCT__ "FormJacobian"
-int FormJacobian(SNES snes,Vec x,Mat *Jac,Mat *B,MatStructure *flag,void *dummy)
+int FormJacobian(SNES snes,Vec x,Mat Jac,Mat pc_mat,void *dummy)
 /*---------------------------------------------------------------------*/
 {
   AppCtx      *user  = (AppCtx*) dummy;
   GRID        *grid  = user->grid;
   TstepCtx    *tsCtx = user->tsCtx;
-  Mat         pc_mat = *B;
   Vec         localX = grid->qnodeLoc;
   PetscScalar *qnode;
   int         ierr;
             grid->area,grid->xyzn,&tsCtx->cfl,
            &rank,&grid->nvertices);
   ierr  = VecRestoreArray(localX,&qnode);CHKERRQ(ierr);
-  ierr  = MatAssemblyBegin(*Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  ierr  = MatAssemblyEnd(*Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  *flag = SAME_NONZERO_PATTERN;
+  ierr  = MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
+  ierr  = MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 #if defined(MATRIX_VIEW)
   if ((tsCtx->itstep != 0) &&(tsCtx->itstep % tsCtx->print_freq) == 0) {
     PetscViewer viewer;

File src/contrib/style/include-graph.sh

 #
 
 # Set labels
-for f in `ls include/*.{h,hh}`
+for f in `ls include/*.h`
 do
   f2=${f#include/}
   f3=${f2/%.hh/2}
 # Set connections
 echo "  " >> $dotfilename
 echo "  //Connections to petsc-private:" >> $dotfilename
-for f in `ls include/*.{h,hh}`
+for f in `ls include/*.h`
 do
   f2=${f#include/}
   f3=${f2/%.hh/2}

File src/dm/impls/plex/plexcgns.c

         ierr = cg_ElementDataSize(cgid, 1, z, 1, &elementDataSize);CHKERRQ(ierr);
         ierr = PetscMalloc1(elementDataSize, &elements);CHKERRQ(ierr);
         ierr = cg_elements_read(cgid, 1, z, 1, elements, NULL);CHKERRQ(ierr);
-        for (c_loc = start, off = 0; c_loc < end; ++c_loc, ++c) {
+        for (c_loc = start, off = 0; c_loc <= end; ++c_loc, ++c) {
           switch (elements[off]) {
           case TRI_3:   numCorners = 3;break;
           case QUAD_4:  numCorners = 4;break;
         case HEXA_8:  numCorners = 8;break;
         default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid cell type %d", (int) cellType);
         }
-        for (c_loc = start; c_loc < end; ++c_loc, ++c) {
+        for (c_loc = start; c_loc <= end; ++c_loc, ++c) {
           ierr = DMPlexSetConeSize(*dm, c, numCorners);CHKERRQ(ierr);
         }
       }
       ierr = cg_elements_read(cgid, 1, z, 1, elements, NULL);CHKERRQ(ierr);
       if (cellType == MIXED) {
         /* CGNS uses Fortran-based indexing, sieve uses C-style and numbers cell first then vertices. */
-        for (c_loc = 0, v = 0; c_loc < numc; ++c_loc, ++c) {
+        for (c_loc = 0, v = 0; c_loc <= numc; ++c_loc, ++c) {
           switch (elements[v]) {
           case TRI_3:   numCorners = 3;break;
           case QUAD_4:  numCorners = 4;break;
             cone[v_loc] = elements[v]+numCells-1;
           }
           /* Tetrahedra are inverted */
-          if (cellType == TETRA_4) {
+          if (elements[v] == TETRA_4) {
             PetscInt tmp = cone[0];
             cone[0] = cone[1];
             cone[1] = tmp;
           }
           /* Hexahedra are inverted */
-          if (cellType == HEXA_8) {
+          if (elements[v] == HEXA_8) {
             PetscInt tmp = cone[1];
             cone[1] = cone[3];
             cone[3] = tmp;
         }
 
         /* CGNS uses Fortran-based indexing, sieve uses C-style and numbers cell first then vertices. */
-        for (c_loc = 0, v = 0; c_loc < numc; ++c_loc, ++c) {
+        for (c_loc = 0, v = 0; c_loc <= numc; ++c_loc, ++c) {
           for (v_loc = 0; v_loc < numCorners; ++v_loc, ++v) {
             cone[v_loc] = elements[v]+numCells-1;
           }
   if (!rank) {
     PetscInt off = 0;
     float   *x[3];
-    int      z, c, d;
+    int      z, d;
 
     ierr = PetscMalloc3(numVertices,&x[0],numVertices,&x[1],numVertices,&x[2]);CHKERRQ(ierr);
-    for (z = 1, c = 0; z <= nzones; ++z) {
+    for (z = 1; z <= nzones; ++z) {
       DataType_t datatype;
       cgsize_t   sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */
       cgsize_t   range_min[3] = {1, 1, 1};
       ierr = cg_ncoords(cgid, 1, z, &ncoords);CHKERRQ(ierr);
       if (ncoords != dim) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"CGNS file must have a coordinate array for each dimension, not %d\n",ncoords);
       for (d = 0; d < dim; ++d) {
-        ierr = cg_coord_info(cgid, 1, z, 1, &datatype, buffer);CHKERRQ(ierr);
+        ierr = cg_coord_info(cgid, 1, z, 1+d, &datatype, buffer);CHKERRQ(ierr);
         ierr = cg_coord_read(cgid, 1, z, buffer, RealSingle, range_min, range_max, x[d]);CHKERRQ(ierr);
       }
       if (dim > 0) {

File src/docs/doxygen/manual/user/introduction/getting-started.h

 with the following sequence of commands:
 \code
   KSPCreate(MPI_Comm comm,KSP *ksp);
-  KSPSetOperators(KSP ksp,Mat A,Mat PrecA,MatStructure flag);
+  KSPSetOperators(KSP ksp,Mat A,Mat PrecA);
   KSPSetFromOptions(KSP ksp);
   KSPSolve(KSP ksp,Vec b,Vec x);
   KSPDestroy(KSP ksp);

File src/docs/tao_tex/manual/part1.tex

         PetscErrorCode (*FormFGradient)(Tao,Vec,PetscReal*,Vec,void*), 
         void *user);
    TaoSetHessianRoutine(Tao tao, Mat H, Mat Hpre,
-        PetscErrorCode (*FormHessian)(Tao,Vec,Mat*,Mat*,MatStructure*,
+        PetscErrorCode (*FormHessian)(Tao,Vec,Mat,Mat,
         void*), void *user);
    TaoSolve(Tao tao);
    TaoDestroy(Tao tao);

File src/docs/tao_tex/manual/part2.tex

 Some optimization routines also require a Hessian matrix from the user.
 The routine that evaluates the Hessian should have the form 
 \begin{verbatim}
-   PetscErrorCode EvaluateHessian(Tao,Vec,Mat*,Mat*,MatStructure*,
+   PetscErrorCode EvaluateHessian(Tao,Vec,Mat,Mat,MatStructure*,
                      void*);
 \end{verbatim}
 where the first argument of this routine is a TAO solver object.  The
 \findex{TaoSetHessianRoutine()}
 \begin{verbatim}
    TaoSetHessianRoutine(Tao,Mat H, Mat Hpre,
-                     PetscErrorCode (*)(Tao,Vec,Mat*,Mat*,
-                     MatStructure*,void*), void *);
+                     PetscErrorCode (*)(Tao,Vec,Mat,Mat,
+                     void*), void *);
 \end{verbatim}
 routine. 
 The first argument is the TAO Solver object. The second and third arguments
 variable.  The evaluation of the Jacobian of $c$ should be performed 
 by calling the 
 \begin{verbatim}
-   PetscErrorCode JacobianState(Tao,Vec,Mat*,Mat*,Mat*,
+   PetscErrorCode JacobianState(Tao,Vec,Mat,Mat,Mat,
                      MatStructure*, void*);
    PetscErrorCode JacobianDesign(Tao,Vec,Mat*,void*);
 \end{verbatim}
 with TAO by using the 
 \begin{verbatim}
    TaoSetJacobianStateRoutine(Tao,Mat,Mat,Mat,
-                     PetscErrorCode (*)(Tao,Vec,Mat*,Mat*,
-                     MatStructure*,void*), void*);
+                     PetscErrorCode (*)(Tao,Vec,Mat,Mat,
+                     void*), void*);
    TaoSetJacobianDesignRoutine(Tao,Mat,
                      PetscErrorCode (*)(Tao,Vec,Mat*,void*), 
                      void*);
 The computation of the Jacobian of the separable objective routine 
 should be in a routine that looks like
 \begin{verbatim}
-   PetscErrorCode EvaluateJacobian(Tao,Vec,Mat*,Mat*,MatStructure*,
+   PetscErrorCode EvaluateJacobian(Tao,Vec,Mat,Mat,MatStructure*,
                      void*);
 \end{verbatim}
 This function can be registered with TAO by using the function
 \begin{verbatim}
    TaoSetJacobianRoutine(Tao,Mat J, Mat Jpre,
-                     PetscErrorCode (*)(Tao,Vec,Mat*,Mat*,
-                     MatStructure*,void*), void *);
+                     PetscErrorCode (*)(Tao,Vec,Mat,Mat,
+                     void*), void *);
 \end{verbatim}
 The first argument is the TAO solver object, the second and third arguments
 are the Mat object where the Jacobian will be stored and the Mat object
 The evaluation of the Jacobian of $C$ should be performed in a routine
 of the form
 \begin{verbatim}
-   PetscErrorCode EvaluateJacobian(Tao,Vec,Mat*,Mat*,MatStructure*,
+   PetscErrorCode EvaluateJacobian(Tao,Vec,Mat,Mat,MatStructure*,
                      void*);
 \end{verbatim}
 \noindent
 \findex{TaoAppSetJacobianRoutine()}
 \begin{verbatim}
    TaoSetJacobianRoutine(Tao,Mat J, Mat Jpre,
-                     PetscErrorCode (*)(Tao,Vec,Mat*,Mat*, 
-                     MatStructure*,void*), void*);
+                     PetscErrorCode (*)(Tao,Vec,Mat,Mat, 
+                     void*), void*);
 \end{verbatim}
 \noindent
 command.

File src/docs/tao_tex/manual/rosenbrock1.c

 
 /* -------------- User-defined routines ---------- */
 PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
-PetscErrorCode FormHessian(Tao,Vec,Mat*,Mat*,MatStructure*,void*);
+PetscErrorCode FormHessian(Tao,Vec,Mat,Mat,void*);
 
 int main(int argc,char **argv)
 {

File src/docs/tex/manual/part1.tex

 with the following sequence of commands:
 \begin{tabbing}
   KSPCreate(MPI\_Comm comm,KSP *ksp); \\
-  KSPSetOperators(KSP ksp,Mat Amat,Mat Pmat,MatStructure flag);\\
+  KSPSetOperators(KSP ksp,Mat Amat,Mat Pmat);\\
   KSPSetFromOptions(KSP ksp);\\
   KSPSolve(KSP ksp,Vec b,Vec x);\\
   KSPDestroy(KSP ksp);

File src/docs/tex/manual/part2.tex

 the following routine to set the matrices associated with the linear
 system:
 \begin{tabbing}
-  KSPSetOperators(KSP ksp,Mat Amat,Mat Pmat,MatStructure flag);
+  KSPSetOperators(KSP ksp,Mat Amat,Mat Pmat);
 \end{tabbing}
 The argument \trl{Amat}, representing the matrix that defines the
 linear system, is a symbolic place holder for any kind of matrix.
 occasionally these matrices differ (for instance, \sindex{preconditioning}
 when a preconditioning matrix is obtained from a lower order method than
 that employed to form the linear system matrix).
-The argument \trl{flag} can be used to eliminate unnecessary work when
-repeatedly solving linear systems of the same size with the same
-preconditioning method; when solving just one linear system, this flag is
-ignored.  The user can set \trl{flag} as follows:
-\begin{itemize}
-\item \trl{SAME_NONZERO_PATTERN} - the preconditioning matrix has the
-    same \findex{SAME_NONZERO_PATTERN} nonzero structure during successive
-    linear solves,
-\item \trl{DIFFERENT_NONZERO_PATTERN} - the preconditioning matrix does
-     not have the same nonzero structure during successive linear solves,
-   \findex{DIFFERENT_NONZERO_PATTERN}
-\item \trl{SAME_PRECONDITIONER} - the preconditioner matrix is identical
-   to that of the previous linear solve. \findex{SAME_PRECONDITIONER}
-\end{itemize}
-If the structure of a matrix is not known a priori, one should use
-the flag \trl{DIFFERENT_NONZERO_PATTERN}.
 
 Much of the power of KSP can be accessed through the single routine
 \begin{tabbing}
 as is typically done with
 \begin{tabbing}
   SNESSetJacobian(SNES snes,Mat Amat,Mat Pmat,PetscErrorCode (*FormJacobian)(SNES snes,\\
-          Vec x,Mat *A,Mat *B,MatStructure *flag,void *ctx),void *ctx);
+          Vec x,Mat A,Mat B,MatStructure *flag,void *ctx),void *ctx);
 \end{tabbing}
 The arguments of the routine \trl{
 FormJacobian()} are the current iterate, \trl{x}; the (approximate) Jacobian matrix,
 Section~\ref{sec_othermat} for details on the reuse of the matrix
 context.
 
-If the preconditioning matrix retains identical nonzero structure
-during successive nonlinear iterations, setting the parameter, \trl{flag},
-in the \trl{FormJacobian()} routine to be \trl{SAME_NONZERO_PATTERN}
-\findex{SAME_NONZERO_PATTERN} and reusing the matrix context can save
-considerable overhead.  For example, when one is using a parallel
-preconditioner such as incomplete factorization in solving the
-linearized Newton systems for such problems, matrix colorings and
-communication patterns can be determined a single time and then reused
-repeatedly throughout the solution process.  In addition, if using
-different matrices for the actual Jacobian and the preconditioner, the
-user can hold the preconditioner matrix fixed for multiple iterations
-by setting \trl{flag} to \trl{SAME_PRECONDITIONER}.  See the
-discussion of KSPSetOperators() in Section~\ref{sec_usingsles} for
-details.
-
 The directory \trl{${PETSC_DIR}/src/snes/examples/tutorials} provides
 a variety of examples.
 
   If using a fully implicit or semi-implict (IMEX) method one also can provide an appropriate (approximate) Jacobian matrix of $F()$.
   \begin{tabbing}
     TS\=SetIJacobian(TS ts,Mat A,Mat B,\\
-    \>PetscErrorCode (*fjac)(TS,PetscReal,Vec,Vec,PetscReal,Mat*,Mat*,MatStructure*,void*),void *jacP);
+    \>PetscErrorCode (*fjac)(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,MatStructure*,void*),void *jacP);
   \end{tabbing}
   The arguments for the function \trl{fjac()}
   are the timestep contex, current time, input state $u$, input
   If using a fully implicit method and the function $ G() $ is provided, one also can provide an appropriate (approximate) Jacobian matrix of $G()$.
   \begin{tabbing}
     TS\=SetRHSJacobian(TS ts,Mat A,Mat B,\\
-    \>PetscErrorCode (*fjac)(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*),void *jacP);
+    \>PetscErrorCode (*fjac)(TS,PetscReal,Vec,Mat,Mat,MatStructure*,void*),void *jacP);
   \end{tabbing}
   The arguments for the function \trl{fjac()}
   are the timestep contex, current time, input state $u$, matrix $A$, preconditioning matrix
 \item Provide the (approximate) Jacobian matrix of \trl{G(u)} and a
 function to compute it at each Newton iteration. This is done with the command
 \begin{tabbing}
- TSSetRHSJacobian(\=TS ts,Mat Amat, Mat Pmat,PetscErrorCode (*f)(TS,double,Vec,Mat*,Mat*,\\
+ TSSetRHSJacobian(\=TS ts,Mat Amat, Mat Pmat,PetscErrorCode (*f)(TS,double,Vec,Mat,Mat,\\
                   \>        MatStructure*,void*),void *fP);
 \end{tabbing}
 The  arguments for the function \trl{f()} are

File src/docs/website/documentation/changes/dev.html

       <h4>KSP:</h4>
       <ul>
         <li><tt>KSPSkipConverged()</tt> renamed to <tt>KSPConvergedSkip()</tt>.</li>
+        <li><tt>KSPSetOperators()</tt> no longer has the <tt>MatStructure</tt> argument. The Mat objects now track that information themselves. Use <tt>KSPSetReusePreconditioner()</tt> or <tt>PCSetReusePreconditioner()</tt> to prevent recomputation of the preconditioner when the operator changes, as <tt>SAME_PRECONDITIONER</tt> did with <tt>KSPSetOperators()</tt>; a before/after sketch follows this change list.</li>
        <li><tt>KSPDefaultConverged()</tt>, <tt>KSPDefaultConvergedDestroy()</tt>, <tt>KSPDefaultConvergedCreate()</tt>, <tt>KSPDefaultConvergedSetUIRNorm()</tt>, and <tt>KSPDefaultConvergedSetUMIRNorm()</tt> are now <tt>KSPConvergedDefault()</tt>, <tt>KSPConvergedDefaultDestroy()</tt>, <tt>KSPConvergedDefaultCreate()</tt>, <tt>KSPConvergedDefaultSetUIRNorm()</tt>, and <tt>KSPConvergedDefaultSetUMIRNorm()</tt>, for consistency.</li>
       </ul>
       <h4>SNES:</h4>
       <ul>
+        <li>The matrix arguments to the user functions provided with <tt>SNESSetJacobian()</tt> and <tt>SNESSetPicard()</tt> are now <tt>Mat</tt>, not <tt>Mat*</tt>.</li>
+        <li>The <tt>MatStructure</tt> argument to the user functions provided with <tt>SNESSetJacobian()</tt> and <tt>SNESSetPicard()</tt> is gone.</li>
         <li><tt>SNESSetInitialFunctionNorm()</tt> is removed as it's not necessary given that the norm is cached on the Vec.  Use only <tt>SNESSetInitialFunction()</tt>.</li>
         <li><tt>SNESSkipConverged()</tt> renamed to <tt>SNESConvergedSkip()</tt>.</li>
        <li><tt>GS</tt> and _gs used for nonlinear Gauss-Seidel changed to <tt>NGS</tt> and _ngs to match all the other nonlinear solver names.</li>
       </ul>
       <h4>SNESLineSearch:</h4>
       <h4>TS:</h4>
+      <ul>
+        <li>The matrix arguments to the user functions provided with <tt>TSSetRHSJacobian()</tt> and <tt>TSSetIJacobian()</tt> are now <tt>Mat</tt>, not <tt>Mat*</tt>.</li>
+        <li>The <tt>MatStructure</tt> argument to the user functions provided with <tt>TSSetRHSJacobian()</tt> and <tt>TSSetIJacobian()</tt> is gone.</li>
+      </ul>
       <h4>DM/DA:</h4>
       <ul>
         <li>The MatType argument is removed from DMCreateMatrix(), you can use DMSetMatType() to indicate the type you want used with a DM, defaults to MATAIJ</li>
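
A minimal before/after sketch of the KSPSetOperators() migration described in the KSP list above (ksp, A and P are assumed to exist already):

    #include <petscksp.h>

    /* Before (with the MatStructure argument):
         ierr = KSPSetOperators(ksp,A,P,SAME_PRECONDITIONER);CHKERRQ(ierr);
       After this change:                                                   */
    static PetscErrorCode SetOperatorsKeepPC(KSP ksp,Mat A,Mat P)
    {
      PetscErrorCode ierr;

      ierr = KSPSetReusePreconditioner(ksp,PETSC_TRUE);CHKERRQ(ierr);
      ierr = KSPSetOperators(ksp,A,P);CHKERRQ(ierr);
      return 0;
    }

    /* The *_NONZERO_PATTERN values no longer need to be supplied either: the
       Mat now tracks its own nonzero state (see MatGetNonzeroState()). */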

File src/docs/website/documentation/faq.html

 
       <h3><a name="gpus">Can PETSc use GPUs to speedup computations?</a></h3>
 
-      PETSc-dev has some support for running portions of the computation on
-      Nvidia GPUs. See <a href="http://www.mcs.anl.gov/petsc/features/gpus.html">PETSc GPUs</a> for
-      more information. PETSc has a Vec class VECCUSP that performs almost all
-      the vector operations on the GPU. The Mat class MATCUSP performs
-      matrix-vector products on the GPU but does not have matrix assembly on the
+      The <a href="https://bitbucket.org/petsc/petsc">PETSc developer repository</a> has some support for running portions of the computation on
+      GPUs. See <a href="http://www.mcs.anl.gov/petsc/features/gpus.html">PETSc GPUs</a> for
+      more information. PETSc has Vec classes VECCUSP and VECVIENNACL which perform almost all
+      the vector operations on the GPU. The Mat classes AIJCUSP and AIJVIENNACL perform
+      matrix-vector products on the GPU but do not have matrix assembly on the
       GPU yet. Both of these classes run in parallel with MPI. All KSP methods,
       except KSPIBCGS, run all their vector operations on the GPU thus, for
       example Jacobi preconditioned Krylov methods run completely on the GPU.
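
A minimal sketch of selecting one of the GPU vector classes mentioned above in code; VECCUSP and VECVIENNACL are only registered in builds configured with CUSP or ViennaCL, respectively:

    #include <petscvec.h>

    static PetscErrorCode MakeGPUVector(MPI_Comm comm,PetscInt n,Vec *x)
    {
      PetscErrorCode ierr;

      ierr = VecCreate(comm,x);CHKERRQ(ierr);
      ierr = VecSetSizes(*x,PETSC_DECIDE,n);CHKERRQ(ierr);
      ierr = VecSetType(*x,VECCUSP);CHKERRQ(ierr); /* or VECVIENNACL; or VecSetFromOptions() with -vec_type cusp */
      return 0;
    }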

File src/docs/website/documentation/installation.html

             Thrust has been included in CUDA since the 4.0 release - so a separate install is not needed.
           </li>
 
-          <li>Currently using CUDA-4.1 or CUDA-4.2 with Cusp-v0.3.1</li>
+          <li>Currently using CUDA-4.1 or CUDA-4.2 with Cusp-v0.3.1 or CUDA-5 with Cusp-v0.4.0.</li>
 
           <li>
             On Linux - make sure you have  compatible <a href="http://developer.nvidia.com/cuda-downloads"> NVidia driver</a>
 
       <hr>
 
+      <div>
+        <h3><a name="cuda">Installing PETSc to use GPUs and accelerators via OpenCL (NVIDIA, AMD, Intel MIC)</a></h3>
+
+        <ul>
+          <li>
+            Requires <a href="http://viennacl.sourceforge.net/">ViennaCL</a> and the OpenCL shared library, which is shipped in the vendor graphics driver.
+            Make sure the OpenCL headers are available on your system by e.g. downloading them from the <a href="https://www.khronos.org/opencl/">Khronos Group</a> directly.
+            Package managers on Linux provide these headers through a package named 'opencl-headers' or similar.
+          </li>
+
+          <li>
+            Always make sure you have the latest GPU driver installed. There are several known issues with older driver versions.
+          </li>
+
+          <li>check <code>config/examples/arch-viennacl.py</code> for example usage.</li>
+        </ul>
+
+        <p>
+          OpenCL/ViennaCL builds of PETSc currently work on Apple OS X, Linux, and Microsoft Windows. OpenCL is less of a burden on the build system than CUDA, so installation is often easier and works with any host compiler.
+        </p>
+
+        <a href="#" target="_top">Return to Installation Instructions</a>
+      </div> <!-- #opencl -->
+
+      <hr>
+
       <div id="threads">
         <h3><a name="threads">Installing PETSc to use Threads</a></h3>
 	<p>

File src/docs/website/features/details.html

             <li><span class="current">Component Details</span></li>
             <li><a href="diagram.html">Diagram</a></li>
             <li><a href="gpus.html">GPUs</a></li>
-            <li><a href="threads.html">Theads</a></li>
+            <li><a href="threads.html">Threads</a></li>
           </ul>
         </li>
         <li><a href="../documentation/index.html">Documentation</a></li>

File src/docs/website/features/diagram.html

             <li><a href="details.html">Component Details</a></li>
             <li><span class="current">Diagram</span></li>
             <li><a href="gpus.html">GPUs</a></li>
-            <li><a href="threads.html">Theads</a></li>
+            <li><a href="threads.html">Threads</a></li>
           </ul>
         </li>
         <li><a href="../documentation/index.html">Documentation</a></li>

File src/docs/website/features/gpus.html

   <head>
     <meta http-equiv="content-type" content="text/html;charset=utf-8">
     <link href="../style.css" rel="stylesheet" type="text/css">
-    <title>PETSc: Features: Nvidia GPU support</title>
+    <title>PETSc: Features: GPU support</title>
   </head>
   <body>
 
     </div>
 
     <div id="header">
-      <h1>Features: Nvidia GPU support</h1>
+      <h1>Features: GPU support</h1>
     </div>
 
     <hr>
             <li><a href="details.html">Component Details</a></li>
             <li><a href="diagram.html">Diagram</a></li>
             <li><span class="current">GPUs</span></li>
-            <li><a href="threads.html">Theads</a></li>
+            <li><a href="threads.html">Threads</a></li>
           </ul>
         </li>
         <li><a href="../documentation/index.html">Documentation</a></li>
     <div id="main">
 
       <p>
-        PETSc algebraic solvers now run on Nvidia GPU systems. This sometimes provides an
+        PETSc algebraic solvers now run on GPU systems from NVIDIA and AMD. This sometimes provides an
         alternative high-performance, low-cost solution technique.
       </p>
 
       </span>
 
       <ul>
-        <li><a href="http://www.mcs.anl.gov/petsc/documentation/installation.html#CUDA">Installing PETSc to use the Nvidia GPUs</a></li>
+        <li><a href="http://www.mcs.anl.gov/petsc/documentation/installation.html#CUDA">Installing PETSc to use NVIDIA GPUs (CUDA)</a></li>
+        <li><a href="http://www.mcs.anl.gov/petsc/documentation/installation.html#OpenCL">Installing PETSc to use GPUs independent of the vendor (OpenCL)</a></li>
         <li><a href="gpus.pdf">How the GPU solvers are implemented in PETSc</a></li>
         <li><a href="http://www.mcs.anl.gov/petsc/petsc-dev/src/snes/examples/tutorials/ex47cu.cu.html">Example that uses CUSP directly in the user function evaluation</a></li>
         <li><a href="http://vimeo.com/18768859">Presentation on some aspects of GPU usage from PETSc</a></li>
 
         <li>
-          Quick summary of usage:
+          Quick summary of usage with CUDA (provided by the <a href="http://cusplibrary.github.io/" title="CUSP library">CUSP library</a>):
           <ul>
             <li>
               The VecType <code>VECSEQCUSP</code>, <code>VECMPICUSP</code>, or
               If you are creating the vectors and matrices with a
               DM, you can use -dm_vec_type cusp and -dm_mat_type aijcusp
             </li>
+          </ul>
+        </li>
+
+        <li>
+          Quick summary of usage with OpenCL (provided by the <a href="http://viennacl.sourceforge.net/" title="ViennaCL">ViennaCL library</a>):
+          <ul>
+            <li>
+              The VecType <code>VECSEQVIENNACL</code>, <code>VECMPIVIENNACL</code>, or
+              <code>VECVIENNACL</code> may be used with <code>VecSetType()</code> or
+              -vec_type seqviennacl, mpiviennacl, or viennacl when <code>VecSetFromOptions()</code> is used.
+            </li>
 
             <li>
+              The MatType <code>MATSEQAIJVIENNACL</code>, <code>MATMPIAIJVIENNACL</code>, or
+              <code>MATAIJVIENNACL</code> may be used with <code>MatSetType()</code> or -mat_type
+              seqaijviennacl, mpiaijviennacl, or aijviennacl when <code>MatSetFromOptions()</code>
+              is used.
+            </li>
+
+            <li>
+              If you are creating the vectors and matrices with a
+              DM, you can use -dm_vec_type viennacl and -dm_mat_type aijviennacl
+            </li>
+          </ul>
+        </li>
+
+        <li>
+          General hints:
+          <ul>
+            <li>
              It is useful to develop your code with the default
              vector and matrix types and then switch to the GPU types via the
              command-line options for production runs, since debugging on GPUs
              is difficult. (A short sketch of selecting the GPU types in code follows below.)
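To make the quick-usage summaries above concrete, here is a short sketch (my own illustration; the diagonal matrix is arbitrary) that sets the types in code rather than on the command line. Swapping <code>MATAIJVIENNACL</code>/<code>VECVIENNACL</code> for <code>MATAIJCUSP</code>/<code>VECCUSP</code> gives the CUDA variant:

    #include <petscmat.h>

    int main(int argc,char **argv)
    {
      Mat            A;
      Vec            x,y;
      PetscInt       i,n = 10;
      PetscErrorCode ierr;

      ierr = PetscInitialize(&argc,&argv,NULL,NULL);CHKERRQ(ierr);

      /* Select the ViennaCL (OpenCL) matrix type in code */
      ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
      ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);CHKERRQ(ierr);
      ierr = MatSetType(A,MATAIJVIENNACL);CHKERRQ(ierr);
      ierr = MatSetUp(A);CHKERRQ(ierr);
      for (i=0; i<n; i++) {ierr = MatSetValue(A,i,i,(PetscScalar)(i+1),INSERT_VALUES);CHKERRQ(ierr);}
      ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

      /* Matching GPU vector type */
      ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
      ierr = VecSetSizes(x,PETSC_DECIDE,n);CHKERRQ(ierr);
      ierr = VecSetType(x,VECVIENNACL);CHKERRQ(ierr);
      ierr = VecDuplicate(x,&y);CHKERRQ(ierr);
      ierr = VecSet(x,1.0);CHKERRQ(ierr);

      /* The matrix-vector product runs on the GPU */
      ierr = MatMult(A,x,y);CHKERRQ(ierr);
      ierr = VecView(y,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

      ierr = MatDestroy(&A);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = VecDestroy(&y);CHKERRQ(ierr);
      ierr = PetscFinalize();
      return 0;
    }

As noted above, matrix assembly (the MatSetValue loop) still happens on the CPU; only the subsequent MatMult() runs on the GPU.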

File src/docs/website/index.html

         structures and routines for the scalable (parallel) solution of
         scientific applications modeled by partial differential equations. It
         supports MPI, <a href="features/threads.html">shared memory pthreads</a>,
-        and <a href="features/gpus.html">NVIDIA GPUs</a>, as well as hybrid
+        and <a href="features/gpus.html">GPUs through CUDA or OpenCL</a>, as well as hybrid
         MPI-shared memory pthreads or MPI-GPU parallelism.
       </p>
 

File src/docs/website/miscellaneous/external.html

 
         <li>BLAS and LAPACK</li>
 
+
+        <li><a href="http://cusplibrary.github.io/"> CUSP</a> - a C++ templated sparse matrix library for CUDA GPUs.</li>
+
         <li><a href="http://www.cs.sandia.gov/CRF/chac.html"> Chaco</a> - a graph partitioning package.</li>
 
         <li>ESSL - IBM's math library for fast sparse direct LU factorization.</li>
           - sparse direct solvers, developed by Timothy A. Davis.
         </li>
 
+        <li>
+          <a href="http://viennacl.sourceforge.net/">ViennaCL</a>
+          - Linear algebra library providing matrix and vector operations using OpenMP, CUDA, and OpenCL.
+        </li>
+
       </ul>
 
       <p> PETSc contains modifications of routines from</p>

File src/ksp/ksp/examples/tests/ex1.c

 
   /* solve linear system */
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
-  ierr = KSPSetOperators(ksp,C,C,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
+  ierr = KSPSetOperators(ksp,C,C);CHKERRQ(ierr);
   ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
   ierr = KSPSolve(ksp,b,u);CHKERRQ(ierr);
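This and the remaining examples in this commit drop the final MatStructure argument from KSPSetOperators(). For code that previously passed SAME_PRECONDITIONER to keep the existing preconditioner while changing the operator, the sketch below assumes the replacement is the new KSPSetReusePreconditioner() call (consult the KSPSetOperators() man page if this differs in your PETSc version); the SAME/DIFFERENT_NONZERO_PATTERN distinction is no longer passed in, presumably because the matrix now tracks changes to its own nonzero structure.

    #include <petscksp.h>

    /* Hypothetical helper, not from the PETSc sources: set the (possibly updated)
       operator on a KSP and, when requested, keep the previously built
       preconditioner, which is the role the removed SAME_PRECONDITIONER flag played. */
    static PetscErrorCode SetOperatorKeepPC(KSP ksp,Mat A,PetscBool keep_pc)
    {
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);               /* new three-argument form */
      ierr = KSPSetReusePreconditioner(ksp,keep_pc);CHKERRQ(ierr); /* replaces SAME_PRECONDITIONER */
      PetscFunctionReturn(0);
    }

The helper name is made up; the two PETSc calls inside it are the point.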
 

File src/ksp/ksp/examples/tests/ex10.c

 
   /* Solve linear system */
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
-  ierr = KSPSetOperators(ksp,mat,mat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
+  ierr = KSPSetOperators(ksp,mat,mat);CHKERRQ(ierr);
   ierr = KSPGMRESSetRestart(ksp,2*m);CHKERRQ(ierr);
   ierr = KSPSetTolerances(ksp,1.e-10,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
   ierr = KSPSetType(ksp,KSPCG);CHKERRQ(ierr);

File src/ksp/ksp/examples/tests/ex11.c

 
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp_A);CHKERRQ(ierr);
   ierr = KSPSetOptionsPrefix(ksp_A,"fc_");CHKERRQ(ierr);
-  ierr = KSPSetOperators(ksp_A,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
+  ierr = KSPSetOperators(ksp_A,A,A);CHKERRQ(ierr);
 
   ierr = KSPGetPC(ksp_A,&pc_A);CHKERRQ(ierr);
   ierr = PCSetType(pc_A,PCFIELDSPLIT);CHKERRQ(ierr);

File src/ksp/ksp/examples/tests/ex12f.F

 
 ! Solve system
       call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
-      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
+      call KSPSetOperators(ksp,A,A,ierr)
       call KSPSetFromOptions(ksp,ierr)
       call KSPSolve(ksp,b,x,ierr)
 

File src/ksp/ksp/examples/tests/ex14.c

    User-defined routines
 */
 extern PetscErrorCode ComputeFunction(AppCtx*,Vec,Vec),FormInitialGuess(AppCtx*,Vec);
-extern PetscErrorCode ComputeJacobian(AppCtx*,Vec,Mat,MatStructure*);
+extern PetscErrorCode ComputeJacobian(AppCtx*,Vec,Mat);
 
 #undef __FUNCT__
 #define __FUNCT__ "main"
-        Compute the Jacobian matrix.  See the comments in this routine for
-        important information about setting the flag mat_flag.
+        Compute the Jacobian matrix.
      */
-    ierr = ComputeJacobian(&user,X,J,&mat_flag);CHKERRQ(ierr);
+    ierr = ComputeJacobian(&user,X,J);CHKERRQ(ierr);
 
     /*
         Solve J Y = F, where J is the Jacobian matrix.
    We cannot work directly with the global numbers for the original
    uniprocessor grid!
 */
-PetscErrorCode ComputeJacobian(AppCtx *user,Vec X,Mat jac,MatStructure *flag)
+PetscErrorCode ComputeJacobian(AppCtx *user,Vec X,Mat jac)
 {
   PetscErrorCode ierr;
   Vec            localX = user->localX;   /* local vector */
   ierr = VecRestoreArray(localX,&x);CHKERRQ(ierr);
   ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 
-  /*
-     Set flag to indicate that the Jacobian matrix retains an identical
-     nonzero structure throughout all nonlinear iterations (although the
-     values of the entries change). Thus, we can save some work in setting
-     up the preconditioner (e.g., no need to redo symbolic factorization for
-     ILU/ICC preconditioners).
-      - If the nonzero structure of the matrix is different during
-        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
-        must be used instead.  If you are unsure whether the matrix
-        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
-      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
-        believes your assertion and does not check the structure
-        of the matrix.  If you erroneously claim that the structure
-        is the same when it actually is not, the new preconditioner
-        will not function correctly.  Thus, use this optimization
-        feature with caution!
-  */
-  *flag = SAME_NONZERO_PATTERN;
   return 0;
 }

File src/ksp/ksp/examples/tests/ex15.c

 
   /* Create KSP context; set operators and options; solve linear system */
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
-  ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
+  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
 
   /* Insure that preconditioner has same null space as matrix */
   /* currently does not do anything */

File src/ksp/ksp/examples/tests/ex16f.F

       call KSPCreate(PETSC_COMM_WORLD,ksp1,ierr)
       call KSPSetOptionsPrefix(ksp1,'a',ierr)
       call KSPAppendOptionsPrefix(ksp1,'_',ierr)
-      call KSPSetOperators(ksp1,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
+      call KSPSetOperators(ksp1,A,A,ierr)
       call KSPSetFromOptions(ksp1,ierr)
       call KSPSolve(ksp1,b,x,ierr)
 
       call KSPCreate(PETSC_COMM_WORLD,ksp2,ierr)
       call KSPSetOptionsPrefix(ksp2,'b',ierr)
       call KSPAppendOptionsPrefix(ksp2,'_',ierr)
-      call KSPSetOperators(ksp2,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
+      call KSPSetOperators(ksp2,A,A,ierr)
       call KSPSetFromOptions(ksp2,ierr)
       call KSPSolve(ksp2,b,x,ierr)
 

File src/ksp/ksp/examples/tests/ex17.c

 
   /* Create KSP context; set operators and options; solve linear system */
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
-  ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
+  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
   ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
   ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
   ierr = KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

File src/ksp/ksp/examples/tests/ex18.c

   ierr = PetscLogStageRegister("Stage 1",&stage1);
   ierr = PetscLogStagePush(stage1);CHKERRQ(ierr);
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
-  ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
+  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
   ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
   ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);