Commits

Matt Knepley committed c0c93d0 Merge

Merge branch 'master' into knepley/feature-dmda-section

* master: (287 commits)
Mat ex170: Comments
VTK: Small fix to error message (.vts to .vtu)
VTK: Small fix to error message
Fixed bib entries
Bib: Updates
AO: fix erroneous processing of -ao_view and factor into AOViewFromOptions
doc: fix named argument in {Vec,Mat,DM}ViewFromOptions
Sys: add PetscDataTypeFromString() and test code
Mat: Should say that it has a nullspace in MatView()
parms: update tarball with fix for namespace conflict with metis
fix citation 'Golub_Varga_1961'
parmetis: update tarball to parmetis-4.0.2-p5 which fixes an install issue with cygwin
Sys Logging: revert parent traversal
fixed hdf5.py so that if self.libraries.compression is None the code still runs correctly
DMDA: fix bad cast of DM_DA to PetscObject
MatClique: follow DistMultiVec API changes
MatClique: remove unused variables
config cmakeboot: add C++ flags any time compiler is available
config OpenMP: check for C++ flag any time the compiler is available
replaced all left-over uses of a single PetscMalloc() to allocated multiple arrays: replaced with PetscMallocN() The only ones left are when the second array is set into the first array and one ugly usage in the MUMPS interface that cannot be easily fixed
...

Comments (0)

Files changed (305)

 Ethan Coon <ecoon@lanl.gov>                     <ecoon@aeneas>
 Glenn Hammond <glenn.hammond@pnnl.gov>          Glenn Hammond <unknown>
 Hong Zhang <hzhang@mcs.anl.gov>                 hzhang <unknown>
+Hong Zhang (VT) <zhang@vt.edu>
 Jason Sarich <sarich@mcs.anl.gov>               <sarich@24-148-35-133.na.21stcentury.net>
 Jason Sarich <sarich@mcs.anl.gov>               <sarich@Chico.mcs.anl.gov>
 Jed Brown <jed@59A2.org>                        <jed@59A2.org>
 Kris Buschelman <buschelm@mcs.anl.gov>          <Kris@attempt2>
 Kris Buschelman <buschelm@mcs.anl.gov>          <kris@192.168.1.104>
 Lisandro Dalcin <dalcinl@gmail.com>             <dalcinl@trantor.ceride.gov.ar>
+Lois Curfman McInnes <curfman@mcs.anl.gov>      <lois@new-host-2.home>
 Mark Adams <mark.adams@columbia.edu>
 Mark Adams <mark.adams@columbia.edu>            <adams@jaguarpf-login1.ccs.ornl.gov>
 Mark Spiegelman <mspieg@ldea.columbia.edu>      <mspieg@mingus.ldeo.columbia.edu>
 Peter Brune <brune@uchicago.edu>                <emppter@pbrune>
 Peter Brune <brune@mcs.anl.gov>                 <brune@localhost>
 Peter Brune <brune@mcs.anl.gov>                 <prbrune@localhost>
+Richard Tran Mills <rtm@eecs.utk.edu>
 Shri Abhyankar <abhyshr@mcs.anl.gov>
 Stefano Zampini <stefano.zampini@gmail.com>
 Tobin Isaac <tisaac@ices.utexas.edu>

bin/matlab/PetscReadBinaryMatlab.m

 %   This function returns a single struct containing all objects submitted
 %   to the PetscViewerBinaryMatlab viewer.
 
-  matlabHeader = '%$$ '; % string that marks a matlab line for evaluation (could be passed)
+  matlabHeader = ['%$$ '; '#$$ ']; % string that marks a matlab line for evaluation (could be passed)
+  matlabHeaderLen = size(matlabHeader,2);
 
   if (isempty(strfind(filename,'.info')))
      filename = [filename,'.info'];
   while (ischar(str))
 
      % check for output lines that start matlabHeader
-     header = strmatch(matlabHeader,str);
-     if header
-	 str = str(header+length(matlabHeader):end);
+     if strncmp(str,matlabHeader(1,:),matlabHeaderLen) || strncmp(str,matlabHeader(2,:),matlabHeaderLen)
+	 str = str(1+matlabHeaderLen:end);
 
      	 % check for old-style file open/close commands
 	 if strfind(str,'fopen(Set.filename')
 testexamples_PASTIX: ${TESTEXAMPLES_PASTIX}
 testexamples_ML: ${TESTEXAMPLES_ML}
 testexamples_CUSP: ${TESTEXAMPLES_CUSP}
-testexamples_TXPETSCGPU: ${TESTEXAMPLES_TXPETSCGPU}
+testexamples_CUDA: ${TESTEXAMPLES_CUDA}
 testexamples_YAML: ${TESTEXAMPLES_YAML}
 testexamples_THREADCOMM: ${TESTEXAMPLES_THREADCOMM}
 testexamples_MOAB: ${TESTEXAMPLES_MOAB}
 	-@${OMAKE} testexamples_ML TESTEXAMPLES_ML=`echo ${TESTEXAMPLES_ML} | sed s/runex[0-9]*[a-z0-9_]*//g`
 buildexamples_CUSP:
 	-@${OMAKE} testexamples_CUSP TESTEXAMPLES_CUSP=`echo ${TESTEXAMPLES_CUSP} | sed s/runex[0-9]*[a-z0-9_]*//g`
-buildexamples_TXPETSCGPU:
-	-@${OMAKE} testexamples_TXPETSCGPU TESTEXAMPLES_TXPETSCGPU=`echo ${TESTEXAMPLES_TXPETSCGPU} | sed s/runex[0-9]*[a-z0-9_]*//g`
+buildexamples_CUDA:
+	-@${OMAKE} testexamples_CUDA TESTEXAMPLES_CUDA=`echo ${TESTEXAMPLES_CUDA} | sed s/runex[0-9]*[a-z0-9_]*//g`
 buildexamples_YAML:
 	-@${OMAKE} testexamples_YAML TESTEXAMPLES_YAML=`echo ${TESTEXAMPLES_YAML} | sed s/runex[0-9]*[a-z0-9_]*//g`
 buildexamples_THREADCOMM:

config/BuildSystem/config/package.py

     if 'with-'+self.package+'-include-dir' in self.framework.argDB:
         raise RuntimeError('Use --with-'+self.package+'-include; not --with-'+self.package+'-include-dir')
 
-    if 'with-'+self.package+'-include' in self.framework.argDB and 'with-'+self.package+'-lib' in self.framework.argDB:
-      inc = self.framework.argDB['with-'+self.package+'-include']
+    if 'with-'+self.package+'-include' in self.framework.argDB or 'with-'+self.package+'-lib' in self.framework.argDB:
       libs = self.framework.argDB['with-'+self.package+'-lib']
+      inc  = []
+      if self.includes:
+        inc = self.framework.argDB['with-'+self.package+'-include']
+      # hope that package root is one level above first include directory specified
+        d   = os.path.dirname(inc[0])
+      else:
+        d   = None
       if not isinstance(inc, list): inc = inc.split(' ')
       if not isinstance(libs, list): libs = libs.split(' ')
       inc = [os.path.abspath(i) for i in inc]
-      # hope that package root is one level above first include directory specified
-      d = os.path.dirname(inc[0])
       yield('User specified '+self.PACKAGE+' libraries', d, libs, inc)
-      raise RuntimeError('--with-'+self.package+'-lib='+str(self.framework.argDB['with-'+self.package+'-lib'])+' and \n'+\
-                         '--with-'+self.package+'-include='+str(self.framework.argDB['with-'+self.package+'-include'])+' did not work')
+      msg = '--with-'+self.package+'-lib='+str(self.framework.argDB['with-'+self.package+'-lib'])
+      if self.includes:
+        msg += ' and \n'+'--with-'+self.package+'-include='+str(self.framework.argDB['with-'+self.package+'-include'])
+      msg += ' did not work'
+      raise RuntimeError(msg)
 
     for d in self.getSearchDirectories():
       for libdir in [self.libdir, self.altlibdir]:

config/BuildSystem/config/packages/hdf5.py

     self.includes  = ['hdf5.h']
     self.liblist   = [['libhdf5_hl.a', 'libhdf5.a']]
     self.needsMath = 1
-    self.needsCompression = 1
+    self.needsCompression = 0
     self.complex   = 1
     self.worksonWindows = 1
     return
     self.deps = [self.mpi]
     return
 
+  def generateLibList(self, framework):
+    '''First try library list without compression libraries (zlib) then try with'''
+    list = []
+    for l in self.liblist:
+      list.append(l)
+    if self.libraries.compression:
+      for l in self.liblist:
+        list.append(l + self.libraries.compression)
+    self.liblist = list
+    return config.package.Package.generateLibList(self,framework)
+
   def Install(self):
     import os
 
     return self.installDir
 
   def configureLibrary(self):
-    self.extraLib = self.libraries.compression
     if hasattr(self.compilers, 'FC'):
       # PETSc does not need the Fortran interface, but some users will call the Fortran interface
       # and expect our standard linking to be sufficient.  Thus we try to link the Fortran

config/BuildSystem/docs/examples/__init__.py

-all = ['blasTest']

config/BuildSystem/docs/examples/blasTest.py

-import config.base
-
-class Configure(config.base.Configure):
-  def __init__(self, framework):
-    config.base.Configure.__init__(self, framework)
-    self.compilers = self.framework.require('config.compilers', self)
-    self.libraries = self.framework.require('config.libraries', self)
-    return
-
-  def checkDot(self):
-    '''Verify that the ddot() function is contained in the BLAS library'''
-    return self.libraries.check('libblas.a', 'ddot', otherLibs = self.compilers.flibs, fortranMangle = 1)
-
-  def configure(self):
-    self.executeTest(self.checkDot)
-    return

config/BuildSystem/docs/examples/cursesTest.py

-import config.base
-
-class Configure(config.base.Configure):
-  def __init__(self, framework):
-    config.base.Configure.__init__(self, framework)
-    self.headers = self.framework.require('config.headers', self)
-    return
-
-  def checkCurses(self):
-    '''Verify that we have the curses header'''
-    return self.headers.check('curses.h')
-
-  def configure(self):
-    self.executeTest(self.checkCurses)
-    return

config/PETSc/packages/cuda.py

   def getSearchDirectories(self):
     import os
     yield ''
+    yield os.path.join('/Developer','NVIDIA','CUDA-5.5')
     yield os.path.join('/usr','local','cuda')
     self.libdir           = os.path.join('lib','Win32')
     self.altlibdir        = os.path.join('lib','x64')

config/PETSc/packages/elemental.py

 class Configure(PETSc.package.NewPackage):
   def __init__(self, framework):
     PETSc.package.NewPackage.__init__(self, framework)
-    self.download   = ['https://elemental.googlecode.com/files/elemental-0.79-p1.tgz',
-                       'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/elemental-0.79-p1.tgz']
+    self.download   = ['http://libelemental.org/pub/releases/elemental-0.81.tgz',
+                       'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/elemental-0.81.tgz']
     self.liblist    = [['libelemental.a','libpmrrr.a']]
     self.includes   = ['elemental.hpp']
     self.cxx              = 1

config/PETSc/packages/openmp.py

       self.setCompilers.pushLanguage('FC')
       self.setCompilers.addCompilerFlag(ompflag)
       self.setCompilers.popLanguage()
-    if self.languages.clanguage == 'Cxx':
+    if hasattr(self.compilers, 'CXX'):
       self.setCompilers.pushLanguage('Cxx')
       self.setCompilers.addCompilerFlag(ompflag)
       if self.setCompilers.checkLinkerFlag(ompflag):

config/PETSc/packages/pARMS.py

 class Configure(PETSc.package.NewPackage):
   def __init__(self, framework):
     PETSc.package.NewPackage.__init__(self, framework)
-    self.download  = ['http://ftp.mcs.anl.gov/pub/petsc/externalpackages/pARMS_3.2.tar.gz']
+    self.download  = ['http://ftp.mcs.anl.gov/pub/petsc/externalpackages/pARMS_3.2p2.tar.gz']
     self.functions = ['parms_PCCreate']
     self.includes  = ['parms.h']
     self.liblist   = [['libparms.a']]

config/PETSc/packages/parmetis.py

 class Configure(PETSc.package.NewPackage):
   def __init__(self, framework):
     PETSc.package.NewPackage.__init__(self, framework)
-    self.download          = ['http://ftp.mcs.anl.gov/pub/petsc/externalpackages/parmetis-4.0.2-p4.tar.gz']
+    self.download          = ['http://ftp.mcs.anl.gov/pub/petsc/externalpackages/parmetis-4.0.2-p5.tar.gz']
     self.functions         = ['ParMETIS_V3_PartKway']
     self.includes          = ['parmetis.h']
     self.liblist           = [['libparmetis.a']]

config/PETSc/packages/txpetscgpu.py

-from __future__ import generators
-import config.package
-
-class Configure(config.package.Package):
-  def __init__(self, framework):
-    config.package.Package.__init__(self, framework)
-    self.download        = ['http://ftp.mcs.anl.gov/pub/petsc/externalpackages/txpetscgpu-0.1.0.tar.gz']
-    self.includes        = ['txpetscgpu_version.h']
-    self.includedir      = ['include']
-    self.forceLanguage   = 'CUDA'
-    self.cxx             = 0
-    self.archIndependent = 1
-    self.worksonWindows  = 1
-    self.downloadonWindows = 1
-    self.complex         = 1
-    return
-
-  def setupDependencies(self, framework):
-    config.package.Package.setupDependencies(self, framework)
-    self.thrust = framework.require('config.packages.thrust', self)
-    self.cusp = framework.require('config.packages.cusp', self)
-    self.deps   = [self.thrust, self.cusp]
-    return
-
-  def Install(self):
-    import shutil
-    import os
-    self.framework.log.write('txpetscgpu directory = '+self.packageDir+' installDir '+self.installDir+'\n')
-    srcdir = self.packageDir
-    destdir = os.path.join(self.installDir, 'include', 'txpetscgpu')
-    try:
-      if os.path.isdir(destdir): shutil.rmtree(destdir)
-      shutil.copytree(srcdir,destdir)
-    except RuntimeError,e:
-      raise RuntimeError('Error installing txpetscgpu include files: '+str(e))
-# default and --download have different includedirs
-    self.includedir = os.path.join(destdir, 'include')
-    return self.installDir
-
-  def getSearchDirectories(self):
-    import os
-    yield ''
-    yield os.path.join('/usr','local','cuda')
-    yield os.path.join('/usr','local','cuda','include')
-    return
-
-  def configureLibrary(self):
-    '''Calls the regular package configureLibrary and then does an additional tests needed by txpetscgpu'''
-    if not self.cusp.found or not self.thrust.found:
-      raise RuntimeError('PETSc TxPETScGPU support requires the CUSP and Thrust packages\nRerun configure using --with-cusp-dir and --with-thrust-dir')
-    config.package.Package.configureLibrary(self)
-    return
-

config/PETSc/packages/valgrind.py

     self.deps = []
     return
 
+  def setup(self):
+    PETSc.package.NewPackage.setup(self)
+    if 'with-'+self.package+'-lib' in self.framework.argDB:
+      raise RuntimeError('It is incorrect to specify library for valgrind, please remove --with-valgrind-lib')
+    return
+
   def getSearchDirectories(self):
     '''By default, do not search any particular directories'''
     yield ''

config/builder.py

                                                                   {'numProcs': 4, 'args': '-patch_size 2 -grid_size 4 -comm_size 1'},
                                                                   {'numProcs': 4, 'args': '-patch_size 2 -grid_size 4 -comm_size 2'},
                                                                   {'numProcs': 16, 'args': '-patch_size 2 -grid_size 8 -comm_size 2'}],
+                        'src/mat/examples/tests/ex170':          [{'numProcs': 1, 'args': ''},
+                                                                  {'numProcs': 1, 'args': '-testnum 1'},
+                                                                  {'numProcs': 2, 'args': '-testnum 1'},
+                                                                  {'numProcs': 1, 'args': '-testnum 2'},
+                                                                  {'numProcs': 2, 'args': '-testnum 2'}],
                         'src/dm/impls/plex/examples/tests/ex1': [# CTetGen tests 0-1
                                                                  {'numProcs': 1, 'args': '-dim 3 -ctetgen_verbose 4 -dm_view ::ascii_info_detail -info -info_exclude null'},
                                                                  {'numProcs': 1, 'args': '-dim 3 -ctetgen_verbose 4 -refinement_limit 0.0625 -dm_view ::ascii_info_detail -info -info_exclude null'},
                                                                  #{'numProcs': 1, 'args': '-dim 3 -filename /PETSc3/petsc/blockcylinder-20.exo'},
                                                                  ],
                         'src/dm/impls/plex/examples/tests/ex8': [{'numProcs': 1, 'args': '-dm_view ::ascii_info_detail'},
-                                                                 {'numProcs': 1, 'args': '-transform'}],
+                                                                 {'numProcs': 1, 'args': '-interpolate -dm_view ::ascii_info_detail'},
+                                                                 {'numProcs': 1, 'args': '-transform'},
+                                                                 {'numProcs': 1, 'args': '-interpolate -transform'},
+                                                                 {'numProcs': 1, 'args': '-run_type file -filename %(meshes)s/simpleblock-100.exo -dm_view ::ascii_info_detail -v0 -1.5,-0.5,0.5,-0.5,-0.5,0.5,0.5,-0.5,0.5 -J 0.0,0.0,0.5,0.0,0.5,0.0,-0.5,0.0,0.0,0.0,0.0,0.5,0.0,0.5,0.0,-0.5,0.0,0.0,0.0,0.0,0.5,0.0,0.5,0.0,-0.5,0.0,0.0 -invJ 0.0,0.0,-2.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,-2.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,-2.0,0.0,2.0,0.0,2.0,0.0,0.0 -detJ 0.125,0.125,0.125', 'requires': ['exodusii']},
+                                                                 {'numProcs': 1, 'args': '-interpolate -run_type file -filename %(meshes)s/simpleblock-100.exo -dm_view ::ascii_info_detail -v0 -1.5,-0.5,0.5,-0.5,-0.5,0.5,0.5,-0.5,0.5 -J 0.0,0.0,0.5,0.0,0.5,0.0,-0.5,0.0,0.0,0.0,0.0,0.5,0.0,0.5,0.0,-0.5,0.0,0.0,0.0,0.0,0.5,0.0,0.5,0.0,-0.5,0.0,0.0 -invJ 0.0,0.0,-2.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,-2.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,-2.0,0.0,2.0,0.0,2.0,0.0,0.0 -detJ 0.125,0.125,0.125 -centroid -1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0 -normal 0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0 -vol 1.0,1.0,1.0', 'requires': ['exodusii']}],
                         'src/dm/impls/plex/examples/tests/ex1f90': [{'numProcs': 1, 'args': ''}],
                         'src/dm/impls/plex/examples/tests/ex2f90': [{'numProcs': 1, 'args': ''}],
                         'src/dm/impls/plex/examples/tutorials/ex1': [{'numProcs': 1, 'args': ''},
                         'src/snes/examples/tutorials/ex52':   [# 2D Laplacian 0-3
                                                                {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function',
                                                                 'setup': './bin/pythonscripts/PetscGenerateFEMQuadrature.py 2 1 1 1 laplacian src/snes/examples/tutorials/ex52.h',
-                                                                'source': ['src/snes/examples/tutorials/ex52_integrateElement.cu'], 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -batch -gpu', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -batch -gpu -gpu_batches 2', 'requires': ['Cuda']},
+                                                                'source': ['src/snes/examples/tutorials/ex52_integrateElement.cu'], 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -batch -gpu', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -batch -gpu -gpu_batches 2', 'requires': ['cuda']},
                                                                # 2D Laplacian refined 4-8
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch -gpu', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch -gpu -gpu_batches 2', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch -gpu -gpu_batches 4', 'requires': ['Cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch -gpu', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch -gpu -gpu_batches 2', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -batch -gpu -gpu_batches 4', 'requires': ['cuda']},
                                                                # 2D Elasticity 9-12
                                                                {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity',
                                                                 'setup': './bin/pythonscripts/PetscGenerateFEMQuadrature.py 2 1 2 1 elasticity src/snes/examples/tutorials/ex52.h',
-                                                                'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch -gpu', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch -gpu -gpu_batches 2', 'requires': ['Cuda']},
+                                                                'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch -gpu', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch -gpu -gpu_batches 2', 'requires': ['cuda']},
                                                                # 2D Elasticity refined 13-17
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch -gpu', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch -gpu -gpu_batches 2', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch -gpu -gpu_batches 4', 'requires': ['Cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch -gpu', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch -gpu -gpu_batches 2', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dm_view -refinement_limit 0.0625 -compute_function -op_type elasticity -batch -gpu -gpu_batches 4', 'requires': ['cuda']},
                                                                # 3D Laplacian 18-20
                                                                {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function',
                                                                 'setup': './bin/pythonscripts/PetscGenerateFEMQuadrature.py 3 1 1 1 laplacian src/snes/examples/tutorials/ex52.h',
-                                                                'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -batch -gpu', 'requires': ['Cuda']},
+                                                                'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -batch -gpu', 'requires': ['cuda']},
                                                                # 3D Laplacian refined 21-24
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -batch -gpu', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -batch -gpu -gpu_batches 2', 'requires': ['Cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -batch -gpu', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -batch -gpu -gpu_batches 2', 'requires': ['cuda']},
                                                                # 3D Elasticity 25-27
                                                                {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -op_type elasticity',
                                                                 'setup': './bin/pythonscripts/PetscGenerateFEMQuadrature.py 3 1 3 1 elasticity src/snes/examples/tutorials/ex52.h',
-                                                                'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch -gpu', 'requires': ['Cuda']},
+                                                                'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0 -compute_function -op_type elasticity -batch -gpu', 'requires': ['cuda']},
                                                                # 3D Elasticity refined 28-31
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity -batch', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity -batch -gpu', 'requires': ['Cuda']},
-                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity -batch -gpu -gpu_batches 2', 'requires': ['Cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity -batch', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity -batch -gpu', 'requires': ['cuda']},
+                                                               {'numProcs': 1, 'args': '-dim 3 -dm_view -refinement_limit 0.0125 -compute_function -op_type elasticity -batch -gpu -gpu_batches 2', 'requires': ['cuda']},
                                                                # 'source': ['src/snes/examples/tutorials/ex52_integrateElement.cu']},
                                                                ],
                         'src/snes/examples/tutorials/ex62':   [# 2D serial P1 tests 0-3
     '''Parses a PETSc action
     - Return a dictionary for the portions of a run'''
     import re
-    m = re.match('-@\$\{MPIEXEC\} -n (?P<numProcs>\d+) ./(?P<ex>ex\w+)(?P<args>[-.,\w ]+)>', lines[0])
+    m = re.match('-@\$\{MPIEXEC\} -n (?P<numProcs>\d+) ./(?P<ex>ex\w+)(?P<args>[\'%-.,\w ]+)>', lines[0])
     if not m:
       raise RuntimeError('Could not parse launch sequence:\n'+lines[0])
     comparison = 'Not present'
     '''
     with file(depFile) as f:
       try:
-        target, deps = f.read().split(':')
+        target, deps = f.read().split(':', 1)
       except ValueError as e:
         self.logPrint('ERROR in dependency file %s: %s' % (depFile, str(e)))
     target = target.split()[0]
     if (target != self.sourceManager.getObjectName(source)): print target, self.sourceManager.getObjectName(source)
     assert(target == self.sourceManager.getObjectName(source))
+    deps = deps.split('\n\n')[0]
     deps = [d for d in deps.replace('\\','').split() if not os.path.splitext(d)[1] == '.mod']
     if not os.path.basename(deps[0]) == source:
       raise RuntimeError('ERROR: first dependency %s should be %s' % (deps[0], source))
        self.logPrint("MISCONFIGURATION: Regression output file %s (test %s) is missing" % (outputName, testNum), debugSection='screen')
    else:
      with file(outputName) as f:
-       validOutput = f.read()
+       validOutput = f.read().strip() # stored reference output is kept stripped of surrounding whitespace, so strip before comparing
        if not validOutput == output:
          if replace:
            with file(outputName, 'w') as f:
 
  def getTestCommand(self, executable, **params):
    numProcs = params.get('numProcs', 1)
-   args     = params.get('args', '') % dict(meshes=os.path.join(self.petscDir,'share','petsc','datafiles','meshes'))
+   try:
+     args   = params.get('args', '') % dict(meshes=os.path.join(self.petscDir,'share','petsc','datafiles','meshes'))
+   except ValueError:
+     args   = params.get('args', '')
    hosts    = ','.join(['localhost']*int(numProcs))
    return ' '.join([self.configInfo.mpi.mpiexec, '-hosts', hosts, '-n', str(numProcs), os.path.abspath(executable), args])
 

config/cmakeboot.py

      langlist.append(('FC','Fortran'))
    if hasattr(self.compilers,'CUDAC'):
      langlist.append(('CUDA','CUDA'))
-   if (self.languages.clanguage == 'Cxx'):
+   if hasattr(self.compilers,'CXX'):
      langlist.append(('Cxx','CXX'))
    win32fe = None
    for petsclanguage,cmakelanguage in langlist:

config/configure.py

     framework.logClear()
     if hasattr(framework, 'log'):
       try:
-        framework.log.write('**** Configure header conftest.h ****\n')
+        framework.log.write('**** Configure header '+framework.compilerDefines+' ****\n')
         framework.outputHeader(framework.log)
-        framework.log.write('**** C specific Configure header conffix.h ****\n')
+        framework.log.write('**** C specific Configure header '+framework.compilerFixes+' ****\n')
         framework.outputCHeader(framework.log)
       except Exception, e:
         framework.log.write('Problem writing headers to log: '+str(e))

config/examples/arch-cuda-cg-opt.py

     '--with-thrust=1',
     '--with-cusp=1',
     '--with-cusp-dir=/sandbox/soft/cusp-v0.3.1',
-    '--download-txpetscgpu=1',
     '--with-debugging=0',
     'COPTFLAGS=-O3',
     'CXXOPTFLAGS=-O3',

config/examples/arch-cuda-double.py

 #
 # This test build is with Cuda 5.0, with default thrust, and cusp-v0.3.1 separately installed.
 # [using default mpich from ubuntu 12.04]
-# Also enable txpetscgpu.
 #
 if __name__ == '__main__':
   import sys
     '--with-cusp=1',
     '-with-cusp-dir=/home/balay/soft/cusp-v0.3.1',
     '--with-thrust=1',
-    '--download-txpetscgpu=1',
     '--with-precision=double',
     '--with-clanguage=c',
     '--with-cuda-arch=sm_13'

config/examples/arch-cuda5-cg-opt.py

     '--with-thrust=1',
     '--with-cusp=1',
     '--with-cusp-dir=/sandbox/soft/cusp-v0.3.1',
-    '--download-txpetscgpu=1',
     '--with-debugging=0',
     'COPTFLAGS=-O3',
     'CXXOPTFLAGS=-O3',

include/finclude/petscmat.h

       PetscEnum MAT_CUSP_MULT_DIAG
       PetscEnum MAT_CUSP_MULT_OFFDIAG
       PetscEnum MAT_CUSP_MULT
-      PetscEnum MAT_CUSP_SOLVE
       PetscEnum MAT_CUSP_ALL
       parameter(MAT_CUSP_MULT_DIAG=0)
       parameter(MAT_CUSP_MULT_OFFDIAG=1)
       parameter(MAT_CUSP_MULT=2)
-      parameter(MAT_CUSP_SOLVE=3)
-      parameter(MAT_CUSP_ALL=4)
+      parameter(MAT_CUSP_ALL=3)
 #endif
 
 
 !
 !     CUSPARSE enumerated types
 !
-#if defined(PETSC_HAVE_TXPETSCGPU)
+#if defined(PETSC_HAVE_CUDA)
       PetscEnum MAT_CUSPARSE_CSR
       PetscEnum MAT_CUSPARSE_ELL
       PetscEnum MAT_CUSPARSE_HYB
       PetscEnum MAT_CUSPARSE_MULT_DIAG
       PetscEnum MAT_CUSPARSE_MULT_OFFDIAG
       PetscEnum MAT_CUSPARSE_MULT
-      PetscEnum MAT_CUSPARSE_SOLVE
       PetscEnum MAT_CUSPARSE_ALL
       parameter(MAT_CUSPARSE_MULT_DIAG=0)
       parameter(MAT_CUSPARSE_MULT_OFFDIAG=1)
       parameter(MAT_CUSPARSE_MULT=2)
-      parameter(MAT_CUSPARSE_SOLVE=3)
-      parameter(MAT_CUSPARSE_ALL=4)
+      parameter(MAT_CUSPARSE_ALL=3)
 #endif
 !
 !  End of Fortran include file for the Mat package in PETSc

include/finclude/petscviewer.h

       PetscEnum PETSC_VIEWER_DRAW_CONTOUR
       PetscEnum PETSC_VIEWER_DRAW_PORTS
       PetscEnum PETSC_VIEWER_VTK_VTS
+      PetscEnum PETSC_VIEWER_VTK_VTR
       PetscEnum PETSC_VIEWER_VTK_VTU
       PetscEnum PETSC_VIEWER_BINARY_MATLAB
       PetscEnum PETSC_VIEWER_NATIVE
       parameter (PETSC_VIEWER_DRAW_CONTOUR = 20)
       parameter (PETSC_VIEWER_DRAW_PORTS = 21)
       parameter (PETSC_VIEWER_VTK_VTS = 22)
-      parameter (PETSC_VIEWER_VTK_VTU = 23)
-      parameter (PETSC_VIEWER_BINARY_MATLAB = 24)
-      parameter (PETSC_VIEWER_NATIVE = 25)
-      parameter (PETSC_VIEWER_NOFORMAT = 26)
+      parameter (PETSC_VIEWER_VTK_VTR = 23)
+      parameter (PETSC_VIEWER_VTK_VTU = 24)
+      parameter (PETSC_VIEWER_BINARY_MATLAB = 25)
+      parameter (PETSC_VIEWER_NATIVE = 26)
+      parameter (PETSC_VIEWER_NOFORMAT = 27)
 !
 !  End of Fortran include file for the PetscViewer package in PETSc
 

include/petsc-private/dmimpl.h

 struct _DMOps {
   PetscErrorCode (*view)(DM,PetscViewer);
   PetscErrorCode (*load)(DM,PetscViewer);
+  PetscErrorCode (*clone)(DM,DM*);
   PetscErrorCode (*setfromoptions)(DM);
   PetscErrorCode (*setup)(DM);
   PetscErrorCode (*createglobalvector)(DM,Vec*);
   PetscErrorCode (*globaltolocalend)(DM,Vec,InsertMode,Vec);
   PetscErrorCode (*localtoglobalbegin)(DM,Vec,InsertMode,Vec);
   PetscErrorCode (*localtoglobalend)(DM,Vec,InsertMode,Vec);
+  PetscErrorCode (*localtolocalbegin)(DM,Vec,InsertMode,Vec);
+  PetscErrorCode (*localtolocalend)(DM,Vec,InsertMode,Vec);
 
   PetscErrorCode (*destroy)(DM);
 
 
 PETSC_EXTERN PetscErrorCode DMCreateGlobalVector_Section_Private(DM,Vec*);
 PETSC_EXTERN PetscErrorCode DMCreateLocalVector_Section_Private(DM,Vec*);
+PETSC_EXTERN PetscErrorCode DMCreateSubDM_Section_Private(DM,PetscInt,PetscInt[],IS*,DM*);
 
 /*
 

include/petsc-private/matimpl.h

   PetscErrorCode (*restorerowuppertriangular)(Mat);
   /*109*/
   PetscErrorCode (*matsolve)(Mat,Mat,Mat);
-  PetscErrorCode (*getredundantmatrix)(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
+  PetscErrorCode (*getredundantmatrix)(Mat,PetscInt,MPI_Comm,MatReuse,Mat*);
   PetscErrorCode (*getrowmin)(Mat,Vec,PetscInt[]);
   PetscErrorCode (*getcolumnvector)(Mat,Vec,PetscInt);
   PetscErrorCode (*missingdiagonal)(Mat,PetscBool *,PetscInt*);

include/petsc-private/petscimpl.h

   PetscOps             *bops;
   MPI_Comm             comm;
   PetscInt             type;
-  PetscLogDouble       flops,time,mem;
+  PetscLogDouble       flops,time,mem,memchildren;
   PetscInt             id;
   PetscInt             refct;
   PetscMPIInt          tag;
    PetscNew(pops,&((h)->ops)) ||                                                \
    PetscHeaderCreate_Private((PetscObject)h,classid,class_name,descr,mansec,com,(PetscObjectFunction)des,(PetscObjectViewerFunction)vie) || \
    PetscLogObjectCreate(h) ||                                                   \
-   PetscLogObjectMemory(h, sizeof(struct tp) + sizeof(PetscOps) + sizeof(pops)))
+   PetscLogObjectMemory((PetscObject)h, sizeof(struct tp) + sizeof(PetscOps) + sizeof(pops)))
 
 PETSC_EXTERN PetscErrorCode PetscComposedQuantitiesDestroy(PetscObject obj);
 PETSC_EXTERN PetscErrorCode PetscHeaderCreate_Private(PetscObject,PetscClassId,const char[],const char[],const char[],MPI_Comm,PetscErrorCode (*)(PetscObject*),PetscErrorCode (*)(PetscObject,PetscViewer));

include/petsc-private/snesimpl.h

 
   PetscBool   tolerancesset;      /* SNESSetTolerances() called and tolerances should persist through SNESCreate_XXX()*/
 
-  PetscReal   norm_init;          /* the initial norm value */
-  PetscBool   norm_init_set;      /* the initial norm has been set */
   PetscBool   vec_func_init_set;  /* the initial function has been set */
 
   SNESNormSchedule normschedule;  /* Norm computation type for SNES instance */

include/petscao.h

 PETSC_EXTERN PetscErrorCode AOCreateMappingIS(IS,IS,AO*);
 
 PETSC_EXTERN PetscErrorCode AOView(AO,PetscViewer);
+PETSC_EXTERN PetscErrorCode AOViewFromOptions(AO,const char*,const char*);
 PETSC_EXTERN PetscErrorCode AODestroy(AO*);
 
 /* Dynamic creation and loading functions */

include/petscdm.h

 PETSC_EXTERN PetscFunctionList DMList;
 PETSC_EXTERN PetscBool         DMRegisterAllCalled;
 PETSC_EXTERN PetscErrorCode DMCreate(MPI_Comm,DM*);
+PETSC_EXTERN PetscErrorCode DMClone(DM,DM*);
 PETSC_EXTERN PetscErrorCode DMSetType(DM, DMType);
 PETSC_EXTERN PetscErrorCode DMGetType(DM, DMType *);
 PETSC_EXTERN PetscErrorCode DMRegister(const char[],PetscErrorCode (*)(DM));
 PETSC_EXTERN PetscErrorCode DMGlobalToLocalEnd(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMLocalToGlobalBegin(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMLocalToGlobalEnd(DM,Vec,InsertMode,Vec);
+PETSC_EXTERN PetscErrorCode DMLocalToLocalBegin(DM,Vec,InsertMode,Vec);
+PETSC_EXTERN PetscErrorCode DMLocalToLocalEnd(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMConvert(DM,DMType,DM*);
 
 PETSC_EXTERN PetscErrorCode DMGetCoordinateDM(DM,DM*);

include/petscdmda.h

 PETSC_EXTERN PetscErrorCode DMDAGlobalToNaturalEnd(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMDANaturalToGlobalBegin(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMDANaturalToGlobalEnd(DM,Vec,InsertMode,Vec);
-PETSC_EXTERN PetscErrorCode DMDALocalToLocalBegin(DM,Vec,InsertMode,Vec);
-PETSC_EXTERN PetscErrorCode DMDALocalToLocalEnd(DM,Vec,InsertMode,Vec);
+PETSC_DEPRECATED("Use DMLocalToLocalBegin()") PETSC_STATIC_INLINE PetscErrorCode DMDALocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) {return DMLocalToLocalBegin(dm,g,mode,l);}
+PETSC_DEPRECATED("Use DMLocalToLocalEnd()") PETSC_STATIC_INLINE PetscErrorCode DMDALocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) {return DMLocalToLocalEnd(dm,g,mode,l);}
 PETSC_EXTERN PetscErrorCode DMDACreateNaturalVector(DM,Vec *);
 
 PETSC_EXTERN PetscErrorCode DMDAGetCorners(DM,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

include/petscdmplex.h

 
 PETSC_EXTERN PetscErrorCode DMPlexCreate(MPI_Comm, DM*);
 PETSC_EXTERN PetscErrorCode DMPlexCreateSubmesh(DM, const char[], PetscInt, DM*);
-PETSC_EXTERN PetscErrorCode DMPlexCreateCohesiveSubmesh(DM, PetscBool, DM *);
+PETSC_EXTERN PetscErrorCode DMPlexCreateCohesiveSubmesh(DM, PetscBool, const char [], PetscInt, DM *);
 PETSC_EXTERN PetscErrorCode DMPlexCreateFromCellList(MPI_Comm, PetscInt, PetscInt, PetscInt, PetscInt, PetscBool, const int[], PetscInt, const double[], DM*);
 PETSC_EXTERN PetscErrorCode DMPlexCreateFromDAG(DM, PetscInt, const PetscInt [], const PetscInt [], const PetscInt [], const PetscInt [], const PetscScalar []);
 PETSC_EXTERN PetscErrorCode DMPlexClone(DM, DM*);
 PETSC_EXTERN PetscErrorCode DMPlexSetRefinementLimit(DM, PetscReal);
 PETSC_EXTERN PetscErrorCode DMPlexGetRefinementUniform(DM, PetscBool *);
 PETSC_EXTERN PetscErrorCode DMPlexSetRefinementUniform(DM, PetscBool);
+PETSC_EXTERN PetscErrorCode DMPlexInvertCell(PetscInt, PetscInt, int []);
 PETSC_EXTERN PetscErrorCode DMPlexInterpolate(DM, DM *);
 PETSC_EXTERN PetscErrorCode DMPlexCopyCoordinates(DM, DM);
 PETSC_EXTERN PetscErrorCode DMPlexDistribute(DM, const char[], PetscInt, DM*);

include/petscdmshell.h

 PETSC_EXTERN PetscErrorCode DMShellSetGlobalToLocalVecScatter(DM,VecScatter);
 PETSC_EXTERN PetscErrorCode DMShellSetLocalToGlobal(DM,PetscErrorCode (*)(DM,Vec,InsertMode,Vec),PetscErrorCode (*)(DM,Vec,InsertMode,Vec));
 PETSC_EXTERN PetscErrorCode DMShellSetLocalToGlobalVecScatter(DM,VecScatter);
+PETSC_EXTERN PetscErrorCode DMShellSetLocalToLocal(DM,PetscErrorCode (*)(DM,Vec,InsertMode,Vec),PetscErrorCode (*)(DM,Vec,InsertMode,Vec));
+PETSC_EXTERN PetscErrorCode DMShellSetLocalToLocalVecScatter(DM,VecScatter);
 PETSC_EXTERN PetscErrorCode DMShellSetCreateMatrix(DM,PetscErrorCode (*)(DM,MatType,Mat*));
 PETSC_EXTERN PetscErrorCode DMGlobalToLocalBeginDefaultShell(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMGlobalToLocalEndDefaultShell(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMLocalToGlobalBeginDefaultShell(DM,Vec,InsertMode,Vec);
 PETSC_EXTERN PetscErrorCode DMLocalToGlobalEndDefaultShell(DM,Vec,InsertMode,Vec);
+PETSC_EXTERN PetscErrorCode DMLocalToLocalBeginDefaultShell(DM,Vec,InsertMode,Vec);
+PETSC_EXTERN PetscErrorCode DMLocalToLocalEndDefaultShell(DM,Vec,InsertMode,Vec);
 
 #endif

include/petsclog.h

 PETSC_EXTERN PetscErrorCode PetscStageLogGetCurrent(PetscStageLog,int*);
 PETSC_EXTERN PetscErrorCode PetscStageLogGetEventPerfLog(PetscStageLog,int,PetscEventPerfLog*);
 
+PETSC_EXTERN PetscErrorCode PetscLogObjectParent(PetscObject,PetscObject);
+PETSC_EXTERN PetscErrorCode PetscLogObjectMemory(PetscObject,PetscLogDouble);
+
 
 #if defined(PETSC_USE_LOG)  /* --- Logging is turned on --------------------------------*/
 PETSC_EXTERN PetscStageLog petsc_stageLog;
 PETSC_EXTERN PetscErrorCode (*PetscLogPHC)(PetscObject);
 PETSC_EXTERN PetscErrorCode (*PetscLogPHD)(PetscObject);
 
-#define PetscLogObjectParent(p,c) \
-  (c && p && (((PetscObject)(c))->parent = (PetscObject)(p),((PetscObject)(c))->parentid = ((PetscObject)p)->id,0))
-
-#define PetscLogObjectParents(p,n,d)  0;{int _i; for (_i=0; _i<n; _i++) {ierr = PetscLogObjectParent(p,(d)[_i]);CHKERRQ(ierr);}}
+#define PetscLogObjectParents(p,n,d)  0;{int _i; for (_i=0; _i<n; _i++) {ierr = PetscLogObjectParent((PetscObject)p,(PetscObject)(d)[_i]);CHKERRQ(ierr);}}
 #define PetscLogObjectCreate(h)      ((PetscLogPHC) ? (*PetscLogPHC)((PetscObject)h) : 0)
 #define PetscLogObjectDestroy(h)     ((PetscLogPHD) ? (*PetscLogPHD)((PetscObject)h) : 0)
-#define PetscLogObjectMemory(p,m)    (((PetscObject)(p))->mem += (m),0)
 /* Initialization functions */
 PETSC_EXTERN PetscErrorCode PetscLogBegin(void);
 PETSC_EXTERN PetscErrorCode PetscLogAllBegin(void);
 #define PetscLogEventEnd(e,o1,o2,o3,o4)     0
 #define PetscLogEventBarrierBegin(e,o1,o2,o3,o4,cm) 0
 #define PetscLogEventBarrierEnd(e,o1,o2,o3,o4,cm)   0
-#define PetscLogObjectParent(p,c)           0
 #define PetscLogObjectParents(p,n,c)        0
 #define PetscLogObjectCreate(h)             0
 #define PetscLogObjectDestroy(h)            0
-#define PetscLogObjectMemory(p,m)           0
 #define PetscLogDestroy()                   0
 #define PetscLogStagePush(a)                0
 #define PetscLogStagePop()                  0

include/petscmat.h

 PETSC_EXTERN PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatRestrict(Mat,Vec,Vec);
 PETSC_EXTERN PetscErrorCode MatGetVecs(Mat,Vec*,Vec*);
-PETSC_EXTERN PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
+PETSC_EXTERN PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,MatReuse,Mat*);
 PETSC_EXTERN PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,MatReuse,Mat*);
 PETSC_EXTERN PetscErrorCode MatFindZeroDiagonals(Mat,IS*);
 
 PETSC_EXTERN PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal);
 #endif
 
-#if defined PETSC_HAVE_TXPETSCGPU
-
+#ifdef PETSC_HAVE_CUDA
 /*E
     MatCUSPARSEStorageFormat - indicates the storage format for CUSPARSE (GPU)
-    matrices. Requires the txpetscgpu package to use. Configure with 
-    --download-txpetscgpu to build/install petsc with the txpetscgpu library.
+    matrices.
 
     Not Collective
 
 +   MAT_CUSPARSE_CSR - Compressed Sparse Row
-.   MAT_CUSPARSE_ELL - Ellpack
--   MAT_CUSPARSE_HYB - Hybrid, a combination of Ellpack and Coordinate format.
+.   MAT_CUSPARSE_ELL - Ellpack (requires CUDA 4.2 or later).
+-   MAT_CUSPARSE_HYB - Hybrid, a combination of Ellpack and Coordinate format (requires CUDA 4.2 or later).
 
     Level: intermediate
 
 
 /*E
     MatCUSPARSEFormatOperation - indicates the operation of CUSPARSE (GPU)
-    matrices whose operation should use a particular storage format. Requires
-    the txpetscgpu package to use. Configure with --download-txpetscgpu to 
-    build/install petsc with the txpetscgpu library.
+    matrices whose operation should use a particular storage format.
 
     Not Collective
 
 +   MAT_CUSPARSE_MULT_DIAG - sets the storage format for the diagonal matrix in the parallel MatMult
 .   MAT_CUSPARSE_MULT_OFFDIAG - sets the storage format for the offdiagonal matrix in the parallel MatMult
 .   MAT_CUSPARSE_MULT - sets the storage format for the entire matrix in the serial (single GPU) MatMult
-.   MAT_CUSPARSE_SOLVE - sets the storage format for the triangular factors in the serial (single GPU) MatSolve
 -   MAT_CUSPARSE_ALL - sets the storage format for all CUSPARSE (GPU) matrices
 
     Level: intermediate
 
 .seealso: MatCUSPARSESetFormat(), MatCUSPARSEStorageFormat
 E*/
-typedef enum {MAT_CUSPARSE_MULT_DIAG, MAT_CUSPARSE_MULT_OFFDIAG, MAT_CUSPARSE_MULT, MAT_CUSPARSE_SOLVE, MAT_CUSPARSE_ALL} MatCUSPARSEFormatOperation;
+typedef enum {MAT_CUSPARSE_MULT_DIAG, MAT_CUSPARSE_MULT_OFFDIAG, MAT_CUSPARSE_MULT, MAT_CUSPARSE_ALL} MatCUSPARSEFormatOperation;
 
 PETSC_EXTERN PetscErrorCode MatCreateSeqAIJCUSPARSE(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
 PETSC_EXTERN PetscErrorCode MatCreateAIJCUSPARSE(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
 PETSC_EXTERN PetscErrorCode MatCUSPARSESetFormat(Mat,MatCUSPARSEFormatOperation,MatCUSPARSEStorageFormat);
-
 #endif
 
 #if defined(PETSC_HAVE_CUSP)
 
 /*E
     MatCUSPStorageFormat - indicates the storage format for CUSP (GPU)
-    matrices. Requires the txpetscgpu package to use. Configure with 
-    --download-txpetscgpu to build/install petsc with the txpetscgpu library.
+    matrices.
 
     Not Collective
 
 
 /*E
     MatCUSPFormatOperation - indicates the operation of CUSP (GPU)
-    matrices whose operation should use a particular storage format. Requires
-    the txpetscgpu package to use. Configure with --download-txpetscgpu to 
-    build/install petsc with the txpetscgpu library.
+    matrices whose operation should use a particular storage format.
 
     Not Collective
 
 +   MAT_CUSP_MULT_DIAG - sets the storage format for the diagonal matrix in the parallel MatMult
 .   MAT_CUSP_MULT_OFFDIAG - sets the storage format for the offdiagonal matrix in the parallel MatMult
 .   MAT_CUSP_MULT - sets the storage format for the entire matrix in the serial (single GPU) MatMult
-.   MAT_CUSP_SOLVE - sets the storage format for the triangular factors in the serial (single GPU) MatSolve
 -   MAT_CUSP_ALL - sets the storage format for all CUSP (GPU) matrices
 
     Level: intermediate
 
 .seealso: MatCUSPSetFormat(), MatCUSPStorageFormat
 E*/
-typedef enum {MAT_CUSP_MULT_DIAG, MAT_CUSP_MULT_OFFDIAG, MAT_CUSP_MULT, MAT_CUSP_SOLVE, MAT_CUSP_ALL} MatCUSPFormatOperation;
+typedef enum {MAT_CUSP_MULT_DIAG, MAT_CUSP_MULT_OFFDIAG, MAT_CUSP_MULT, MAT_CUSP_ALL} MatCUSPFormatOperation;
 
 PETSC_EXTERN PetscErrorCode MatCUSPSetFormat(Mat,MatCUSPFormatOperation,MatCUSPStorageFormat);
 #endif

include/petscpc.h

 PETSC_EXTERN PetscErrorCode PCGAMGInitializePackage(void);
 
 #if defined(PETSC_HAVE_PCBDDC)
-/* Enum defining how to treat the coarse problem */
-typedef enum {SEQUENTIAL_BDDC,REPLICATED_BDDC,PARALLEL_BDDC,MULTILEVEL_BDDC} CoarseProblemType;
 PETSC_EXTERN PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC,IS);
 PETSC_EXTERN PetscErrorCode PCBDDCSetCoarseningRatio(PC,PetscInt);
-PETSC_EXTERN PetscErrorCode PCBDDCSetMaxLevels(PC,PetscInt);
+PETSC_EXTERN PetscErrorCode PCBDDCSetLevels(PC,PetscInt);
 PETSC_EXTERN PetscErrorCode PCBDDCSetNullSpace(PC,MatNullSpace);
 PETSC_EXTERN PetscErrorCode PCBDDCSetDirichletBoundaries(PC,IS);
 PETSC_EXTERN PetscErrorCode PCBDDCGetDirichletBoundaries(PC,IS*);
 PETSC_EXTERN PetscErrorCode PCBDDCSetNeumannBoundaries(PC,IS);
 PETSC_EXTERN PetscErrorCode PCBDDCGetNeumannBoundaries(PC,IS*);
-PETSC_EXTERN PetscErrorCode PCBDDCSetCoarseProblemType(PC,CoarseProblemType);
 PETSC_EXTERN PetscErrorCode PCBDDCSetDofsSplitting(PC,PetscInt,IS[]);
 PETSC_EXTERN PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode);
 PETSC_EXTERN PetscErrorCode PCBDDCCreateFETIDPOperators(PC,Mat*,PC*);

include/petscsnes.h

 PETSC_EXTERN PetscErrorCode SNESSetPicard(SNES,Vec,PetscErrorCode (*SNESFunction)(SNES,Vec,Vec,void*),Mat,Mat,PetscErrorCode (*SNESJacobianFunction)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void*);
 PETSC_EXTERN PetscErrorCode SNESGetPicard(SNES,Vec*,PetscErrorCode (**SNESFunction)(SNES,Vec,Vec,void*),Mat*,Mat*,PetscErrorCode (**SNESJacobianFunction)(SNES,Vec,Mat*,Mat*,MatStructure*,void*),void**);
 PETSC_EXTERN PetscErrorCode SNESSetInitialFunction(SNES,Vec);
-PETSC_EXTERN PetscErrorCode SNESSetInitialFunctionNorm(SNES,PetscReal);
 
 PETSC_EXTERN PetscErrorCode SNESSetObjective(SNES,PetscErrorCode (*SNESObjectiveFunction)(SNES,Vec,PetscReal *,void*),void*);
 PETSC_EXTERN PetscErrorCode SNESGetObjective(SNES,PetscErrorCode (**SNESObjectiveFunction)(SNES,Vec,PetscReal *,void*),void**);

include/petscsys.h

   Concepts: memory allocation
 
 M*/
-#define PetscNewLog(o,A,b) (PetscNew(A,b) || ((o) ? PetscLogObjectMemory(o,sizeof(A)) : 0))
+#define PetscNewLog(o,A,b) (PetscNew(A,b) || ((o) ? PetscLogObjectMemory((PetscObject)o,sizeof(A)) : 0))
 
 /*MC
    PetscFree - Frees memory
 PETSC_EXTERN PetscErrorCode PetscDataTypeToMPIDataType(PetscDataType,MPI_Datatype*);
 PETSC_EXTERN PetscErrorCode PetscMPIDataTypeToPetscDataType(MPI_Datatype,PetscDataType*);
 PETSC_EXTERN PetscErrorCode PetscDataTypeGetSize(PetscDataType,size_t*);
+PETSC_EXTERN PetscErrorCode PetscDataTypeFromString(const char*,PetscDataType*,PetscBool*);
 
 /*
     Basic memory and string operations. These are usually simple wrappers
 PETSC_EXTERN PetscErrorCode PetscSortIntWithScalarArray(PetscInt,PetscInt[],PetscScalar[]);
 PETSC_EXTERN PetscErrorCode PetscSortReal(PetscInt,PetscReal[]);
 PETSC_EXTERN PetscErrorCode PetscSortRealWithPermutation(PetscInt,const PetscReal[],PetscInt[]);
+PETSC_EXTERN PetscErrorCode PetscSortRemoveDupsReal(PetscInt*,PetscReal[]);
 PETSC_EXTERN PetscErrorCode PetscSortSplit(PetscInt,PetscInt,PetscScalar[],PetscInt[]);
 PETSC_EXTERN PetscErrorCode PetscSortSplitReal(PetscInt,PetscInt,PetscReal[],PetscInt[]);
 PETSC_EXTERN PetscErrorCode PetscProcessTree(PetscInt,const PetscBool [],const PetscInt[],PetscInt*,PetscInt**,PetscInt**,PetscInt**,PetscInt**);
 
 PETSC_EXTERN MPI_Comm PetscObjectComm(PetscObject);
 
+typedef enum {PETSC_SUBCOMM_GENERAL=0,PETSC_SUBCOMM_CONTIGUOUS=1,PETSC_SUBCOMM_INTERLACED=2} PetscSubcommType;
+PETSC_EXTERN const char *const PetscSubcommTypes[];
+
 /*S
    PetscSubcomm - Context of MPI subcommunicators, used by PCREDUNDANT
 
 typedef struct _n_PetscSubcomm* PetscSubcomm;
 
 struct _n_PetscSubcomm {
-  MPI_Comm   parent;      /* parent communicator */
-  MPI_Comm   dupparent;   /* duplicate parent communicator, under which the processors of this subcomm have contiguous rank */
-  MPI_Comm   comm;        /* this communicator */
-  PetscInt   n;           /* num of subcommunicators under the parent communicator */
-  PetscInt   color;       /* color of processors belong to this communicator */
+  MPI_Comm    parent;           /* parent communicator */
+  MPI_Comm    dupparent;        /* duplicate parent communicator, under which the processors of this subcomm have contiguous rank */
+  MPI_Comm    comm;             /* this communicator */
+  PetscMPIInt n;                /* num of subcommunicators under the parent communicator */
+  PetscMPIInt color;            /* color of processors belong to this communicator */
+  PetscMPIInt *subsize;         /* size of subcommunicator[color] */
+  PetscSubcommType type;
 };
 
-typedef enum {PETSC_SUBCOMM_GENERAL=0,PETSC_SUBCOMM_CONTIGUOUS=1,PETSC_SUBCOMM_INTERLACED=2} PetscSubcommType;
-PETSC_EXTERN const char *const PetscSubcommTypes[];
-
 PETSC_EXTERN PetscErrorCode PetscSubcommCreate(MPI_Comm,PetscSubcomm*);
 PETSC_EXTERN PetscErrorCode PetscSubcommDestroy(PetscSubcomm*);
 PETSC_EXTERN PetscErrorCode PetscSubcommSetNumber(PetscSubcomm,PetscInt);
 PETSC_EXTERN PetscErrorCode PetscSubcommSetType(PetscSubcomm,PetscSubcommType);
-PETSC_EXTERN PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm,PetscMPIInt,PetscMPIInt,PetscMPIInt);
+PETSC_EXTERN PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm,PetscMPIInt,PetscMPIInt);
+PETSC_EXTERN PetscErrorCode PetscSubcommView(PetscSubcomm,PetscViewer);
+PETSC_EXTERN PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm);
 
 /*S
    PetscSegBuffer - a segmented extendable buffer

include/petscvec.h

 PETSC_EXTERN PetscErrorCode VecStrideMaxAll(Vec,PetscInt [],PetscReal []);
 PETSC_EXTERN PetscErrorCode VecStrideMinAll(Vec,PetscInt [],PetscReal []);
 PETSC_EXTERN PetscErrorCode VecStrideScaleAll(Vec,const PetscScalar[]);
+PETSC_EXTERN PetscErrorCode VecUniqueEntries(Vec,PetscInt*,PetscScalar**);
 
 PETSC_EXTERN PetscErrorCode VecStrideNorm(Vec,PetscInt,NormType,PetscReal*);
 PETSC_EXTERN PetscErrorCode VecStrideMax(Vec,PetscInt,PetscInt *,PetscReal *);
 PETSC_EXTERN PetscErrorCode PetscCUSPIndicesDestroy(PetscCUSPIndices*);
 PETSC_EXTERN PetscErrorCode VecCUSPCopyToGPUSome_Public(Vec,PetscCUSPIndices);
 PETSC_EXTERN PetscErrorCode VecCUSPCopyFromGPUSome_Public(Vec,PetscCUSPIndices);
-
-#if defined(PETSC_HAVE_TXPETSCGPU)
-PETSC_EXTERN PetscErrorCode VecCUSPResetIndexBuffersFlagsGPU_Public(PetscCUSPIndices);
-PETSC_EXTERN PetscErrorCode VecCUSPCopySomeToContiguousBufferGPU_Public(Vec,PetscCUSPIndices);
-PETSC_EXTERN PetscErrorCode VecCUSPCopySomeFromContiguousBufferGPU_Public(Vec,PetscCUSPIndices);
 PETSC_EXTERN PetscErrorCode VecScatterInitializeForGPU(VecScatter,Vec,ScatterMode);
 PETSC_EXTERN PetscErrorCode VecScatterFinalizeForGPU(VecScatter);
-#endif
-
 PETSC_EXTERN PetscErrorCode VecCreateSeqCUSP(MPI_Comm,PetscInt,Vec*);
 PETSC_EXTERN PetscErrorCode VecCreateMPICUSP(MPI_Comm,PetscInt,PetscInt,Vec*);
 #endif

include/petscviewer.h

   PETSC_VIEWER_DRAW_CONTOUR,
   PETSC_VIEWER_DRAW_PORTS,
   PETSC_VIEWER_VTK_VTS,
+  PETSC_VIEWER_VTK_VTR,
   PETSC_VIEWER_VTK_VTU,
   PETSC_VIEWER_BINARY_MATLAB,
   PETSC_VIEWER_NATIVE,

share/petsc/datafiles/meshes/annulus-20.exo

Binary file added.

share/petsc/datafiles/meshes/simpleblock-100.exo

Binary file added.

src/benchmarks/benchmarkExample.py

 #!/usr/bin/env python
 import os,sys
 sys.path.append(os.path.join(os.environ['PETSC_DIR'], 'config'))
+sys.path.append(os.getcwd())
 from builder2 import buildExample
 from benchmarkBatch import generateBatchScript
 
     return ' '.join(a)
 
   def run(self, numProcs = 1, log = True, **opts):
-    if self.petsc.mpiexec() is None:
-      cmd = self.petsc.example(self.num)
-    else:
-      cmd = ' '.join([self.petsc.mpiexec(), '-n', str(numProcs), self.petsc.example(self.num)])
-    cmd += ' '+self.optionsToString(**self.opts)+' '+self.optionsToString(**opts)
+    cmd = ''
+    if self.petsc.mpiexec() is not None:
+      cmd += self.petsc.mpiexec() + ' '
+      numProcs = os.environ.get('NUM_RANKS', numProcs)
+      cmd += ' -n ' + str(numProcs) + ' '
+      if os.environ.has_key('PE_HOSTFILE'):
+        cmd += ' -hostfile hostfile '
+    cmd += ' '.join([self.petsc.example(self.num), self.optionsToString(**self.opts), self.optionsToString(**opts)])
     if 'batch' in opts and opts['batch']:
       del opts['batch']
       filename = generateBatchScript(self.num, numProcs, 120, ' '+self.optionsToString(**self.opts)+' '+self.optionsToString(**opts))
       if not name in events:
         events[name] = []
       try:
-        events[name].append((stage.event[name].Time[0], stage.event[name].Flops[0]/(stage.event[name].Time[0] * 1e6)))
+        events[name].append((max(stage.event[name].Time), sum(stage.event[name].Flops)/(max(stage.event[name].Time) * 1e6)))
       except ZeroDivisionError:
-        events[name].append((stage.event[name].Time[0], 0))
+        events[name].append((max(stage.event[name].Time), 0))
   return
 
 def plotTime(library, num, eventNames, sizes, times, events):
     savefig(filename)
   return
 
+def plotEventScaling(library, num, eventNames, procs, events, filename = None):
+  from pylab import legend, plot, savefig, semilogy, show, title, xlabel, ylabel
+  import numpy as np
+
+  arches = procs.keys()
+  bs     = events[arches[0]].keys()[0]
+  data   = []
+  names  = []
+  for arch, style in zip(arches, ['-', ':']):
+    for event, color in zip(eventNames, ['b', 'g', 'r', 'y']):
+      if event in events[arch][bs]:
+        names.append(arch+'-'+str(bs)+' '+event)
+        data.append(procs[arch][bs])
+        data.append(1e-3*np.array(events[arch][bs][event])[:,1])
+        data.append(color+style)
+      else:
+        print 'Could not find %s in %s-%d events' % (event, arch, bs)
+  plot(*data)
+  title('Performance on '+library+' Example '+str(num))
+  xlabel('Number of Processors')
+  ylabel('Computation Rate (GF/s)')
+  legend(names, 'upper left', shadow = True)
+  if filename is None:
+    show()
+  else:
+    savefig(filename)
+  return
+
 def plotSummaryLine(library, num, eventNames, sizes, times, events):
   from pylab import legend, plot, show, title, xlabel, ylabel
   import numpy as np
   return
 
 def getDMComplexSize(dim, out):
-  '''Retrieves the number of cells from '''
+  '''Retrieves the number of cells from -dm_view output'''
   size = 0
   for line in out.split('\n'):
     if line.strip().startswith(str(dim)+'-cells: '):
-      size = int(line.strip()[9:])
+      sizes = line.strip()[9:].split()
+      size  = sum(map(int, sizes))
       break
   return size
 
   for numBlock in [2**i for i in map(int, args.blockExp)]:
     opts['gpu_blocks'] = numBlock
     # Generate new block size
-    cmd = './bin/pythonscripts/PetscGenerateFEMQuadrature.py %d %d %d %d %s %s.h' % (args.dim, args.order, numComp, numBlock, args.operator, os.path.splitext(source[0])[0])
+    cmd = os.environ.get('PETSC_DIR', '.')
+    cmd += '/bin/pythonscripts/PetscGenerateFEMQuadrature.py %d %d %d %d %s %s.h' % (args.dim, args.order, numComp, numBlock, args.operator, os.path.splitext(source[0])[0])
     print(cmd)
     ret = os.system('python '+cmd)
     args.files = ['['+','.join(source)+']']

src/contrib/nightlysummary/README

 This folder contains a summary generator for the PETSc nightly tests.
-The script runhtml.sh should be run from $PETSC_DIR.
-It grabs the latest logs from the web, places them into the subfolder currentlogs/, and then processes them.
-A summary HTML file is generated in subfolder html/
+
+Usage: $> runhtml.sh LOGDIR OUTFILE
+  LOGDIR  ... Directory where to find the log files
+  OUTFILE ... The output file where the HTML code will be written to. Note that paths in OUTFILE are relative to LOGDIR.
 

src/contrib/nightlysummary/html/style.css

-
-
-div.main
-{
-  max-width: 800px;
-  background: white;
-  margin-left: auto;
-  margin-right: auto;
-  padding: 20px;
-  border: 5px solid #CCCCCC;
-  border-radius: 10px;
-  background: #FBFBFB;
-}
-
-table
-{
-  /*border: 1px solid black;
-  border-radius: 10px;*/
-  padding: 3px;
-}
-
-td a:link, td a:visited, td a:focus, td a:active
-{
-  font-weight: bold;
-  text-decoration: underline;
-  color: black;
-}
-
-td a:hover
-{
-  font-weight: bold;
-  text-decoration: underline;
-  color: black;
-}
-
-th
-{
-  padding: 10px;
-  font-size: 1.1em;
-  font-weight: bold;
-  text-align: center;
-}
-
-td.desc
-{
-  max-width: 650px;
-  padding: 2px;
-  font-size: 0.9em;
-}
-
-td.green
-{
-  text-align: center;
-  vertical-align: middle;
-  padding: 2px;
-  background: #01DF01;
-  min-width: 50px;
-}
-
-td.yellow
-{
-  text-align: center;
-  vertical-align: middle;
-  padding: 2px;
-  background: #F4FA58;
-  min-width: 50px;
-}
-
-td.red
-{
-  text-align: center;
-  vertical-align: middle;
-  padding: 2px;
-  background: #FE2E2E;
-  min-width: 50px;
-}
-

src/contrib/nightlysummary/runhtml.sh

 #!/bin/bash
 
-currentlogdir=src/contrib/nightlysummary/currentlogs
 
-## Grab logs:
-rm $currentlogdir/*
-mkdir $currentlogdir
-wget -P $currentlogdir ftp://ftp.mcs.anl.gov/pub/petsc/nightlylogs/*.log
+if test $# -lt 2 ; then
+  echo "Usage: $> runhtml.sh LOGDIR OUTFILE";
+  echo " LOGDIR  ... Directory where to find the log files";
+  echo " OUTFILE ... The output file where the HTML code will be written to";
+  exit 0
+fi
 
+LOGDIR=$1
+OUTFILE=$2
 
-OUTFILE=src/contrib/nightlysummary/html/index.html
+## Write HTML header
 
 echo "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">" > $OUTFILE
 echo "<html>" >> $OUTFILE
 echo "<head><title>PETSc Test Summary</title>" >> $OUTFILE
-echo "<link href=\"style.css\" rel=\"stylesheet\" type=\"text/css\"></head>" >> $OUTFILE
+
+## CSS beautification begin ##
+
+echo "<style type=\"text/css\">" >> $OUTFILE
+echo "div.main {" >> $OUTFILE
+echo "  max-width: 800px;" >> $OUTFILE
+echo "  background: white;" >> $OUTFILE
+echo "  margin-left: auto;" >> $OUTFILE
+echo "  margin-right: auto;" >> $OUTFILE
+echo "  padding: 20px;" >> $OUTFILE
+echo "  border: 5px solid #CCCCCC;" >> $OUTFILE
+echo "  border-radius: 10px;" >> $OUTFILE
+echo "  background: #FBFBFB;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "table {" >> $OUTFILE
+echo "  /*border: 1px solid black;" >> $OUTFILE
+echo "  border-radius: 10px;*/" >> $OUTFILE
+echo "  padding: 3px;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "td a:link, td a:visited, td a:focus, td a:active {" >> $OUTFILE
+echo "  font-weight: bold;" >> $OUTFILE
+echo "  text-decoration: underline;" >> $OUTFILE
+echo "  color: black;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "td a:hover {" >> $OUTFILE
+echo "  font-weight: bold;" >> $OUTFILE
+echo "  text-decoration: underline;" >> $OUTFILE
+echo "  color: black;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "th {" >> $OUTFILE
+echo "  padding: 10px;" >> $OUTFILE
+echo "  font-size: 1.1em;" >> $OUTFILE
+echo "  font-weight: bold;" >> $OUTFILE
+echo "  text-align: center;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "td.desc {" >> $OUTFILE
+echo "  max-width: 650px;" >> $OUTFILE
+echo "  padding: 2px;" >> $OUTFILE
+echo "  font-size: 0.9em;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "td.green {" >> $OUTFILE
+echo "  text-align: center;" >> $OUTFILE
+echo "  vertical-align: middle;" >> $OUTFILE
+echo "  padding: 2px;" >> $OUTFILE
+echo "  background: #01DF01;" >> $OUTFILE
+echo "  min-width: 50px;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "td.yellow {" >> $OUTFILE
+echo "  text-align: center;" >> $OUTFILE
+echo "  vertical-align: middle;" >> $OUTFILE
+echo "  padding: 2px;" >> $OUTFILE
+echo "  background: #F4FA58;" >> $OUTFILE
+echo "  min-width: 50px;" >> $OUTFILE
+echo "}" >> $OUTFILE
+
+echo "td.red {" >> $OUTFILE
+echo "  text-align: center;" >> $OUTFILE
+echo "  vertical-align: middle;" >> $OUTFILE
+echo "  padding: 2px;" >> $OUTFILE
+echo "  background: #FE2E2E;" >> $OUTFILE
+echo "  min-width: 50px;" >> $OUTFILE
+echo "}" >> $OUTFILE
+echo "</style>" >> $OUTFILE
+
+## CSS beautification end ##
+
+echo "</head>" >> $OUTFILE
 echo "<body><div class=\"main\">" >> $OUTFILE
 echo "<center>Last update: " >> $OUTFILE
 date >> $OUTFILE
 echo "</center>" >> $OUTFILE
 echo "<h1>PETSc Test Summary</h1>" >> $OUTFILE
-echo "<p>This page is an automated summary of the output generated by the <a href=\"http://ftp.mcs.anl.gov/pub/petsc/nightlylogs/\">Nightly logs</a>. It serves as a basis for discussion rather than trying to be a full-fledged testing solution.</p>" >> $OUTFILE
+echo "<p>This page is an automated summary of the output generated by the Nightly logs. It provides a quick overview of the test results rather than trying to be a full-fledged testing solution.</p>" >> $OUTFILE
 
 # Writes a full result table
 # inputs:
   echo "<center><table>" >> $OUTFILE
   echo "<tr><th>Test</th><th>Warnings</th><th>Errors</th></tr>" >> $OUTFILE
 
-  for f in `ls $currentlogdir/$2*.log`
+  for f in `ls $LOGDIR/$2*.log`
   do
     echo "Processing file $f..."
     echo "<tr><td class=\"desc\">" >> $OUTFILE
-    echo "<a href=\"http://ftp.mcs.anl.gov/pub/petsc/nightlylogs/${f#$currentlogdir/}\">${f#$currentlogdir/}</a>" >> $OUTFILE
+    echo "<a href=\"${f#$LOGDIR/}\">${f#$LOGDIR/}</a>" >> $OUTFILE
 
     # Write number of warnings:
     numwarnings=`grep "[Ww]arning[: ]" $f | wc -l`

src/dm/examples/tests/ex42.c

+/* -*- Mode: C++; c-basic-offset:2 ; indent-tabs-mode:nil ; -*- */
+
+static char help[] = "Test VTK Rectilinear grid (.vtr) viewer support\n\n";
+
+#include <mpi.h>
+#include "petscdmda.h"
+
+/*
+  test_3d - Writes a 3D DMDA global vector, with uniform coordinates
+  attached, to a VTK rectilinear grid (.vtr) file.
+
+  Input Parameter:
+.  filename - output path; PetscViewerVTKOpen selects the writer from the
+              .vtr extension
+*/
+PetscErrorCode test_3d(const char filename[])
+{
+  MPI_Comm          comm = MPI_COMM_WORLD;
+  /* global grid size, degrees of freedom per node, stencil width */
+  const PetscInt    M=10, N=15, P=30, dof=1, sw=1;
+  /* physical extents of the domain used for the uniform coordinates */
+  const PetscScalar Lx=1.0, Ly=1.0, Lz=1.0;
+  DM                da;
+  Vec               v;
+  PetscViewer       view;
+  DMDALocalInfo     info;
+  PetscScalar       ***va;
+  PetscInt          i,j,k;
+  PetscErrorCode    ierr;
+
+  ierr = DMDACreate3d(comm,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,
+                      DMDA_STENCIL_STAR, M,N,P,
+                      PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,sw,NULL,NULL,NULL,&da);CHKERRQ(ierr);
+
+  ierr = DMDASetUniformCoordinates(da,0.0,Lx,0.0,Ly,0.0,Lz);CHKERRQ(ierr);
+  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
+  ierr = DMCreateGlobalVector(da,&v);CHKERRQ(ierr);
+  ierr = DMDAVecGetArray(da,v,&va);CHKERRQ(ierr);
+  /* fill the locally owned portion with the squared distance from the domain center */
+  for (k=info.zs; k<info.zs+info.zm; k++) {
+    for (j=info.ys; j<info.ys+info.ym; j++) {
+      for (i=info.xs; i<info.xs+info.xm; i++) {
+        PetscScalar x = (Lx*i)/M;
+        PetscScalar y = (Ly*j)/N;
+        PetscScalar z = (Lz*k)/P;
+        va[k][j][i] = pow(x-0.5*Lx,2)+pow(y-0.5*Ly,2)+pow(z-0.5*Lz,2);
+      }
+    }
+  }
+  ierr = DMDAVecRestoreArray(da,v,&va);CHKERRQ(ierr);
+  ierr = PetscViewerVTKOpen(comm,filename,FILE_MODE_WRITE,&view);CHKERRQ(ierr);
+  ierr = VecView(v,view);CHKERRQ(ierr);
+  ierr = PetscViewerDestroy(&view);CHKERRQ(ierr);
+  ierr = VecDestroy(&v);CHKERRQ(ierr);
+  ierr = DMDestroy(&da);CHKERRQ(ierr);
+  return 0;
+}
+
+
+/*
+  test_2d - Writes a 2D DMDA global vector, with uniform coordinates
+  attached, to a VTK rectilinear grid (.vtr) file.
+
+  Input Parameter:
+.  filename - output path; PetscViewerVTKOpen selects the writer from the
+              .vtr extension
+*/
+PetscErrorCode test_2d(const char filename[])
+{
+  MPI_Comm          comm = MPI_COMM_WORLD;
+  /* global grid size, degrees of freedom per node, stencil width */
+  const PetscInt    M=10, N=20, dof=1, sw=1;
+  /* Lz is passed to DMDASetUniformCoordinates but unused for a 2D DMDA */
+  const PetscScalar Lx=1.0, Ly=1.0, Lz=1.0;
+  DM                da;
+  Vec               v;
+  PetscViewer       view;
+  DMDALocalInfo     info;
+  PetscScalar       **va;
+  PetscInt          i,j;
+  PetscErrorCode    ierr;
+
+  ierr = DMDACreate2d(comm,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,
+                      DMDA_STENCIL_STAR, M,N,
+                      PETSC_DECIDE,PETSC_DECIDE,dof,sw,NULL,NULL,&da);CHKERRQ(ierr);
+  ierr = DMDASetUniformCoordinates(da,0.0,Lx,0.0,Ly,0.0,Lz);CHKERRQ(ierr);
+  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
+  ierr = DMCreateGlobalVector(da,&v);CHKERRQ(ierr);
+  ierr = DMDAVecGetArray(da,v,&va);CHKERRQ(ierr);
+  /* fill the locally owned portion with the squared distance from the domain center */
+  for (j=info.ys; j<info.ys+info.ym; j++) {
+    for (i=info.xs; i<info.xs+info.xm; i++) {
+      PetscScalar x = (Lx*i)/M;
+      PetscScalar y = (Ly*j)/N;
+      va[j][i] = pow(x-0.5*Lx,2)+pow(y-0.5*Ly,2);
+    }
+  }
+  ierr = DMDAVecRestoreArray(da,v,&va);CHKERRQ(ierr);
+  ierr = PetscViewerVTKOpen(comm,filename,FILE_MODE_WRITE,&view);CHKERRQ(ierr);
+  ierr = VecView(v,view);CHKERRQ(ierr);
+  ierr = PetscViewerDestroy(&view);CHKERRQ(ierr);
+  ierr = VecDestroy(&v);CHKERRQ(ierr);
+  ierr = DMDestroy(&da);CHKERRQ(ierr);
+  return 0;
+}
+
+
+/*
+  test_2d_nocoord - Writes a 2D DMDA global vector to a VTK rectilinear
+  grid (.vtr) file WITHOUT setting coordinates on the DM, exercising the
+  viewer's handling of a coordinate-less DMDA.
+
+  Input Parameter:
+.  filename - output path; PetscViewerVTKOpen selects the writer from the
+              .vtr extension
+*/
+PetscErrorCode test_2d_nocoord(const char filename[])
+{
+  MPI_Comm          comm = MPI_COMM_WORLD;
+  /* global grid size, degrees of freedom per node, stencil width */
+  const PetscInt    M=10, N=20, dof=1, sw=1;
+  /* domain extents used only to compute the field values below */
+  const PetscScalar Lx=1.0, Ly=1.0;
+  DM                da;
+  Vec               v;
+  PetscViewer       view;
+  DMDALocalInfo     info;
+  PetscScalar       **va;
+  PetscInt          i,j;
+  PetscErrorCode    ierr;
+
+  ierr = DMDACreate2d(comm,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,
+                      DMDA_STENCIL_STAR, M,N,
+                      PETSC_DECIDE,PETSC_DECIDE,dof,sw,NULL,NULL,&da);CHKERRQ(ierr);
+
+  /* note: no DMDASetUniformCoordinates() call here, by design */
+  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
+  ierr = DMCreateGlobalVector(da,&v);CHKERRQ(ierr);
+  ierr = DMDAVecGetArray(da,v,&va);CHKERRQ(ierr);
+  /* fill the locally owned portion with the squared distance from the domain center */
+  for (j=info.ys; j<info.ys+info.ym; j++) {
+    for (i=info.xs; i<info.xs+info.xm; i++) {
+      PetscScalar x = (Lx*i)/M;
+      PetscScalar y = (Ly*j)/N;
+      va[j][i] = pow(x-0.5*Lx,2)+pow(y-0.5*Ly,2);
+    }
+  }
+  ierr = DMDAVecRestoreArray(da,v,&va);CHKERRQ(ierr);
+  ierr = PetscViewerVTKOpen(comm,filename,FILE_MODE_WRITE,&view);CHKERRQ(ierr);
+  ierr = VecView(v,view);CHKERRQ(ierr);
+  ierr = PetscViewerDestroy(&view);CHKERRQ(ierr);
+  ierr = VecDestroy(&v);CHKERRQ(ierr);
+  ierr = DMDestroy(&da);CHKERRQ(ierr);
+  return 0;
+}
+
+
+/*
+  test_3d_nocoord - Writes a 3D DMDA global vector to a VTK rectilinear
+  grid (.vtr) file WITHOUT setting coordinates on the DM, exercising the
+  viewer's handling of a coordinate-less DMDA.
+
+  Input Parameter:
+.  filename - output path; PetscViewerVTKOpen selects the writer from the
+              .vtr extension
+*/
+PetscErrorCode test_3d_nocoord(const char filename[])
+{
+  MPI_Comm          comm = MPI_COMM_WORLD;
+  /* global grid size, degrees of freedom per node, stencil width */
+  const PetscInt    M=10, N=20, P=30, dof=1, sw=1;
+  /* domain extents used only to compute the field values below */
+  const PetscScalar Lx=1.0, Ly=1.0, Lz=1.0;
+  DM                da;
+  Vec               v;
+  PetscViewer       view;
+  DMDALocalInfo     info;
+  PetscScalar       ***va;
+  PetscInt          i,j,k;
+  PetscErrorCode    ierr;
+
+  ierr = DMDACreate3d(comm,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,
+                      DMDA_STENCIL_STAR, M,N,P,
+                      PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,sw,NULL,NULL,NULL,&da);CHKERRQ(ierr);
+
+  /* note: no DMDASetUniformCoordinates() call here, by design */
+  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
+  ierr = DMCreateGlobalVector(da,&v);CHKERRQ(ierr);
+  ierr = DMDAVecGetArray(da,v,&va);CHKERRQ(ierr);
+  /* fill the locally owned portion with the squared distance from the domain center */
+  for (k=info.zs; k<info.zs+info.zm; k++) {
+    for (j=info.ys; j<info.ys+info.ym; j++) {
+      for (i=info.xs; i<info.xs+info.xm; i++) {
+        PetscScalar x = (Lx*i)/M;
+        PetscScalar y = (Ly*j)/N;
+        PetscScalar z = (Lz*k)/P;
+        va[k][j][i] = pow(x-0.5*Lx,2)+pow(y-0.5*Ly,2)+pow(z-0.5*Lz,2);
+      }
+    }
+  }
+  ierr = DMDAVecRestoreArray(da,v,&va);CHKERRQ(ierr);
+  ierr = PetscViewerVTKOpen(comm,filename,FILE_MODE_WRITE,&view);CHKERRQ(ierr);
+  ierr = VecView(v,view);CHKERRQ(ierr);
+  ierr = PetscViewerDestroy(&view);CHKERRQ(ierr);
+  ierr = VecDestroy(&v);CHKERRQ(ierr);
+  ierr = DMDestroy(&da);CHKERRQ(ierr);
+  return 0;
+}
+
+/* Driver: runs the four VTR writer tests, each producing one .vtr file
+   in the working directory. */
+int main(int argc, char *argv[])
+{
+  PetscErrorCode ierr;
+  ierr = PetscInitialize(&argc,&argv,0,help);CHKERRQ(ierr);
+  /* NOTE(review): the PetscErrorCode returned by each test_* call is
+     discarded; consider ierr = test_3d(...);CHKERRQ(ierr); -- TODO confirm */
+  test_3d("3d.vtr");
+  test_2d("2d.vtr");
+  test_2d_nocoord("2d_nocoord.vtr");
+  test_3d_nocoord("3d_nocoord.vtr");
+  ierr = PetscFinalize();
+  return 0;
+}

src/dm/examples/tests/ex7.c

 
-static char help[] = "Tests DMDALocalToLocalxxx().\n\n";
+static char help[] = "Tests DMLocalToLocalxxx() for DMDA.\n\n";
 
 #include <petscdmda.h>
 
 
   stencil_type = (DMDAStencilType) st;
 
-  ierr = PetscOptionsHasName(NULL,"-2d",&flg2);CHKERRQ(ierr);
-  ierr = PetscOptionsHasName(NULL,"-3d",&flg3);CHKERRQ(ierr);
+  ierr = PetscOptionsHasName(NULL,"-grid2d",&flg2);CHKERRQ(ierr);
+  ierr = PetscOptionsHasName(NULL,"-grid3d",&flg3);CHKERRQ(ierr);
   if (flg2) {
     ierr = DMDACreate2d(PETSC_COMM_WORLD,periodic,periodic,stencil_type,M,N,m,n,dof,stencil_width,
                         NULL,NULL,&da);CHKERRQ(ierr);
   ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
 
 
-  ierr = DMDALocalToLocalBegin(da,local,INSERT_VALUES,local_copy);CHKERRQ(ierr);
-  ierr = DMDALocalToLocalEnd(da,local,INSERT_VALUES,local_copy);CHKERRQ(ierr);
+  ierr = DMLocalToLocalBegin(da,local,INSERT_VALUES,local_copy);CHKERRQ(ierr);
+  ierr = DMLocalToLocalEnd(da,local,INSERT_VALUES,local_copy);CHKERRQ(ierr);
 
 
   ierr = PetscOptionsGetBool(NULL,"-save",&flg,NULL);CHKERRQ(ierr);

src/dm/examples/tests/makefile

 EXAMPLESC       = ex1.c ex2.c ex3.c ex4.c ex5.c ex6.c ex7.c ex8.c ex9.c ex10.c\
                   ex11.c ex12.c ex12.m ex13.c ex14.c ex15.c ex16.c ex17.c ex18.c ex19.c \
 	          ex21.c ex22.c ex23.c ex24.c ex25.c ex26.c ex27.c ex28.c ex30.c \
-	          ex31.c ex32.c ex34.c ex36.c ex37.c ex38.c ex39.c ex40.c ex41.c
+	          ex31.c ex32.c ex34.c ex36.c ex37.c ex38.c ex39.c ex40.c ex41.c \
+	          ex42.c
 EXAMPLESF       =
 MANSEC          = DM