Commits

BarryFSmith committed 0157cb0 Merge

Merge branch 'jed/dmda-memusage'

Files changed (48)

include/finclude/ftn-custom/petscdmda.h90

 #include "finclude/ftn-custom/petscvechide.h90"
 #include "finclude/ftn-custom/petscdmhide.h90"
 
-      Interface
-        Subroutine DMDAGetGlobalIndicesF90(v,n,array,ierr)
-          USE_DM_HIDE
-          PetscInt, pointer :: array(:)
-          PetscInt  n
-          PetscErrorCode ierr
-          DM_HIDE   v
-        End Subroutine
-        Subroutine DMDARestoreGlobalIndicesF90(v,n,array,ierr)
-          USE_DM_HIDE
-          PetscInt, pointer :: array(:)
-          PetscInt  n
-          PetscErrorCode ierr
-          DM_HIDE   v
-        End Subroutine
-      End Interface
-
-
       type DMDALocalInfof90
         PetscInt ::       dim,dof,sw
         PetscInt ::       mx,my,mz

include/petscdmda.h

 PETSC_EXTERN PetscErrorCode DMDAGlobalToNaturalAllCreate(DM,VecScatter*);
 PETSC_EXTERN PetscErrorCode DMDANaturalAllToGlobalCreate(DM,VecScatter*);
 
-PETSC_EXTERN PetscErrorCode DMDAGetGlobalIndices(DM,PetscInt*,const PetscInt*[]);
-PETSC_EXTERN PetscErrorCode DMDARestoreGlobalIndices(DM,PetscInt*,const PetscInt*[]);
-
 PETSC_EXTERN PetscErrorCode DMDAGetScatter(DM,VecScatter*,VecScatter*);
 PETSC_EXTERN PetscErrorCode DMDAGetNeighbors(DM,const PetscMPIInt**);
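
The replacement pattern is the same everywhere these routines were used; a minimal sketch (variable names are illustrative, not taken from any one hunk below):

  ISLocalToGlobalMapping ltogm;
  const PetscInt         *ltog;
  PetscInt               nloc;

  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);        /* mapping is owned by the DM; do not destroy it */
  ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);  /* number of local indices, ghost points included */
  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
  /* ... use ltog[0..nloc-1] where DMDAGetGlobalIndices() was used before ... */
  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);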
 

include/petscis.h

 
    Level: beginner
 
-.seealso: ISGlobalToLocalMappingApply()
+.seealso: ISGlobalToLocalMappingApplyBlock()
 
 E*/
 typedef enum {IS_GTOLM_MASK,IS_GTOLM_DROP} ISGlobalToLocalMappingType;
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping,PetscInt,const PetscInt[],PetscInt[]);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping,IS,IS*);
 PETSC_EXTERN PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping,ISGlobalToLocalMappingType,PetscInt,const PetscInt[],PetscInt*,PetscInt[]);
+PETSC_EXTERN PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping,ISGlobalToLocalMappingType,PetscInt,const PetscInt[],PetscInt*,PetscInt[]);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping,PetscInt*);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]);
+PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]);
+PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping,PetscInt*,PetscInt*[],PetscInt*[],PetscInt**[]);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping,const PetscInt**);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping,const PetscInt**);
 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping,const PetscInt**);
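
A minimal sketch of the new ISGlobalToLocalMappingApplyBlock(), which works like ISGlobalToLocalMappingApply() but on block indices; as the users-manual hunk below notes, passing NULL for idxout returns only the required length in nout (mapping, nin, and gidx are assumed inputs):

  PetscInt nout,*lidx;

  ierr = ISGlobalToLocalMappingApplyBlock(mapping,IS_GTOLM_DROP,nin,gidx,&nout,NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(nout,&lidx);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mapping,IS_GTOLM_DROP,nin,gidx,&nout,lidx);CHKERRQ(ierr);
  /* lidx[] now holds the local block indices corresponding to gidx[] */
  ierr = PetscFree(lidx);CHKERRQ(ierr);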

src/dm/examples/tests/ex4.c

 
  /* Tests mappings between application/PETSc orderings */
   if (testorder) {
+    ISLocalToGlobalMapping ltogm;
+
+    ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
     ierr = DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL);CHKERRQ(ierr);
-    ierr = DMDAGetGlobalIndices(da,&nloc,&ltog);CHKERRQ(ierr);
     ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
     ierr = PetscMalloc1(nloc,&iglobal);CHKERRQ(ierr);
 
       }
     }
     ierr = PetscFree(iglobal);CHKERRQ(ierr);
-    ierr = DMDARestoreGlobalIndices(da,&nloc,&ltog);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
   }
 
   /* Free memory */

src/dm/examples/tests/ex6.c

 
  /* Tests mappings between application/PETSc orderings */
   if (test_order) {
+    ISLocalToGlobalMapping ltogm;
+
+    ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
+
     ierr = DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);CHKERRQ(ierr);
-    ierr = DMDAGetGlobalIndices(da,&nloc,&ltog);CHKERRQ(ierr);
     ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
     /* ierr = AOView(ao,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
     ierr = PetscMalloc1(nloc,&iglobal);CHKERRQ(ierr);
       }
     }
     ierr = PetscFree(iglobal);CHKERRQ(ierr);
-    ierr = DMDARestoreGlobalIndices(da,&nloc,&ltog);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
   }
 
   /* Free memory */

src/dm/examples/tests/makefile

 	   if (${DIFF} output/ex4_1.out ex4_1.tmp) then true; \
	   else printf "${PWD}\nPossible problem with ex4_1, diffs above\n=========================================\n"; fi; \
 	   ${RM} -f ex4_1.tmp
+runex4_2:
+	-@${MPIEXEC} -n 1 ./ex4 -testorder -nox
 runex5:
 	-@${MPIEXEC} -n 3 ./ex5 -time 50 -nox
 runex6:
 	  ${RM} -f 2d_nocoord.vtr 2d.vtr 3d_nocoord.vtr 3d.vtr
 
 
-TESTEXAMPLES_C		  = ex1.PETSc runex1 ex1.rm ex4.PETSc runex4 ex4.rm ex15.PETSc ex15.rm ex16.PETSc ex16.rm \
+TESTEXAMPLES_C		  = ex1.PETSc runex1 ex1.rm ex4.PETSc runex4 runex4_2 ex4.rm ex15.PETSc ex15.rm ex16.PETSc ex16.rm \
                             ex21.PETSc runex21 ex21.rm ex24.PETSc runex24 ex24.rm ex25.PETSc \
                             runex25 ex25.rm ex30.PETSc runex30 runex30_2 runex30_3 ex30.rm ex31.PETSc runex31 ex31.rm ex32.PETSc runex32 ex32.rm \
                             ex34.PETSc runex34 ex34.rm ex36.PETSc runex36_1d runex36_2d runex36_2dp1 runex36_2dp2 runex36_3d runex36_3dp1 ex36.rm

src/dm/impls/da/da3.c

       if ((k >= dd->Zs) && (k < dd->Ze)) {
 
         /* overlay ghost numbers, useful for error checking */
-        base = (dd->Xe-dd->Xs)*(dd->Ye-dd->Ys)*(k-dd->Zs);
+        base = (dd->Xe-dd->Xs)*(dd->Ye-dd->Ys)*(k-dd->Zs)/dd->w;
         ierr = ISLocalToGlobalMappingGetBlockIndices(da->ltogmap,&idx);CHKERRQ(ierr);
         plane=k;
-        /* Keep z wrap around points on the dradrawg */
+        /* Keep z wrap around points on the drawing */
         if (k<0) plane=dd->P+k;
         if (k>=dd->P) plane=k-dd->P;
         ymin = dd->Ys; ymax = dd->Ye;
             if (x<xmin) xcoord = xmax - (xmin-x);
             if (x>=xmax) xcoord = xmin + (x-xmax);
             ierr = PetscDrawString(draw,xcoord/dd->w,ycoord,PETSC_DRAW_BLUE,node);CHKERRQ(ierr);
-            base+=dd->w;
+            base++;
           }
         }
         ierr = ISLocalToGlobalMappingRestoreBlockIndices(da->ltogmap,&idx);CHKERRQ(ierr);
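
This hunk switches from point indices to the block indices returned by ISLocalToGlobalMappingGetBlockIndices(), so idx[] holds one entry per node rather than per degree of freedom; that is why base is divided by dd->w and now advances by one. For illustration (not in the diff), a single dof's global index would be recovered as

  dof_global = dd->w*idx[base] + c;   /* c = component within the node, 0 <= c < dd->w */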

src/dm/impls/da/daindex.c

 #include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
 
 #undef __FUNCT__
-#define __FUNCT__ "DMDAGetGlobalIndices"
-/*@C
-   DMDAGetGlobalIndices - Returns the global node number of all local nodes,
-   including ghost nodes.
-
-   Not Collective
-
-   Input Parameter:
-.  da - the distributed array
-
-   Output Parameters:
-+  n - the number of local elements, including ghost nodes (or NULL)
--  idx - the global indices
-
-   Level: intermediate
-
-   Note:
-   For DMDA_STENCIL_STAR stencils the inactive corner ghost nodes are also included
-   in the list of local indices (even though those nodes are not updated
-   during calls to DMDAXXXToXXX().
-
-   Essentially the same data is returned in the form of a local-to-global mapping
-   with the routine DMDAGetISLocalToGlobalMapping(), that is the recommended interface.
-
-   You must call DMDARestoreGlobalIndices() after you are finished using the indices
-
-   Fortran Note:
-   This routine is used differently from Fortran
-.vb
-        DM          da
-        integer     n,da_array(1)
-        PetscOffset i_da
-        integer     ierr
-        call DMDAGetGlobalIndices(da,n,da_array,i_da,ierr)
-
-   C Access first local entry in list
-        value = da_array(i_da + 1)
-.ve
-
-   See Users-Manual: ch_fortran for details.
-
-.keywords: distributed array, get, global, indices, local-to-global
-
-.seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobalBegin(), DMDARestoreGlobalIndices()
-          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToLocalBegin(), DMDAGetAO(), DMDAGetGlobalIndicesF90()
-          DMDAGetISLocalToGlobalMapping(), DMDACreate3d(), DMDACreate1d(), DMLocalToLocalEnd(), DMDAGetOwnershipRanges()
-@*/
-PetscErrorCode  DMDAGetGlobalIndices(DM da,PetscInt *n,const PetscInt *idx[])
-{
-  PetscErrorCode ierr;
-
-  PetscFunctionBegin;
-  PetscValidHeaderSpecific(da,DM_CLASSID,1);
-  if (n) {
-    PetscInt bs;
-    ierr = ISLocalToGlobalMappingGetSize(da->ltogmap,n);CHKERRQ(ierr);
-    ierr = ISLocalToGlobalMappingGetBlockSize(da->ltogmap,&bs);CHKERRQ(ierr);
-    *n = (*n)*bs;
-  }
-  if (idx) {
-    ierr = ISLocalToGlobalMappingGetIndices(da->ltogmap,idx);CHKERRQ(ierr);
-  }
-  PetscFunctionReturn(0);
-}
-
-#undef __FUNCT__
 #define __FUNCT__ "DMDAGetNatural_Private"
 /*
    Gets the natural number for each global number on the process.
 }
 
 #undef __FUNCT__
-#define __FUNCT__ "DMDARestoreGlobalIndices"
-/*@C
-   DMDARestoreGlobalIndices - Restores the global node number of all local nodes,   including ghost nodes.
-
-   Not Collective
-
-   Input Parameter:
-.  da - the distributed array
-
-   Output Parameters:
-+  n - the number of local elements, including ghost nodes (or NULL)
--  idx - the global indices
-
-   Level: intermediate
-
-   Note:
-   For DMDA_STENCIL_STAR stencils the inactive corner ghost nodes are also included
-   in the list of local indices (even though those nodes are not updated
-   during calls to DMDAXXXToXXX().
-
-   Essentially the same data is returned in the form of a local-to-global mapping
-   with the routine DMDAGetISLocalToGlobalMapping();
-
-   Fortran Note:
-   This routine is used differently from Fortran
-.vb
-        DM          da
-        integer     n,da_array(1)
-        PetscOffset i_da
-        integer     ierr
-        call DMDAGetGlobalIndices(da,n,da_array,i_da,ierr)
-
-   C Access first local entry in list
-        value = da_array(i_da + 1)
-.ve
-
-   See Users-Manual: ch_fortran for details.
-
-.keywords: distributed array, get, global, indices, local-to-global
-
-.seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMLocalToGlobalBegin()
-          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToLocalBegin(), DMDAGetAO(), DMDAGetGlobalIndicesF90()
-          DMDAGetISLocalToGlobalMapping(), DMDACreate3d(), DMDACreate1d(), DMLocalToLocalEnd(), DMDAGetOwnershipRanges()
-@*/
-PetscErrorCode  DMDARestoreGlobalIndices(DM da,PetscInt *n,const PetscInt *idx[])
-{
-  PetscErrorCode ierr;
-
-  PetscFunctionBegin;
-  PetscValidHeaderSpecific(da,DM_CLASSID,1);
-  if (idx) {
-    ierr = ISLocalToGlobalMappingRestoreIndices(da->ltogmap,idx);CHKERRQ(ierr);
-  }
-  PetscFunctionReturn(0);
-}
-
-#undef __FUNCT__
 #define __FUNCT__ "DMDAGetAO"
 /*@
    DMDAGetAO - Gets the application ordering context for a distributed array.
 .keywords: distributed array, get, global, indices, local-to-global
 
 .seealso: DMDACreate2d(), DMDAGetGhostCorners(), DMDAGetCorners(), DMDALocalToGlocal()
-          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetGlobalIndices(), DMDAGetOwnershipRanges(),
+          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetOwnershipRanges(),
           AO, AOPetscToApplication(), AOApplicationToPetsc()
 @*/
 PetscErrorCode  DMDAGetAO(DM da,AO *ao)
   PetscFunctionReturn(0);
 }
 
-/*MC
-    DMDAGetGlobalIndicesF90 - Returns a Fortran90 pointer to the list of
-    global indices (global node number of all local nodes, including
-    ghost nodes).
-
-    Synopsis:
-    DMDAGetGlobalIndicesF90(DM da,integer n,{integer, pointer :: idx(:)},integer ierr)
-
-    Not Collective
-
-    Input Parameter:
-.   da - the distributed array
-
-    Output Parameters:
-+   n - the number of local elements, including ghost nodes (or NULL)
-.   idx - the Fortran90 pointer to the global indices
--   ierr - error code
-
-    Level: intermediate
-
-.keywords: distributed array, get, global, indices, local-to-global, f90
-
-.seealso: DMDAGetGlobalIndices(), DMDARestoreGlobalIndicesF90(), DMDARestoreGlobalIndices()
-M*/
-
-/*MC
-    DMDARestoreGlobalIndicesF90 - Returns a Fortran90 pointer to the list of
-    global indices (global node number of all local nodes, including
-    ghost nodes).
-
-    Synopsis:
-    DMDARestoreGlobalIndicesF90(DM da,integer n,{integer, pointer :: idx(:)},integer ierr)
-
-    Not Collective
-
-    Input Parameter:
-.   da - the distributed array
-
-    Output Parameters:
-+   n - the number of local elements, including ghost nodes (or NULL)
-.   idx - the Fortran90 pointer to the global indices
--   ierr - error code
-
-    Level: intermediate
-
-.keywords: distributed array, get, global, indices, local-to-global, f90
-
-.seealso: DMDARestoreGlobalIndices(), DMDAGetGlobalIndicesF90(), DMDAGetGlobalIndices()
-M*/
 

src/dm/impls/da/dainterp.c

 #define __FUNCT__ "DMCreateAggregates_DA"
 PetscErrorCode  DMCreateAggregates_DA(DM dac,DM daf,Mat *rest)
 {
-  PetscErrorCode   ierr;
-  PetscInt         dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc;
-  PetscInt         dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
-  DMBoundaryType   bxc,byc,bzc,bxf,byf,bzf;
-  DMDAStencilType  stc,stf;
-  PetscInt         i,j,l;
-  PetscInt         i_start,j_start,l_start, m_f,n_f,p_f;
-  PetscInt         i_start_ghost,j_start_ghost,l_start_ghost,m_ghost,n_ghost,p_ghost;
-  const PetscInt   *idx_f;
-  PetscInt         i_c,j_c,l_c;
-  PetscInt         i_start_c,j_start_c,l_start_c, m_c,n_c,p_c;
-  PetscInt         i_start_ghost_c,j_start_ghost_c,l_start_ghost_c,m_ghost_c,n_ghost_c,p_ghost_c;
-  const PetscInt   *idx_c;
-  PetscInt         d;
-  PetscInt         a;
-  PetscInt         max_agg_size;
-  PetscInt         *fine_nodes;
-  PetscScalar      *one_vec;
-  PetscInt         fn_idx;
+  PetscErrorCode         ierr;
+  PetscInt               dimc,Mc,Nc,Pc,mc,nc,pc,dofc,sc;
+  PetscInt               dimf,Mf,Nf,Pf,mf,nf,pf,doff,sf;
+  DMBoundaryType         bxc,byc,bzc,bxf,byf,bzf;
+  DMDAStencilType        stc,stf;
+  PetscInt               i,j,l;
+  PetscInt               i_start,j_start,l_start, m_f,n_f,p_f;
+  PetscInt               i_start_ghost,j_start_ghost,l_start_ghost,m_ghost,n_ghost,p_ghost;
+  const PetscInt         *idx_f;
+  PetscInt               i_c,j_c,l_c;
+  PetscInt               i_start_c,j_start_c,l_start_c, m_c,n_c,p_c;
+  PetscInt               i_start_ghost_c,j_start_ghost_c,l_start_ghost_c,m_ghost_c,n_ghost_c,p_ghost_c;
+  const PetscInt         *idx_c;
+  PetscInt               d;
+  PetscInt               a;
+  PetscInt               max_agg_size;
+  PetscInt               *fine_nodes;
+  PetscScalar            *one_vec;
+  PetscInt               fn_idx;
+  ISLocalToGlobalMapping ltogmf,ltogmc;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(dac,DM_CLASSID,1);
 
   ierr = DMDAGetCorners(daf,&i_start,&j_start,&l_start,&m_f,&n_f,&p_f);CHKERRQ(ierr);
   ierr = DMDAGetGhostCorners(daf,&i_start_ghost,&j_start_ghost,&l_start_ghost,&m_ghost,&n_ghost,&p_ghost);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(daf,NULL,&idx_f);CHKERRQ(ierr);
+
+  ierr = DMGetLocalToGlobalMapping(daf,&ltogmf);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogmf,&idx_f);CHKERRQ(ierr);
 
   ierr = DMDAGetCorners(dac,&i_start_c,&j_start_c,&l_start_c,&m_c,&n_c,&p_c);CHKERRQ(ierr);
   ierr = DMDAGetGhostCorners(dac,&i_start_ghost_c,&j_start_ghost_c,&l_start_ghost_c,&m_ghost_c,&n_ghost_c,&p_ghost_c);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(dac,NULL,&idx_c);CHKERRQ(ierr);
+
+  ierr = DMGetLocalToGlobalMapping(dac,&ltogmc);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogmc,&idx_c);CHKERRQ(ierr);
 
   /*
      Basic idea is as follows. Here's a 2D example, suppose r_x, r_y are the ratios
       }
     }
   }
-  ierr = DMDARestoreGlobalIndices(daf,NULL,&idx_f);CHKERRQ(ierr);
-  ierr = DMDARestoreGlobalIndices(dac,NULL,&idx_c);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogmf,&idx_f);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogmc,&idx_c);CHKERRQ(ierr);
   ierr = PetscFree2(one_vec,fine_nodes);CHKERRQ(ierr);
   ierr = MatAssemblyBegin(*rest, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   ierr = MatAssemblyEnd(*rest, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

src/dm/impls/da/f90-custom/makefile

 
 CFLAGS   =
 FFLAGS   =
-SOURCEC  = zdaindexf90.c zda1f90.c
+SOURCEC  = zda1f90.c
 SOURCEF  =
 SOURCEH  =
 LIBBASE  = libpetscdm

src/dm/impls/da/f90-custom/zdaindexf90.c

-
-#include <petscdmda.h>
-#include <../src/sys/f90-src/f90impl.h>
-
-#if defined(PETSC_HAVE_FORTRAN_CAPS)
-#define dmdagetglobalindicesf90_     DMDAGETGLOBALINDICESF90
-#define dmdarestoreglobalindicesf90_ DMDARESTOREGLOBALINDICESF90
-#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
-#define dmdagetglobalindicesf90_     dmdagetglobalindicesf90
-#define dmdarestoreglobalindicesf90_ dmdarestoreglobalindicesf90
-#endif
-
-PETSC_EXTERN void PETSC_STDCALL dmdagetglobalindicesf90_(DM *da,PetscInt *n,F90Array1d *indices,int *ierr PETSC_F90_2PTR_PROTO(ptrd))
-{
-  const PetscInt *idx;
-  *ierr = DMDAGetGlobalIndices(*da,n,&idx); if (*ierr) return;
-  *ierr = F90Array1dCreate((void*)idx,PETSC_INT,1,*n,indices PETSC_F90_2PTR_PARAM(ptrd));
-}
-
-PETSC_EXTERN void PETSC_STDCALL dmdarestoreglobalindicesf90_(DM *da,PetscInt *n,F90Array1d *ptr,int *ierr PETSC_F90_2PTR_PROTO(ptrd))
-{
-  const PetscInt *fa;
-
-  *ierr = F90Array1dAccess(ptr,PETSC_INT,(void**)&fa PETSC_F90_2PTR_PARAM(ptrd));if (*ierr) return;
-  *ierr = F90Array1dDestroy(ptr,PETSC_INT PETSC_F90_2PTR_PARAM(ptrd));if (*ierr) return;
-  *ierr = DMDARestoreGlobalIndices(*da,n,&fa); if (*ierr) return;
-}
-
-
-

src/dm/impls/da/ftn-custom/makefile

 
 CFLAGS   =
 FFLAGS   =
-SOURCEC  = zdaf.c zda1f.c zda2f.c zda3f.c zdacornf.c zdagetscatterf.c zdaindexf.c zdaviewf.c
+SOURCEC  = zdaf.c zda1f.c zda2f.c zda3f.c zdacornf.c zdagetscatterf.c zdaviewf.c
 SOURCEF  =
 SOURCEH  =
 LIBBASE  = libpetscdm

src/dm/impls/da/hypre/mhyp.c

 #define __FUNCT__ "MatSetupDM_HYPREStruct"
 static PetscErrorCode  MatSetupDM_HYPREStruct(Mat mat,DM da)
 {
-  PetscErrorCode   ierr;
-  Mat_HYPREStruct  *ex = (Mat_HYPREStruct*) mat->data;
-  PetscInt         dim,dof,sw[3],nx,ny,nz,ilower[3],iupper[3],ssize,i;
-  DMBoundaryType   px,py,pz;
-  DMDAStencilType  st;
+  PetscErrorCode         ierr;
+  Mat_HYPREStruct        *ex = (Mat_HYPREStruct*) mat->data;
+  PetscInt               dim,dof,sw[3],nx,ny,nz,ilower[3],iupper[3],ssize,i;
+  DMBoundaryType         px,py,pz;
+  DMDAStencilType        st;
+  ISLocalToGlobalMapping ltog;
 
   PetscFunctionBegin;
   ex->da = da;
 
  /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
   ierr        = MatGetOwnershipRange(mat,&ex->rstart,NULL);CHKERRQ(ierr);
-  ierr        = DMDAGetGlobalIndices(ex->da,NULL, (const PetscInt **) &ex->gindices);CHKERRQ(ierr);
+  ierr        = DMGetLocalToGlobalMapping(ex->da,&ltog);CHKERRQ(ierr);
+  ierr        = ISLocalToGlobalMappingGetIndices(ltog, (const PetscInt **) &ex->gindices);CHKERRQ(ierr);
   ierr        = DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,0);CHKERRQ(ierr);
   ex->gnxgny *= ex->gnx;
   ierr        = DMDAGetCorners(ex->da,&ex->xs,&ex->ys,&ex->zs,&ex->nx,&ex->ny,0);CHKERRQ(ierr);
 #define __FUNCT__ "MatSetupDM_HYPRESStruct"
 static PetscErrorCode  MatSetupDM_HYPRESStruct(Mat mat,DM da)
 {
-  PetscErrorCode   ierr;
-  Mat_HYPRESStruct *ex = (Mat_HYPRESStruct*) mat->data;
-  PetscInt         dim,dof,sw[3],nx,ny,nz;
-  PetscInt         ilower[3],iupper[3],ssize,i;
-  DMBoundaryType   px,py,pz;
-  DMDAStencilType  st;
-  PetscInt         nparts= 1;  /* assuming only one part */
-  PetscInt         part  = 0;
-
+  PetscErrorCode         ierr;
+  Mat_HYPRESStruct       *ex = (Mat_HYPRESStruct*) mat->data;
+  PetscInt               dim,dof,sw[3],nx,ny,nz;
+  PetscInt               ilower[3],iupper[3],ssize,i;
+  DMBoundaryType         px,py,pz;
+  DMDAStencilType        st;
+  PetscInt               nparts= 1;  /* assuming only one part */
+  PetscInt               part  = 0;
+  ISLocalToGlobalMapping ltog;
+
   PetscFunctionBegin;
   ex->da = da;
   ierr   = PetscObjectReference((PetscObject)da);CHKERRQ(ierr);
 
  /* get values that will be used repeatedly in MatSetValuesLocal() and MatZeroRowsLocal() */
   ierr = MatGetOwnershipRange(mat,&ex->rstart,NULL);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(ex->da,NULL,(const PetscInt **) &ex->gindices);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(ex->da,&ltog);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltog, (const PetscInt **) &ex->gindices);CHKERRQ(ierr);
   ierr = DMDAGetGhostCorners(ex->da,0,0,0,&ex->gnx,&ex->gnxgny,&ex->gnxgnygnz);CHKERRQ(ierr);
 
   ex->gnxgny    *= ex->gnx;

src/dm/impls/plex/plexdistribute.c

   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
-  ierr = ISGlobalToLocalMappingApply(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
+  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
   if (flg) {
     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);

src/dm/impls/plex/plexlabel.c

   ierr = PetscFree3(sendcnts,offsets,displs);CHKERRQ(ierr);
   ierr = PetscFree(stratumSizes);CHKERRQ(ierr);
   /* Renumber points */
-  ierr = ISGlobalToLocalMappingApply(renumbering, IS_GTOLM_MASK, (*labelNew)->stratumOffsets[(*labelNew)->numStrata], (*labelNew)->points, NULL, (*labelNew)->points);CHKERRQ(ierr);
+  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, (*labelNew)->stratumOffsets[(*labelNew)->numStrata], (*labelNew)->points, NULL, (*labelNew)->points);CHKERRQ(ierr);
   /* Sort points */
   for (s = 0; s < (*labelNew)->numStrata; ++s) {ierr = PetscSortInt((*labelNew)->stratumSizes[s], &(*labelNew)->points[(*labelNew)->stratumOffsets[s]]);CHKERRQ(ierr);}
   PetscFunctionReturn(0);

src/docs/tex/manual/part2.tex

 numbering scheme back to the global PETSc numbering can be handled with the
 ISLocalToGlobalMapping routines.
 
-If one is given a list of indices in a global numbering, the routine
+If one is given a list of block indices in a global numbering, the routine
 \begin{tabbing}
-  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping ctx,\\
+  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping ctx,\\
                               ISGlobalToLocalMappingType type,int nin,int idxin[],int *nout,int idxout[]);
 \end{tabbing}
 \sindex{global to local mapping} will provide
 represented locally in the mapping are not included in \trl{idxout}, so that
 potentially \trl{nout} is smaller than \trl{nin}.  One must
 pass in an array long enough to hold all the indices. One can call
-ISGlobalToLocalMappingApply() with \trl{idxout} equal to
+ISGlobalToLocalMappingApplyBlock() with \trl{idxout} equal to
 PETSC\_NULL to determine the required length (returned in
 \trl{nout}) and then allocate the required space and call
-ISGlobalToLocalMappingApply() a second time to set the values.
+ISGlobalToLocalMappingApplyBlock() a second time to set the values.
 
 Often it is convenient to set elements into a vector using the local node
 numbering rather than the global node numbering (e.g.,  each process may
 \begin{itemize}
 \item
 the global node number of each local node
-including the ghost nodes. This number may be determined by using the
-command
-\begin{tabbing}
-  DMDAGetGlobalIndices(DM da,int *n,const int *idx[]);
-  DMDARestoreGlobalIndices(DM da,int *n,const int *idx[]);
-\end{tabbing}
-The output argument \trl{n} contains the number of
-local nodes, including ghost nodes, while \trl{idx} contains a list of length
-\trl{n} containing the global indices that correspond to the local nodes. Either
-parameter may be omitted by passing PETSC\_NULL. Note that the Fortran
-interface differs slightly; see Section~\ref{sec_fortranarrays} for details.
-\item
-or to set up the vectors and matrices so that their entries may be
-added using the local numbering. This is done by first calling
+including the ghost nodes can be obtained by first calling
 \begin{tabbing}
-  DMDAGetISLocalToGlobalMapping(DM da,ISLocalToGlobalMapping *map);
+  DMGetLocalToGlobalMapping(DM da,ISLocalToGlobalMapping *map);
 \end{tabbing}
 followed by
 \begin{tabbing}
 Since Fortran 77 does not allow arrays to be returned in routine
 arguments, all PETSc routines that return arrays, such as
 VecGetArray(), MatDenseGetArray(),
-ISGetIndices(), and DMDAGetGlobalIndices()
+and ISGetIndices(), 
 are defined slightly differently in Fortran than in C.
 Instead of returning the array itself, these routines
 accept as input a user-specified array of dimension one and return an
    call VecGetArray(x,xx\_v,xx\_i,ierr)\\
    call MatDenseGetArray(A,aa\_v,aa\_i,ierr)\\
    call ISGetIndices(s,ss\_v,ss\_i,ierr)\\
-   call DMDAGetGlobalIndices(d,nloc,dd\_v,dd\_i,ierr)
 \end{tabbing}
 
 To access array elements directly, both the user-specified array and
 This is different from the use of VecSetValues()
 where, indexing always starts with 0.
 
-{\em Note}: If using VecGetArray(), MatDenseGetArray(), ISGetIndices(),
-or DMDAGetGlobalIndices()
+{\em Note}: If using VecGetArray(), MatDenseGetArray(), or ISGetIndices(),
 from Fortran, the user {\em must not} compile the Fortran code with options
 to check for ``array entries out of bounds'' (e.g., on the IBM RS/6000 this
 is done with the \trl{-C} compiler option, so never use the \trl{-C} option with this).
  PetscInitialize(char *filename,int \trl{ierr)}\\
  PetscError(MPI\_COMM,int err,char *message,int \trl{ierr)}\\
  VecGetArray(), MatDenseGetArray()\\
- ISGetIndices(), DMDAGetGlobalIndices()\\
+ ISGetIndices()\\
  VecDuplicateVecs(), VecDestroyVecs()\\
  PetscOptionsGetString()
 \end{tabbing}
 \begin{tabbing}
  VecGetArrayF90(), VecRestoreArrayF90()\\
  VecDuplicateVecsF90(), VecDestroyVecsF90()\\
- DMDAVecGetArrayF90(), DMDAGetGlobalIndicesF90()\\
+ DMDAVecGetArrayF90(), ISLocalToGlobalMappingGetIndicesF90()\\
  MatDenseGetArrayF90(), MatDenseRestoreArrayF90()\\
  ISGetIndicesF90(), ISRestoreIndicesF90()
 \end{tabbing}

src/docs/website/documentation/changes/dev.html

       <h4>DM/DA:</h4>
       <ul>
         <li>DMDAGetLocalToGlobalMappingBlock() has been removed, the DMDAGetLocalToGlobalMapping() now handles both block and non-block cases</li>
+        <li>DMDAGetGlobalIndices(DM,PetscInt*,const PetscInt*[]) and DMDARestoreGlobalIndices(DM,PetscInt*,const PetscInt*[]) have been removed; use DMGetLocalToGlobalMapping() to get this information</li>
         <li>DMADDA has been removed, it never worked correctly</li>
         <li>The MatType argument is removed from DMCreateMatrix(), you can use DMSetMatType() to indicate the type you want used with a DM, defaults to MATAIJ</li>
         <li><tt>DMDABoundaryType</tt> has become <tt>DMBoundaryType</tt>, and all the enumeration values have also been renamed.</li>

src/docs/website/documentation/faq.html

       </p>
 
       <p>
-        PETSc also contains a balancing Neumann-Neumann preconditioner, see the
-        manual page for PCNN. This requires matrices be constructed with
-        <code>MatCreateIS()</code> via the finite element method. There are currently
-        no examples that demonstrate its use.
+        PETSc also contains a balancing Neumann-Neumann type preconditioner; see the
+        manual page for PCBDDC. This requires that matrices be constructed with
+        <code>MatCreateIS()</code> via the finite element method. See src/ksp/ksp/examples/tests/ex59.c for an example.
       </p>
 
       <h3><a name="blocks">Can I create BAIJ matrices with different size blocks for different block rows?</a></h3>

src/ksp/ksp/examples/tests/ex14.c

    Notes:
    Due to grid point reordering with DMDAs, we must always work
    with the local grid points, and then transform them to the new
-   global numbering with the "ltog" mapping (via DMDAGetGlobalIndices()).
+   global numbering with the "ltog" mapping.
    We cannot work directly with the global numbers for the original
    uniprocessor grid!
 */
 PetscErrorCode ComputeJacobian(AppCtx *user,Vec X,Mat jac)
 {
-  PetscErrorCode ierr;
-  Vec            localX = user->localX;   /* local vector */
-  const PetscInt *ltog;                   /* local-to-global mapping */
-  PetscInt       i,j,row,mx,my,col[5];
-  PetscInt       nloc,xs,ys,xm,ym,gxs,gys,gxm,gym,grow;
-  PetscScalar    two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x;
+  PetscErrorCode         ierr;
+  Vec                    localX = user->localX;   /* local vector */
+  const PetscInt         *ltog;                   /* local-to-global mapping */
+  PetscInt               i,j,row,mx,my,col[5];
+  PetscInt               xs,ys,xm,ym,gxs,gys,gxm,gym,grow;
+  PetscScalar            two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x;
+  ISLocalToGlobalMapping ltogm;
 
   mx = user->mx;            my = user->my;            lambda = user->param;
   hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
   /*
      Get the global node numbers for all local nodes, including ghost points
   */
-  ierr = DMDAGetGlobalIndices(user->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(user->da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
 
   /*
      Compute entries for the locally owned part of the Jacobian.
       ierr = MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
     }
   }
-  ierr = DMDARestoreGlobalIndices(user->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
 
   /*
      Assemble matrix, using the 2-step process:

src/ksp/ksp/examples/tests/ex19.c

 #define __FUNCT__ "FormJacobian_Grid"
 int FormJacobian_Grid(AppCtx *user,GridCtx *grid,Mat *J)
 {
-  Mat            jac = *J;
-  PetscErrorCode ierr;
-  PetscInt       i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
-  PetscInt       nloc,grow;
-  const PetscInt *ltog;
-  PetscScalar    two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
+  Mat                    jac = *J;
+  PetscErrorCode         ierr;
+  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
+  PetscInt               grow;
+  const PetscInt         *ltog;
+  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
+  ISLocalToGlobalMapping ltogm;
 
   mx    = grid->mx;               my = grid->my;
   hx    = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
   /* Get ghost points */
   ierr = DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
   ierr = DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(grid->da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
 
   /* Evaluate Jacobian of function */
   for (j=ys; j<ys+ym; j++) {
       }
     }
   }
-  ierr = DMDARestoreGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
   ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 

src/ksp/ksp/examples/tests/ex26.c

 #define __FUNCT__ "FormJacobian_Grid"
 int FormJacobian_Grid(GridCtx *grid,Mat *J)
 {
-  Mat            jac = *J;
-  PetscErrorCode ierr;
-  PetscInt       i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
-  PetscInt       nloc,grow;
-  const PetscInt *ltog;
-  PetscScalar    two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
+  Mat                    jac = *J;
+  PetscErrorCode         ierr;
+  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
+  PetscInt               grow;
+  const PetscInt         *ltog;
+  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
+  ISLocalToGlobalMapping ltogm;
 
   mx    = grid->mx;            my = grid->my;
   hx    = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
   /* Get ghost points */
   ierr = DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
   ierr = DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(grid->da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
 
   /* Evaluate Jacobian of function */
   for (j=ys; j<ys+ym; j++) {
       }
     }
   }
-  ierr = DMDARestoreGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
   ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   return 0;

src/ksp/ksp/examples/tests/ex29.c

 #define __FUNCT__ "FormJacobian_Grid"
 int FormJacobian_Grid(GridCtx *grid,Mat *J)
 {
-  Mat            jac = *J;
-  PetscErrorCode ierr;
-  PetscInt       i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
-  PetscInt       nloc,grow;
-  const PetscInt *ltog;
-  PetscScalar    two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
+  Mat                    jac = *J;
+  PetscErrorCode         ierr;
+  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
+  PetscInt               grow;
+  const PetscInt         *ltog;
+  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
+  ISLocalToGlobalMapping ltogm;
 
   mx    = grid->mx;               my = grid->my;
   hx    = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
   /* Get ghost points */
   ierr = DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
   ierr = DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(grid->da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
 
   /* Evaluate Jacobian of function */
   for (j=ys; j<ys+ym; j++) {
       }
     }
   }
-  ierr = DMDARestoreGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
   ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   return 0;

src/ksp/ksp/examples/tests/makefile

 	   if (${DIFF} output/ex13_1.out ex13_1.tmp) then true; \
	   else printf "${PWD}\nPossible problem with ex13_1, diffs above\n=========================================\n"; fi; \
 	   ${RM} -f ex13_1.tmp
+runex14:
+	-@${MPIEXEC} -n 1  ./ex14  > ex14_1.tmp 2>&1; \
+	   if (${DIFF} output/ex14_1.out ex14_1.tmp) then true; \
+	   else printf "${PWD}\nPossible problem with ex14_1, diffs above\n=========================================\n"; fi; \
+	   ${RM} -f ex14_1.tmp
 # See http://www.mcs.anl.gov/petsc/documentation/faq.html#datafiles for how to obtain the datafiles used below
 runex16f:
 	-@${MPIEXEC} -n 1 ./ex16f   -f ${DATAFILESPATH}/matrices/arco1 -options_left no> ex16f_1.tmp 2>&1; \
 	done
 
 TESTEXAMPLES_C		       = ex1.PETSc ex1.rm ex3.PETSc runex3 runex3_2 ex3.rm ex4.PETSc runex4 runex4_3 \
-                                 runex4_5 ex4.rm ex7.PETSc ex7.rm ex14.PETSc ex14.rm ex19.PETSc runex19 runex19_2 ex19.rm \
+                                 runex4_5 ex4.rm ex7.PETSc ex7.rm ex14.PETSc runex14 ex14.rm ex19.PETSc runex19 runex19_2 ex19.rm \
                                  ex22.PETSc runex22 runex22_2 ex22.rm \
                                  ex24.PETSc runex24 runex24_2 runex24_3 runex24_4 ex24.rm \
                                  ex26.PETSc runex26 runex26_2 ex26.rm ex27.PETSc ex27.rm \

src/ksp/ksp/examples/tests/output/ex14_1.out

+Initial function norm = 0.207564
+   linear solve iterations = 2, xnorm=1.21872, ynorm=0.228979
+Iteration 1, function norm = 0.0148968
+   linear solve iterations = 2, xnorm=1.23797, ynorm=0.0192507
+Iteration 2, function norm = 0.000113967
+   linear solve iterations = 2, xnorm=1.23812, ynorm=0.000149569
+Iteration 3, function norm = 6.92417e-09
+   linear solve iterations = 2, xnorm=1.23812, ynorm=9.0883e-09
+Iteration 4, function norm = 4.44089e-16
+Converged due to function norm 4.44089e-16 < 2.07564e-09 (relative tolerance)
+Number of SNES iterations = 4

src/ksp/ksp/examples/tutorials/ex14f.F

 !   Notes:
 !   Due to grid point reordering with DMDAs, we must always work
 !   with the local grid points, and then transform them to the new
-!   global numbering with the 'ltog' mapping (via DMDAGetGlobalIndices()).
+!   global numbering with the 'ltog' mapping.
 !   We cannot work directly with the global numbers for the original
 !   uniprocessor grid!
 !
       PetscInt     ltog(1)
       PetscOffset idltog,idx
       PetscErrorCode ierr
-      PetscInt nloc,xs,ys,xm,ym
+      PetscInt xs,ys,xm,ym
       PetscInt gxs,gys,gxm,gym
       PetscInt grow(1),i,j
       PetscInt row,mx,my,ione
       PetscScalar v(5),hx,hy,hxdhy
       PetscScalar hydhx,sc,xx(1)
       Mat         B
+      ISLocalToGlobalMapping ltogm
       common   /mycommon/ mx,my,B,localX,localF,da
 
       ione   = 1
 
 !  Get the global node numbers for all local nodes, including ghost points
 
-      call DMDAGetGlobalIndices(da,nloc,ltog,idltog,ierr)
+      call DMGetLocalToGlobalMapping(da,ltogm,ierr)
+      call ISLocalToGlobalMappingGetIndices(ltogm,ltog,idltog,ierr)
 
 !  Compute entries for the locally owned part of the Jacobian.
 !   - Currently, all PETSc parallel matrix formats are partitioned by
  20     continue
  10   continue
 
-      call DMDARestoreGlobalIndices(da,nloc,ltog,idltog,ierr)
+      call ISLocalToGlobalMappingRestoreIndices(ltogm,ltog,idltog,ierr)
 
 !  Assemble matrix, using the 2-step process:
 !    MatAssemblyBegin(), MatAssemblyEnd().

src/ksp/ksp/examples/tutorials/ex43.c

 #define __FUNCT__ "BCApply_EAST"
 static PetscErrorCode BCApply_EAST(DM da,PetscInt d_idx,PetscScalar bc_val,Mat A,Vec b)
 {
-  DM             cda;
-  Vec            coords;
-  PetscInt       si,sj,nx,ny,i,j;
-  PetscInt       M,N;
-  DMDACoor2d     **_coords;
-  const PetscInt *g_idx;
-  PetscInt       *bc_global_ids;
-  PetscScalar    *bc_vals;
-  PetscInt       nbcs;
-  PetscInt       n_dofs;
-  PetscErrorCode ierr;
+  DM                     cda;
+  Vec                    coords;
+  PetscInt               si,sj,nx,ny,i,j;
+  PetscInt               M,N;
+  DMDACoor2d             **_coords;
+  const PetscInt         *g_idx;
+  PetscInt               *bc_global_ids;
+  PetscScalar            *bc_vals;
+  PetscInt               nbcs;
+  PetscInt               n_dofs;
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping ltogm;
 
   PetscFunctionBeginUser;
   /* enforce bc's */
-  ierr = DMDAGetGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&g_idx);CHKERRQ(ierr);
 
   ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(da,&coords);CHKERRQ(ierr);
 
     bc_vals[j] =  bc_val;
   }
-  ierr = DMDARestoreGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&g_idx);CHKERRQ(ierr);
   nbcs = 0;
   if ((si+nx) == (M)) nbcs = ny;
 
 #define __FUNCT__ "BCApply_WEST"
 static PetscErrorCode BCApply_WEST(DM da,PetscInt d_idx,PetscScalar bc_val,Mat A,Vec b)
 {
-  DM             cda;
-  Vec            coords;
-  PetscInt       si,sj,nx,ny,i,j;
-  PetscInt       M,N;
-  DMDACoor2d     **_coords;
-  const PetscInt *g_idx;
-  PetscInt       *bc_global_ids;
-  PetscScalar    *bc_vals;
-  PetscInt       nbcs;
-  PetscInt       n_dofs;
-  PetscErrorCode ierr;
+  DM                     cda;
+  Vec                    coords;
+  PetscInt               si,sj,nx,ny,i,j;
+  PetscInt               M,N;
+  DMDACoor2d             **_coords;
+  const PetscInt         *g_idx;
+  PetscInt               *bc_global_ids;
+  PetscScalar            *bc_vals;
+  PetscInt               nbcs;
+  PetscInt               n_dofs;
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping ltogm;
 
   PetscFunctionBeginUser;
   /* enforce bc's */
-  ierr = DMDAGetGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&g_idx);CHKERRQ(ierr);
 
   ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(da,&coords);CHKERRQ(ierr);
 
     bc_vals[j] =  bc_val;
   }
-  ierr = DMDARestoreGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&g_idx);CHKERRQ(ierr);
   nbcs = 0;
   if (si == 0) nbcs = ny;
 
 #define __FUNCT__ "BCApply_NORTH"
 static PetscErrorCode BCApply_NORTH(DM da,PetscInt d_idx,PetscScalar bc_val,Mat A,Vec b)
 {
-  DM             cda;
-  Vec            coords;
-  PetscInt       si,sj,nx,ny,i,j;
-  PetscInt       M,N;
-  DMDACoor2d     **_coords;
-  const PetscInt *g_idx;
-  PetscInt       *bc_global_ids;
-  PetscScalar    *bc_vals;
-  PetscInt       nbcs;
-  PetscInt       n_dofs;
-  PetscErrorCode ierr;
+  DM                     cda;
+  Vec                    coords;
+  PetscInt               si,sj,nx,ny,i,j;
+  PetscInt               M,N;
+  DMDACoor2d             **_coords;
+  const PetscInt         *g_idx;
+  PetscInt               *bc_global_ids;
+  PetscScalar            *bc_vals;
+  PetscInt               nbcs;
+  PetscInt               n_dofs;
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping ltogm;
 
   PetscFunctionBeginUser;
   /* enforce bc's */
-  ierr = DMDAGetGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&g_idx);CHKERRQ(ierr);
 
   ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(da,&coords);CHKERRQ(ierr);
 
     bc_vals[i] =  bc_val;
   }
-  ierr = DMDARestoreGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&g_idx);CHKERRQ(ierr);
   nbcs = 0;
   if ((sj+ny) == (N)) nbcs = nx;
 
 #define __FUNCT__ "BCApply_SOUTH"
 static PetscErrorCode BCApply_SOUTH(DM da,PetscInt d_idx,PetscScalar bc_val,Mat A,Vec b)
 {
-  DM             cda;
-  Vec            coords;
-  PetscInt       si,sj,nx,ny,i,j;
-  PetscInt       M,N;
-  DMDACoor2d     **_coords;
-  const PetscInt *g_idx;
-  PetscInt       *bc_global_ids;
-  PetscScalar    *bc_vals;
-  PetscInt       nbcs;
-  PetscInt       n_dofs;
-  PetscErrorCode ierr;
+  DM                     cda;
+  Vec                    coords;
+  PetscInt               si,sj,nx,ny,i,j;
+  PetscInt               M,N;
+  DMDACoor2d             **_coords;
+  const PetscInt         *g_idx;
+  PetscInt               *bc_global_ids;
+  PetscScalar            *bc_vals;
+  PetscInt               nbcs;
+  PetscInt               n_dofs;
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping ltogm;
 
   PetscFunctionBeginUser;
   /* enforce bc's */
-  ierr = DMDAGetGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&g_idx);CHKERRQ(ierr);
 
   ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(da,&coords);CHKERRQ(ierr);
 
     bc_vals[i] =  bc_val;
   }
-  ierr = DMDARestoreGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&g_idx);CHKERRQ(ierr);
   nbcs = 0;
   if (sj == 0) nbcs = nx;
 

src/ksp/ksp/examples/tutorials/ex49.c

 #define __FUNCT__ "BCApply_EAST"
 static PetscErrorCode BCApply_EAST(DM da,PetscInt d_idx,PetscScalar bc_val,Mat A,Vec b)
 {
-  DM             cda;
-  Vec            coords;
-  PetscInt       si,sj,nx,ny,i,j;
-  PetscInt       M,N;
-  DMDACoor2d     **_coords;
-  const PetscInt *g_idx;
-  PetscInt       *bc_global_ids;
-  PetscScalar    *bc_vals;
-  PetscInt       nbcs;
-  PetscInt       n_dofs;
-  PetscErrorCode ierr;
+  DM                     cda;
+  Vec                    coords;
+  PetscInt               si,sj,nx,ny,i,j;
+  PetscInt               M,N;
+  DMDACoor2d             **_coords;
+  const PetscInt         *g_idx;
+  PetscInt               *bc_global_ids;
+  PetscScalar            *bc_vals;
+  PetscInt               nbcs;
+  PetscInt               n_dofs;
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping ltogm;
 
   PetscFunctionBeginUser;
   /* enforce bc's */
-  ierr = DMDAGetGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&g_idx);CHKERRQ(ierr);
 
   ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(da,&coords);CHKERRQ(ierr);
 
     bc_vals[j] =  bc_val;
   }
-  ierr = DMDARestoreGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&g_idx);CHKERRQ(ierr);
   nbcs = 0;
   if ((si+nx) == (M)) nbcs = ny;
 
 #define __FUNCT__ "BCApply_WEST"
 static PetscErrorCode BCApply_WEST(DM da,PetscInt d_idx,PetscScalar bc_val,Mat A,Vec b)
 {
-  DM             cda;
-  Vec            coords;
-  PetscInt       si,sj,nx,ny,i,j;
-  PetscInt       M,N;
-  DMDACoor2d     **_coords;
-  const PetscInt *g_idx;
-  PetscInt       *bc_global_ids;
-  PetscScalar    *bc_vals;
-  PetscInt       nbcs;
-  PetscInt       n_dofs;
-  PetscErrorCode ierr;
+  DM                     cda;
+  Vec                    coords;
+  PetscInt               si,sj,nx,ny,i,j;
+  PetscInt               M,N;
+  DMDACoor2d             **_coords;
+  const PetscInt         *g_idx;
+  PetscInt               *bc_global_ids;
+  PetscScalar            *bc_vals;
+  PetscInt               nbcs;
+  PetscInt               n_dofs;
+  PetscErrorCode         ierr;
+  ISLocalToGlobalMapping ltogm;
 
   PetscFunctionBeginUser;
   /* enforce bc's */
-  ierr = DMDAGetGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = DMGetLocalToGlobalMapping(da,&ltogm);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&g_idx);CHKERRQ(ierr);
 
   ierr = DMGetCoordinateDM(da,&cda);CHKERRQ(ierr);
   ierr = DMGetCoordinatesLocal(da,&coords);CHKERRQ(ierr);
 
     bc_vals[j] =  bc_val;
   }
-  ierr = DMDARestoreGlobalIndices(da,NULL,&g_idx);CHKERRQ(ierr);
+  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&g_idx);CHKERRQ(ierr);
   nbcs = 0;
   if (si == 0) nbcs = ny;
 

src/ksp/pc/impls/bddc/bddc.c

 
    Approximate local solvers are automatically adapted for singular linear problems (see [1]) if the user has provided the nullspace using PCBDDCSetNullSpace
 
-   Boundary nodes are split in vertices, edges and faces using information from the local to global mapping of dofs and the local connectivity graph of nodes. The latter can be customized by using PCBDDCSetLocalAdjacencyGraph
+   Boundary nodes are split into vertices, edges, and faces using information from the local to global mapping of dofs and the local connectivity graph of nodes. The latter can be customized by using PCBDDCSetLocalAdjacencyGraph()
 
-   Constraints can be customized by attaching a MatNullSpace object to the MATIS matrix via MatSetNearNullSpace.
+   Constraints can be customized by attaching a MatNullSpace object to the MATIS matrix via MatSetNearNullSpace().
 
   Change of basis is performed similarly to [2] when requested. When more than one constraint is present on a single connected component (i.e. an edge or a face), a robust method based on local QR factorizations is used.
 

src/mat/impls/is/matis.c

 }
 
 /*MC
-   MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners.
+   MATIS - MATIS = "is" - A matrix type to be used with Neumann-Neumann type preconditioners; see PCBDDC.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix vector
    product is handled "implicitly".
 
   Level: advanced
 
-.seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping()
+.seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC
 
 M*/
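
A minimal sketch of setting up a MATIS matrix for use with PCBDDC (map, the sizes n/N, and the local element data nr/rows/nc/cols/vals are assumed to come from the application):

  Mat A;

  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,n,n,N,N);CHKERRQ(ierr);
  ierr = MatSetType(A,MATIS);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(A,map,map);CHKERRQ(ierr);
  /* each process assembles only its local Neumann problem */
  ierr = MatSetValuesLocal(A,nr,rows,nc,cols,vals,ADD_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);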
 

src/mat/impls/localref/mlocalref.c

   PetscErrorCode ierr;
   const PetscInt *idx;
   PetscInt       m,*idxm;
+  PetscBool      isblock;
 
   PetscFunctionBegin;
   PetscValidHeaderSpecific(is,IS_CLASSID,1);
   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,2);
   PetscValidPointer(cltog,3);
+  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
+  if (isblock) {
+    PetscInt bs,lbs;
+
+    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
+    ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&lbs);CHKERRQ(ierr);
+    if (bs == lbs) {
+      ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
+      m    = m/bs;
+      ierr = ISBlockGetIndices(is,&idx);CHKERRQ(ierr);
+      ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);
+      ierr = ISLocalToGlobalMappingApplyBlock(ltog,m,idx,idxm);CHKERRQ(ierr);
+      ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is),bs,m,idxm,PETSC_OWN_POINTER,cltog);CHKERRQ(ierr);
+      ierr = ISBlockRestoreIndices(is,&idx);CHKERRQ(ierr);
+      PetscFunctionReturn(0);
+    }
+  }
   ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
   ierr = ISGetIndices(is,&idx);CHKERRQ(ierr);
   ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);

src/snes/examples/tests/ex11.c

-
-static char help[] ="Solves the 2-dim Bratu (SFI - solid fuel ignition) test problem, where\n\
-analytic formation of the Jacobian is the default.  \n\
-\n\
-  Solves the linear systems via 2 level additive Schwarz \n\
-\n\
-The command line\n\
-options are:\n\
-  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
-     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\
-  -Mx <xg>, where <xg> = number of grid points in the x-direction on coarse grid\n\
-  -My <yg>, where <yg> = number of grid points in the y-direction on coarse grid\n\n";
-
-/*
-    1) Solid Fuel Ignition (SFI) problem.  This problem is modeled by
-    the partial differential equation
-
-            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1 ,
-
-    with boundary conditions
-
-             u = 0  for  x = 0, x = 1, y = 0, y = 1.
-
-    A finite difference approximation with the usual 5-point stencil
-    is used to discretize the boundary value problem to obtain a nonlinear
-    system of equations.
-
-   The code has two cases for multilevel solver
-     I. the coarse grid Jacobian is computed in parallel
-     II. the coarse grid Jacobian is computed sequentially on each processor
-   in both cases the coarse problem is SOLVED redundantly.
-
-*/
-
-#include <petscsnes.h>
-#include <petscdm.h>
-#include <petscdmda.h>
-
-/* User-defined application contexts */
-
-typedef struct {
-  PetscInt mx,my;               /* number grid points in x and y direction */
-  Vec      localX,localF;       /* local vectors with ghost region */
-  DM       da;
-  Vec      x,b,r;               /* global vectors */
-  Mat      J;                   /* Jacobian on grid */
-} GridCtx;
-
-typedef struct {
-  PetscReal  param;             /* test problem parameter */
-  GridCtx    fine;
-  GridCtx    coarse;
-  KSP        ksp_coarse;
-  KSP        ksp_fine;
-  PetscInt   ratio;
-  Mat        R;                 /* restriction fine to coarse */
-  Vec        Rscale;
-  PetscBool  redundant_build;   /* build coarse matrix redundantly */
-  Vec        localall;          /* contains entire coarse vector on each processor in NATURAL order*/
-  VecScatter tolocalall;        /* maps from parallel "global" coarse vector to localall */
-  VecScatter fromlocalall;      /* maps from localall vector back to global coarse vector */
-} AppCtx;
-
-#define COARSE_LEVEL 0
-#define FINE_LEVEL   1
-
-extern PetscErrorCode FormFunction(SNES,Vec,Vec,void*), FormInitialGuess1(AppCtx*,Vec);
-extern PetscErrorCode FormJacobian(SNES,Vec,Mat,Mat,void*);
-extern PetscErrorCode FormInterpolation(AppCtx*);
-
-/*
-      Mm_ratio - ration of grid lines between fine and coarse grids.
-*/
-#undef __FUNCT__
-#define __FUNCT__ "main"
-int main(int argc, char **argv)
-{
-  SNES           snes;
-  AppCtx         user;
-  PetscErrorCode ierr;
-  PetscInt       its, N, n, Nx = PETSC_DECIDE, Ny = PETSC_DECIDE, nlocal,Nlocal;
-  PetscMPIInt    size;
-  PetscReal      bratu_lambda_max = 6.81, bratu_lambda_min = 0.;
-  KSP            ksp;
-  PC             pc;
-
-  /*
-      Initialize PETSc, note that default options in ex11options can be
-      overridden at the command line
-  */
-  PetscInitialize(&argc, &argv,"ex11options",help);
-
-  user.ratio     = 2;
-  user.coarse.mx = 5; user.coarse.my = 5; user.param = 6.0;
-  ierr           = PetscOptionsGetInt(NULL,"-Mx",&user.coarse.mx,NULL);CHKERRQ(ierr);
-  ierr           = PetscOptionsGetInt(NULL,"-My",&user.coarse.my,NULL);CHKERRQ(ierr);
-  ierr           = PetscOptionsGetInt(NULL,"-ratio",&user.ratio,NULL);CHKERRQ(ierr);
-  user.fine.mx   = user.ratio*(user.coarse.mx-1)+1; user.fine.my = user.ratio*(user.coarse.my-1)+1;
-
-  ierr = PetscOptionsHasName(NULL,"-redundant_build",&user.redundant_build);CHKERRQ(ierr);
-  if (user.redundant_build) {
-    ierr = PetscPrintf(PETSC_COMM_WORLD,"Building coarse Jacobian redundantly\n");CHKERRQ(ierr);
-  }
-
-  ierr = PetscPrintf(PETSC_COMM_WORLD,"Coarse grid size %D by %D\n",user.coarse.mx,user.coarse.my);CHKERRQ(ierr);
-  ierr = PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",user.fine.mx,user.fine.my);CHKERRQ(ierr);
-
-  ierr = PetscOptionsGetReal(NULL,"-par",&user.param,NULL);CHKERRQ(ierr);
-  if (user.param >= bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ(PETSC_COMM_SELF,1,"Lambda is out of range");
-  n = user.fine.mx*user.fine.my; N = user.coarse.mx*user.coarse.my;
-
-  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
-  ierr = PetscOptionsGetInt(NULL,"-Nx",&Nx,NULL);CHKERRQ(ierr);
-  ierr = PetscOptionsGetInt(NULL,"-Ny",&Ny,NULL);CHKERRQ(ierr);
-
-  /* Set up distributed array for fine grid */
-  ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,
-                      user.fine.my,Nx,Ny,1,1,NULL,NULL,&user.fine.da);CHKERRQ(ierr);
-  ierr = DMCreateGlobalVector(user.fine.da,&user.fine.x);CHKERRQ(ierr);
-  ierr = VecDuplicate(user.fine.x,&user.fine.r);CHKERRQ(ierr);
-  ierr = VecDuplicate(user.fine.x,&user.fine.b);CHKERRQ(ierr);
-  ierr = VecGetLocalSize(user.fine.x,&nlocal);CHKERRQ(ierr);
-  ierr = DMCreateLocalVector(user.fine.da,&user.fine.localX);CHKERRQ(ierr);
-  ierr = VecDuplicate(user.fine.localX,&user.fine.localF);CHKERRQ(ierr);
-  ierr = MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&user.fine.J);CHKERRQ(ierr);
-
-  /* Set up distributed array for coarse grid */
-  ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,
-                      user.coarse.my,Nx,Ny,1,1,NULL,NULL,&user.coarse.da);CHKERRQ(ierr);
-  ierr = DMCreateGlobalVector(user.coarse.da,&user.coarse.x);CHKERRQ(ierr);
-  ierr = VecDuplicate(user.coarse.x,&user.coarse.b);CHKERRQ(ierr);
-  if (user.redundant_build) {
-    /* Create scatter from parallel global numbering to redundant with natural ordering */
-    ierr = DMDAGlobalToNaturalAllCreate(user.coarse.da,&user.tolocalall);CHKERRQ(ierr);
-    ierr = DMDANaturalAllToGlobalCreate(user.coarse.da,&user.fromlocalall);CHKERRQ(ierr);
-    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&user.localall);CHKERRQ(ierr);
-    /* Create sequential matrix to hold entire coarse grid Jacobian on each processor */
-    ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,N,N,5,NULL,&user.coarse.J);CHKERRQ(ierr);
-  } else {
-    ierr = VecGetLocalSize(user.coarse.x,&Nlocal);CHKERRQ(ierr);
-    ierr = DMCreateLocalVector(user.coarse.da,&user.coarse.localX);CHKERRQ(ierr);
-    ierr = VecDuplicate(user.coarse.localX,&user.coarse.localF);CHKERRQ(ierr);
-    /* We will compute the coarse Jacobian in parallel */
-    ierr = MatCreateAIJ(PETSC_COMM_WORLD,Nlocal,Nlocal,N,N,5,NULL,3,NULL,&user.coarse.J);CHKERRQ(ierr);
-  }
-
-  /* Create nonlinear solver */
-  ierr = SNESCreate(PETSC_COMM_WORLD,&snes);CHKERRQ(ierr);
-
-  /* provide user function and Jacobian */
-  ierr = SNESSetFunction(snes,user.fine.b,FormFunction,&user);CHKERRQ(ierr);
-  ierr = SNESSetJacobian(snes,user.fine.J,user.fine.J,FormJacobian,&user);CHKERRQ(ierr);
-
-  /* set two-level additive Schwarz preconditioner */
-  ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr);
-  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
-  ierr = PCSetType(pc,PCMG);CHKERRQ(ierr);
-  ierr = PCMGSetLevels(pc,2,NULL);CHKERRQ(ierr);
-  ierr = PCMGSetType(pc,PC_MG_ADDITIVE);CHKERRQ(ierr);
-
-  /* always solve the coarse problem redundantly with direct LU solver */
-  ierr = PetscOptionsSetValue("-coarse_pc_type","redundant");CHKERRQ(ierr);
-  ierr = PetscOptionsSetValue("-coarse_redundant_pc_type","lu");CHKERRQ(ierr);
-  ierr = PetscOptionsSetValue("-coarse_redundant_ksp_type","preonly");CHKERRQ(ierr);
-
-  /* Create coarse level */
-  ierr = PCMGGetCoarseSolve(pc,&user.ksp_coarse);CHKERRQ(ierr);
-  ierr = KSPSetOptionsPrefix(user.ksp_coarse,"coarse_");CHKERRQ(ierr);
-  ierr = KSPSetFromOptions(user.ksp_coarse);CHKERRQ(ierr);
-  ierr = KSPSetOperators(user.ksp_coarse,user.coarse.J,user.coarse.J);CHKERRQ(ierr);
-  ierr = PCMGSetX(pc,COARSE_LEVEL,user.coarse.x);CHKERRQ(ierr);
-  ierr = PCMGSetRhs(pc,COARSE_LEVEL,user.coarse.b);CHKERRQ(ierr);
-  if (user.redundant_build) {
-    PC rpc;
-    ierr = KSPGetPC(user.ksp_coarse,&rpc);CHKERRQ(ierr);
-    ierr = PCRedundantSetScatter(rpc,user.tolocalall,user.fromlocalall);CHKERRQ(ierr);
-  }
-
-  /* Create fine level */
-  ierr = PCMGGetSmoother(pc,FINE_LEVEL,&user.ksp_fine);CHKERRQ(ierr);
-  ierr = KSPSetOptionsPrefix(user.ksp_fine,"fine_");CHKERRQ(ierr);
-  ierr = KSPSetFromOptions(user.ksp_fine);CHKERRQ(ierr);
-  ierr = KSPSetOperators(user.ksp_fine,user.fine.J,user.fine.J);CHKERRQ(ierr);
-  ierr = PCMGSetR(pc,FINE_LEVEL,user.fine.r);CHKERRQ(ierr);
-  ierr = PCMGSetResidual(pc,FINE_LEVEL,NULL,user.fine.J);CHKERRQ(ierr);
-
-  /* Create interpolation between the levels */
-  ierr = FormInterpolation(&user);CHKERRQ(ierr);
-  ierr = PCMGSetInterpolation(pc,FINE_LEVEL,user.R);CHKERRQ(ierr);
-  ierr = PCMGSetRestriction(pc,FINE_LEVEL,user.R);CHKERRQ(ierr);
-
-  /* Set options, then solve nonlinear system */
-  ierr = SNESSetFromOptions(snes);CHKERRQ(ierr);
-  ierr = FormInitialGuess1(&user,user.fine.x);CHKERRQ(ierr);
-  ierr = SNESSolve(snes,NULL,user.fine.x);CHKERRQ(ierr);
-  ierr = SNESGetIterationNumber(snes,&its);CHKERRQ(ierr);
-  ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n", its);CHKERRQ(ierr);
-
-  /* Free data structures */
-  if (user.redundant_build) {
-    ierr = VecScatterDestroy(&user.tolocalall);CHKERRQ(ierr);
-    ierr = VecScatterDestroy(&user.fromlocalall);CHKERRQ(ierr);
-    ierr = VecDestroy(&user.localall);CHKERRQ(ierr);
-  } else {
-    ierr = VecDestroy(&user.coarse.localX);CHKERRQ(ierr);
-    ierr = VecDestroy(&user.coarse.localF);CHKERRQ(ierr);
-  }
-
-  ierr = MatDestroy(&user.fine.J);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.fine.x);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.fine.r);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.fine.b);CHKERRQ(ierr);
-  ierr = DMDestroy(&user.fine.da);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.fine.localX);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.fine.localF);CHKERRQ(ierr);
-
-  ierr = MatDestroy(&user.coarse.J);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.coarse.x);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.coarse.b);CHKERRQ(ierr);
-  ierr = DMDestroy(&user.coarse.da);CHKERRQ(ierr);
-
-  ierr = SNESDestroy(&snes);CHKERRQ(ierr);
-  ierr = MatDestroy(&user.R);CHKERRQ(ierr);
-  ierr = VecDestroy(&user.Rscale);CHKERRQ(ierr);
-  PetscFinalize();
-
-  return 0;
-}
-
-/* --------------------  Form initial approximation ----------------- */
-#undef __FUNCT__
-#define __FUNCT__ "FormInitialGuess1"
-PetscErrorCode FormInitialGuess1(AppCtx *user,Vec X)
-{
-  PetscInt       i, j, row, mx, my, xs, ys, xm, ym, Xm, Ym, Xs, Ys;
-  PetscErrorCode ierr;
-  PetscReal      one = 1.0, lambda, temp1, temp, hx, hy;
-  PetscScalar    *x;
-  Vec            localX = user->fine.localX;
-
-  mx = user->fine.mx;       my = user->fine.my;            lambda = user->param;
-  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
-  /* sc = hx*hy*lambda;        hxdhy = hx/hy;            hydhx = hy/hx; */
-
-  temp1 = lambda/(lambda + one);
-
-  /* Get ghost points */
-  ierr = DMDAGetCorners(user->fine.da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGhostCorners(user->fine.da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
-  ierr = VecGetArray(localX,&x);CHKERRQ(ierr);
-
-  /* Compute initial guess */
-  for (j=ys; j<ys+ym; j++) {
-    temp = (PetscReal)(PetscMin(j,my-j-1))*hy;
-    for (i=xs; i<xs+xm; i++) {
-      row = i - Xs + (j - Ys)*Xm;
-      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
-        x[row] = 0.0;
-        continue;
-      }
-      x[row] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,mx-i-1))*hx,temp));
-    }
-  }
-  ierr = VecRestoreArray(localX,&x);CHKERRQ(ierr);
-
-  /* Insert values into global vector */
-  ierr = DMLocalToGlobalBegin(user->fine.da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
-  ierr = DMLocalToGlobalEnd(user->fine.da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
-  return 0;
-}
-
-/* --------------------  Evaluate Function F(x) --------------------- */
-#undef __FUNCT__
-#define __FUNCT__ "FormFunction"
-PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
-{
-  AppCtx         *user = (AppCtx*) ptr;
-  PetscInt       i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym;
-  PetscErrorCode ierr;
-  PetscReal      two = 2.0, one = 1.0, lambda,hx, hy, hxdhy, hydhx,sc;
-  PetscScalar    u, uxx, uyy, *x,*f;
-  Vec            localX = user->fine.localX, localF = user->fine.localF;
-
-  mx = user->fine.mx;       my = user->fine.my;       lambda = user->param;
-  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
-  sc = hx*hy*lambda;        hxdhy = hx/hy;            hydhx = hy/hx;
-
-  /* Get ghost points */
-  ierr = DMGlobalToLocalBegin(user->fine.da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
-  ierr = DMGlobalToLocalEnd(user->fine.da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
-  ierr = DMDAGetCorners(user->fine.da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGhostCorners(user->fine.da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
-  ierr = VecGetArray(localX,&x);CHKERRQ(ierr);
-  ierr = VecGetArray(localF,&f);CHKERRQ(ierr);
-
-  /* Evaluate function */
-  for (j=ys; j<ys+ym; j++) {
-    row = (j - Ys)*Xm + xs - Xs - 1;
-    for (i=xs; i<xs+xm; i++) {
-      row++;
-      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
-        u      = x[row];
-        uxx    = (two*u - x[row-1] - x[row+1])*hydhx;
-        uyy    = (two*u - x[row-Xm] - x[row+Xm])*hxdhy;
-        f[row] = uxx + uyy - sc*PetscExpScalar(u);
-      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
-        f[row] = .5*two*(hydhx + hxdhy)*x[row];
-      } else {
-        f[row] = .25*two*(hydhx + hxdhy)*x[row];
-      }
-    }
-  }
-  ierr = VecRestoreArray(localX,&x);CHKERRQ(ierr);
-  ierr = VecRestoreArray(localF,&f);CHKERRQ(ierr);
-
-  /* Insert values into global vector */
-  ierr = DMLocalToGlobalBegin(user->fine.da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
-  ierr = DMLocalToGlobalEnd(user->fine.da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
-  ierr = PetscLogFlops(11.0*ym*xm);CHKERRQ(ierr);
-  return 0;
-}
-
-/*
-        Computes the part of the Jacobian associated with this processor.
-*/
-#undef __FUNCT__
-#define __FUNCT__ "FormJacobian_Grid"
-PetscErrorCode FormJacobian_Grid(AppCtx *user,GridCtx *grid,Vec X, Mat J,Mat B)
-{
-  Mat            jac = J;
-  PetscErrorCode ierr;
-  PetscInt       i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym, col[5], nloc, grow;
-  const PetscInt *ltog;
-  PetscScalar    two    = 2.0, one = 1.0, lambda, v[5], hx, hy, hxdhy, hydhx, sc, *x, value;
-  Vec            localX = grid->localX;
-
-  mx = grid->mx;            my = grid->my;            lambda = user->param;
-  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
-  sc = hx*hy;               hxdhy = hx/hy;            hydhx = hy/hx;
-
-  /* Get ghost points */
-  ierr = DMGlobalToLocalBegin(grid->da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
-  ierr = DMGlobalToLocalEnd(grid->da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
-  ierr = DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
-  ierr = VecGetArray(localX,&x);CHKERRQ(ierr);
-
-  /* Evaluate Jacobian of function */
-  for (j=ys; j<ys+ym; j++) {
-    row = (j - Ys)*Xm + xs - Xs - 1;
-    for (i=xs; i<xs+xm; i++) {
-      row++;
-      grow = ltog[row];
-      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
-        v[0] = -hxdhy; col[0] = ltog[row - Xm];
-        v[1] = -hydhx; col[1] = ltog[row - 1];
-        v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = grow;
-        v[3] = -hydhx; col[3] = ltog[row + 1];
-        v[4] = -hxdhy; col[4] = ltog[row + Xm];
-        ierr = MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
-      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
-        value = .5*two*(hydhx + hxdhy);
-        ierr  = MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);CHKERRQ(ierr);
-      } else {
-        value = .25*two*(hydhx + hxdhy);
-        ierr  = MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);CHKERRQ(ierr);
-      }
-    }
-  }
-  ierr = DMDARestoreGlobalIndices(grid->da,&nloc,&ltog);CHKERRQ(ierr);
-  ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  ierr = VecRestoreArray(localX,&x);CHKERRQ(ierr);
-  ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-
-  return 0;
-}
-
-/*
-        Computes the ENTIRE Jacobian associated with the ENTIRE grid sequentially;
-    this is for generating the coarse grid Jacobian redundantly.
-
-        This is BAD code duplication, since the bulk of this routine is the
-    same as the routine above.
-
-        Note that the numbering of the rows/columns is the NATURAL numbering.
-*/
-#undef __FUNCT__
-#define __FUNCT__ "FormJacobian_Coarse"
-PetscErrorCode FormJacobian_Coarse(AppCtx *user,GridCtx *grid,Vec X, Mat J,Mat B)
-{
-  Mat            jac = J;
-  PetscErrorCode ierr;
-  PetscInt       i, j, row, mx, my, col[5];
-  PetscScalar    two = 2.0, one = 1.0, lambda, v[5], hx, hy, hxdhy, hydhx, sc, *x, value;
-
-  mx = grid->mx;            my = grid->my;            lambda = user->param;
-  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
-  sc = hx*hy;               hxdhy = hx/hy;            hydhx = hy/hx;
-
-  ierr = VecGetArray(X,&x);CHKERRQ(ierr);
-
-  /* Evaluate Jacobian of function */
-  for (j=0; j<my; j++) {
-    row = j*mx - 1;
-    for (i=0; i<mx; i++) {
-      row++;
-      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
-        v[0] = -hxdhy; col[0] = row - mx;
-        v[1] = -hydhx; col[1] = row - 1;
-        v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = row;
-        v[3] = -hydhx; col[3] = row + 1;
-        v[4] = -hxdhy; col[4] = row + mx;
-        ierr = MatSetValues(jac,1,&row,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
-      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
-        value = .5*two*(hydhx + hxdhy);
-        ierr  = MatSetValues(jac,1,&row,1,&row,&value,INSERT_VALUES);CHKERRQ(ierr);
-      } else {
-        value = .25*two*(hydhx + hxdhy);
-        ierr  = MatSetValues(jac,1,&row,1,&row,&value,INSERT_VALUES);CHKERRQ(ierr);
-      }
-    }
-  }
-  ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  ierr = VecRestoreArray(X,&x);CHKERRQ(ierr);
-  ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-
-  return 0;
-}
-
-/* --------------------  Evaluate Jacobian F'(x) --------------------- */
-#undef __FUNCT__
-#define __FUNCT__ "FormJacobian"
-PetscErrorCode FormJacobian(SNES snes,Vec X,Mat J,Mat B,void *ptr)
-{
-  AppCtx         *user = (AppCtx*) ptr;
-  PetscErrorCode ierr;
-  KSP            ksp;
-  PC             pc;
-  PetscBool      ismg;
-
-  ierr  = FormJacobian_Grid(user,&user->fine,X,J,B);CHKERRQ(ierr);
-
-  /* create the coarse grid Jacobian for the preconditioner */
-  ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr);
-  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
-
-  ierr = PetscObjectTypeCompare((PetscObject)pc,PCMG,&ismg);CHKERRQ(ierr);
-  if (ismg) {
-
-    ierr = KSPSetOperators(user->ksp_fine,user->fine.J,user->fine.J);CHKERRQ(ierr);
-
-    /* restrict X to coarse grid */
-    ierr = MatMult(user->R,X,user->coarse.x);CHKERRQ(ierr);
-    ierr = VecPointwiseMult(user->coarse.x,user->coarse.x,user->Rscale);CHKERRQ(ierr);
-
-    /* form Jacobian on coarse grid */
-    if (user->redundant_build) {
-      /* get copy of coarse X onto each processor */
-      ierr = VecScatterBegin(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
-      ierr = VecScatterEnd(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
-      ierr = FormJacobian_Coarse(user,&user->coarse,user->localall,user->coarse.J,user->coarse.J);CHKERRQ(ierr);
-
-    } else {
-      /* coarse grid Jacobian computed in parallel */
-      ierr = FormJacobian_Grid(user,&user->coarse,user->coarse.x,user->coarse.J,user->coarse.J);CHKERRQ(ierr);
-    }
-    ierr = KSPSetOperators(user->ksp_coarse,user->coarse.J,user->coarse.J);CHKERRQ(ierr);
-  }
-
-  return 0;
-}
-
-
-#undef __FUNCT__
-#define __FUNCT__ "FormInterpolation"
-/*
-      Forms the interpolation (and restriction) operator from the
-      coarse grid to the fine grid.
-*/
-PetscErrorCode FormInterpolation(AppCtx *user)
-{
-  PetscErrorCode ierr;
-  PetscInt       i,j,i_start,m_fine,j_start,m,n;
-  const PetscInt *idx,*idx_c;
-  PetscInt       m_ghost,n_ghost,m_ghost_c,n_ghost_c,m_coarse;
-  PetscInt       row,i_start_ghost,j_start_ghost,cols[4], m_c;
-  PetscInt       nc,ratio = user->ratio,m_c_local,m_fine_local;
-  PetscInt       i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col;
-  PetscScalar    v[4],x,y, one = 1.0;
-  Mat            mat;
-  Vec            Rscale;
-
-  ierr = DMDAGetCorners(user->fine.da,&i_start,&j_start,0,&m,&n,0);CHKERRQ(ierr);
-  ierr = DMDAGetGhostCorners(user->fine.da,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(user->fine.da,NULL,&idx);CHKERRQ(ierr);
-
-  ierr = DMDAGetCorners(user->coarse.da,&i_start_c,&j_start_c,0,&m_c,&n_c,0);CHKERRQ(ierr);
-  ierr = DMDAGetGhostCorners(user->coarse.da,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);CHKERRQ(ierr);
-  ierr = DMDAGetGlobalIndices(user->coarse.da,NULL,&idx_c);CHKERRQ(ierr);
-
-  /* create interpolation matrix */
-  ierr = VecGetLocalSize(user->fine.x,&m_fine_local);CHKERRQ(ierr);
-  ierr = VecGetLocalSize(user->coarse.x,&m_c_local);CHKERRQ(ierr);
-  ierr = VecGetSize(user->fine.x,&m_fine);CHKERRQ(ierr);
-  ierr = VecGetSize(user->coarse.x,&m_coarse);CHKERRQ(ierr);
-  ierr = MatCreateAIJ(PETSC_COMM_WORLD,m_fine_local,m_c_local,m_fine,m_coarse,
-                      5,0,3,0,&mat);CHKERRQ(ierr);
-
-  /* loop over the local fine grid nodes, setting the interpolation stencil for each */
-  for (j=j_start; j<j_start+n; j++) {
-    for (i=i_start; i<i_start+m; i++) {
-      /* convert to local "natural" numbering and then to PETSc global numbering */
-      row = idx[m_ghost*(j-j_start_ghost) + (i-i_start_ghost)];
-
-      i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
-      j_c = (j/ratio);    /* coarse grid node below fine grid node */
-
-      /*
-         Only include those interpolation points that are truly
-         nonzero. Note this is very important for the final grid lines
-         in the x and y directions, since they have no right/top neighbors.
-      */
-      x  = ((PetscReal)(i - i_c*ratio))/((PetscReal)ratio);
-      y  = ((PetscReal)(j - j_c*ratio))/((PetscReal)ratio);
-      nc = 0;
-      /* one left and below; or we are right on it */
-      if (j_c < j_start_ghost_c || j_c > j_start_ghost_c+n_ghost_c) SETERRQ3(PETSC_COMM_SELF,1,"Sorry j %D %D %D",j_c,j_start_ghost_c,j_start_ghost_c+n_ghost_c);
-      if (i_c < i_start_ghost_c || i_c > i_start_ghost_c+m_ghost_c) SETERRQ3(PETSC_COMM_SELF,1,"Sorry i %D %D %D",i_c,i_start_ghost_c,i_start_ghost_c+m_ghost_c);
-      col      = m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c);
-      cols[nc] = idx_c[col];
-      v[nc++]  = x*y - x - y + 1.0;
-      /* one right and below */
-      if (i_c*ratio != i) {
-        cols[nc] = idx_c[col+1];
-        v[nc++]  = -x*y + x;
-      }
-      /* one left and above */
-      if (j_c*ratio != j) {
-        cols[nc] = idx_c[col+m_ghost_c];
-        v[nc++]  = -x*y + y;
-      }
-      /* one right and above */
-      if (j_c*ratio != j && i_c*ratio != i) {
-        cols[nc] = idx_c[col+m_ghost_c+1];
-        v[nc++]  = x*y;
-      }
-      ierr = MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);CHKERRQ(ierr);
-    }
-  }
-  ierr = DMDARestoreGlobalIndices(user->fine.da,NULL,&idx);CHKERRQ(ierr);
-  ierr = DMDARestoreGlobalIndices(user->coarse.da,NULL,&idx_c);CHKERRQ(ierr);
-  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
-
-  ierr         = VecDuplicate(user->coarse.x,&Rscale);CHKERRQ(ierr);
-  ierr         = VecSet(user->fine.x,one);CHKERRQ(ierr);
-  ierr         = MatMultTranspose(mat,user->fine.x,Rscale);CHKERRQ(ierr);
-  ierr         = VecReciprocal(Rscale);CHKERRQ(ierr);
-  user->Rscale = Rscale;
-  user->R      = mat;
-  return 0;
-}
-
-
-
-
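
With DMDAGetGlobalIndices()/DMDARestoreGlobalIndices() removed, a routine such as FormJacobian_Grid() above has to obtain the ghosted local-to-global index array through the ISLocalToGlobalMapping interface instead. A minimal sketch of the replacement pattern, reusing the grid, nloc, and ltog names from the deleted example (the mapping itself is owned by the DM, so only the index array needs to be restored):

  ISLocalToGlobalMapping ltogm;
  const PetscInt         *ltog;
  PetscInt               nloc;

  ierr = DMGetLocalToGlobalMapping(grid->da,&ltogm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(ltogm,&nloc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
  /* ... index ltog[] by ghosted local row numbers, exactly as before ... */
  ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);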

src/snes/examples/tests/ex5.c

    Notes:
    Due to grid point reordering with DMDAs, we must always work
    with the local grid points, and then transform them to the new
-   global numbering with the "ltog" mapping (via DMDAGetGlobalIndices()).
+   global numbering with the "ltog" mapping (via DMGetLocalToGlobalMapping()).
    We cannot work directly with the global numbers for the original
    uniprocessor grid!
 */
   AppCtx         *user  = (AppCtx*)ptr;   /* user-defined application context */
   Vec            localX = user->localX;   /* local vector */
   PetscErrorCode ierr;
-  const PetscInt *ltog;                   /* local-to-global mapping */
   PetscInt       i,j,row,mx,my,col[5];
-  PetscInt       nloc,xs,ys,xm,ym,gxs,gys,gxm,gym,grow;
+  PetscInt       xs,ys,xm,ym,gxs,gys,gxm,gym;
   PetscScalar    two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x;
 
   mx = user->mx;            my = user->my;            lambda = user->param;
   ierr = DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);CHKERRQ(ierr);
 
   /*
-     Get the global node numbers for all local nodes, including ghost points
-  */
-  ierr = DMDAGetGlobalIndices(user->da,&nloc,&ltog);CHKERRQ(ierr);
-
-  /*
      Compute entries for the locally owned part of the Jacobian.
       - Currently, all PETSc parallel matrix formats are partitioned by
         contiguous chunks of rows across the processors. The "grow"
     row = (j - gys)*gxm + xs - gxs - 1;
     for (i=xs; i<xs+xm; i++) {
       row++;
-      grow = ltog[row];
       /* boundary points */
       if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
-        ierr = MatSetValues(jac,1,&grow,1,&grow,&one,INSERT_VALUES);CHKERRQ(ierr);
+        ierr = MatSetValuesLocal(jac,1,&row,1,&row,&one,INSERT_VALUES);CHKERRQ(ierr);
         continue;
       }
       /* interior grid points */
-      v[0] = -hxdhy; col[0] = ltog[row - gxm];
-      v[1] = -hydhx; col[1] = ltog[row - 1];
-      v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = grow;
-      v[3] = -hydhx; col[3] = ltog[row + 1];
-      v[4] = -hxdhy; col[4] = ltog[row + gxm];
-      ierr = MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
+      v[0] = -hxdhy; col[0] = row - gxm;
+      v[1] = -hydhx; col[1] = row - 1;
+      v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = row;
+      v[3] = -hydhx; col[3] = row + 1;
+      v[4] = -hxdhy; col[4] = row + gxm;
+      ierr = MatSetValuesLocal(jac,1,&row,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
     }
   }
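
MatSetValuesLocal() above relies on a local-to-global mapping having been attached to the Jacobian with MatSetLocalToGlobalMapping(); matrices obtained from the DM get this automatically. A rough sketch of the equivalent hand translation, assuming a mapping ltogm taken from DMGetLocalToGlobalMapping(user->da,&ltogm):

  PetscInt grow,gcol[5];
  /* translate the ghosted local row and column indices to global indices */
  ierr = ISLocalToGlobalMappingApply(ltogm,1,&row,&grow);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltogm,5,col,gcol);CHKERRQ(ierr);
  ierr = MatSetValues(jac,1,&grow,5,gcol,v,INSERT_VALUES);CHKERRQ(ierr);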