Commits

Matt Knepley committed d815b23: Merge with conflicts

Merge branch 'knepley/fix-test-output-parsing' into knepley/feature-dt-fem

* knepley/fix-test-output-parsing:
Builder: strip test output before comparing
DMPlex: ParMetis now working - removed the stub partition that was created up front; METIS does not return 0 on success, so the return-value check was fixed
DMPlex: Changed DMPlexCreatePartition() to take the partitioner name
DMPlex: remove unused variable
DMPlex: Put in METIS support

Conflicts:
config/builder.py

  • Parent commits f2436f7, 112eb74


Files changed (3)

File config/builder.py

    else:
      with file(outputName) as f:
        output      = output.strip()
-       validOutput = f.read().strip() # Jed is now stripping output it appears
-       if not validOutput.replace("\r", "") == output:
+       validOutput = f.read().strip().replace('\r', '') # Jed is now stripping output it appears
+       if not validOutput == output:
          if replace:
            with file(outputName, 'w') as f:
              f.write(output)

File include/petscdmplex.h

 PETSC_EXTERN PetscErrorCode DMPlexRestoreTransitiveClosure(DM, PetscInt, PetscBool, PetscInt *, PetscInt *[]);
 
 PETSC_EXTERN PetscErrorCode DMPlexCreateNeighborCSR(DM, PetscInt, PetscInt *, PetscInt **, PetscInt **);
-PETSC_EXTERN PetscErrorCode DMPlexCreatePartition(DM, PetscInt, PetscBool, PetscSection *, IS *, PetscSection *, IS *);
+PETSC_EXTERN PetscErrorCode DMPlexCreatePartition(DM, const char[], PetscInt, PetscBool, PetscSection *, IS *, PetscSection *, IS *);
 PETSC_EXTERN PetscErrorCode DMPlexCreatePartitionClosure(DM, PetscSection, IS, PetscSection *, IS *);
 
 PETSC_EXTERN PetscErrorCode DMPlexGenerate(DM, const char [], PetscBool , DM *);
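
A caller-side sketch of the new interface (not part of the commit; assumes dm is an existing DMPlex and that "metis" is the desired backend):

  PetscSection   partSection, origPartSection;
  IS             partition, origPartition;
  PetscErrorCode ierr;

  /* Partition at height 0 (cells); the partitioner name selects the backend,
     and NULL falls back to the default (Chaco) per the plex.c change below. */
  ierr = DMPlexCreatePartition(dm, "metis", 0, PETSC_FALSE,
                               &partSection, &partition,
                               &origPartSection, &origPartition);CHKERRQ(ierr);
  /* With enlarge = PETSC_FALSE the orig* outputs are left untouched. */
  ierr = ISDestroy(&partition);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);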

File src/dm/impls/plex/plex.c

 #endif
 
 #if defined(PETSC_HAVE_PARMETIS)
+#include <parmetis.h>
+
 #undef __FUNCT__
 #define __FUNCT__ "DMPlexPartition_ParMetis"
 PetscErrorCode DMPlexPartition_ParMetis(DM dm, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection *partSection, IS *partition)
 {
+  MPI_Comm       comm;
+  PetscInt       nvtxs      = numVertices; /* The number of vertices in the full graph */
+  PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
+  PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
+  PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
+  PetscInt      *vwgt       = NULL;        /* Vertex weights */
+  PetscInt      *adjwgt     = NULL;        /* Edge weights */
+  PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
+  PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
+  PetscInt       ncon       = 1;           /* The number of weights per vertex */
+  PetscInt       nparts;                   /* The number of partitions */
+  PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
+  PetscReal     *ubvec;                    /* The load imbalance tolerance for vertex weights */
+  PetscInt       options[5];               /* Options passed to (Par)METIS */
+  /* Outputs */
+  PetscInt       edgeCut;                  /* The number of edges cut by the partition */
+  PetscInt      *assignment, *points;
+  PetscMPIInt    commSize, rank, p, v, i;
+  PetscErrorCode ierr;
+
   PetscFunctionBegin;
-  SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "ParMetis not yet supported");
+  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
+  ierr = MPI_Comm_size(comm, &commSize);CHKERRQ(ierr);
+  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
+  nparts = commSize;
+  options[0] = 0; /* Use all defaults */
+  /* Calculate vertex distribution */
+  ierr = PetscMalloc4(nparts+1,PetscInt,&vtxdist,nparts*ncon,PetscReal,&tpwgts,ncon,PetscReal,&ubvec,nvtxs,PetscInt,&assignment);CHKERRQ(ierr);
+  vtxdist[0] = 0;
+  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
+  for (p = 2; p <= nparts; ++p) {
+    vtxdist[p] += vtxdist[p-1];
+  }
+  /* Calculate weights */
+  for (p = 0; p < nparts; ++p) {
+    tpwgts[p] = 1.0/nparts;
+  }
+  ubvec[0] = 1.05;
+
+  if (nparts == 1) {
+    ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
+  } else {
+    if (vtxdist[1] == vtxdist[nparts]) {
+      if (!rank) {
+        PetscStackPush("METIS_PartGraphKway");
+        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
+        PetscStackPop;
+        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
+      }
+    } else {
+      PetscStackPush("ParMETIS_V3_PartKway");
+      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
+      PetscStackPop;
+      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
+    }
+  }
+  /* Convert to PetscSection+IS */
+  ierr = PetscSectionCreate(comm, partSection);CHKERRQ(ierr);
+  ierr = PetscSectionSetChart(*partSection, 0, commSize);CHKERRQ(ierr);
+  for (v = 0; v < nvtxs; ++v) {
+    ierr = PetscSectionAddDof(*partSection, assignment[v], 1);CHKERRQ(ierr);
+  }
+  ierr = PetscSectionSetUp(*partSection);CHKERRQ(ierr);
+  ierr = PetscMalloc(nvtxs * sizeof(PetscInt), &points);CHKERRQ(ierr);
+  for (p = 0, i = 0; p < commSize; ++p) {
+    for (v = 0; v < nvtxs; ++v) {
+      if (assignment[v] == p) points[i++] = v;
+    }
+  }
+  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
+  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
+  ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 #endif
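
As an aside, the CSR arrays handed to METIS/ParMETIS above are exactly the xadj/adjncy layout produced by DMPlexCreateNeighborCSR(). A minimal standalone sketch (not part of the commit; assumes METIS 5.x, link with -lmetis) partitions a 4-vertex path graph the same way, including the METIS_OK check the commit message refers to:

  #include <stdio.h>
  #include <metis.h>

  int main(void)
  {
    idx_t nvtxs    = 4, ncon = 1, nparts = 2, objval;
    idx_t xadj[]   = {0, 1, 3, 5, 6};    /* offset of each vertex's neighbor list */
    idx_t adjncy[] = {1, 0, 2, 1, 3, 2}; /* neighbors of vertices 0, 1, 2, 3 */
    idx_t part[4];
    int   status, v;

    status = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, NULL, NULL, NULL,
                                 &nparts, NULL, NULL, NULL, &objval, part);
    if (status != METIS_OK) return 1;    /* success is METIS_OK, not 0 */
    for (v = 0; v < nvtxs; ++v) printf("vertex %d -> part %d\n", v, (int) part[v]);
    return 0;
  }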
 
 .seealso DMPlexDistribute()
 */
-PetscErrorCode DMPlexCreatePartition(DM dm, PetscInt height, PetscBool enlarge, PetscSection *partSection, IS *partition, PetscSection *origPartSection, IS *origPartition)
+PetscErrorCode DMPlexCreatePartition(DM dm, const char name[], PetscInt height, PetscBool enlarge, PetscSection *partSection, IS *partition, PetscSection *origPartSection, IS *origPartition)
 {
+  char           partname[1024];
+  PetscBool      isChaco = PETSC_FALSE, isMetis = PETSC_FALSE, flg;
   PetscMPIInt    size;
   PetscErrorCode ierr;
 
     ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
     PetscFunctionReturn(0);
   }
+  ierr = PetscOptionsGetString(((PetscObject) dm)->prefix, "-dm_plex_partitioner", partname, 1024, &flg);CHKERRQ(ierr);
+  if (flg) name = partname;
+  if (name) {
+    ierr = PetscStrcmp(name, "chaco", &isChaco);CHKERRQ(ierr);
+    ierr = PetscStrcmp(name, "metis", &isMetis);CHKERRQ(ierr);
+  }
   if (height == 0) {
     PetscInt  numVertices;
     PetscInt *start     = NULL;
     PetscInt *adjacency = NULL;
 
     ierr = DMPlexCreateNeighborCSR(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
-    if (1) {
+    if (!name || isChaco) {
 #if defined(PETSC_HAVE_CHACO)
       ierr = DMPlexPartition_Chaco(dm, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
+#else
+      SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
 #endif
-    } else {
+    } else if (isMetis) {
 #if defined(PETSC_HAVE_PARMETIS)
       ierr = DMPlexPartition_ParMetis(dm, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
 #endif
-    }
+    } else SETERRQ1(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Unknown mesh partitioning package %s", name);
     if (enlarge) {
       *origPartSection = *partSection;
       *origPartition   = *partition;
   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
   if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
-  ierr = DMPlexCreatePartition(dm, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
+  ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
   if (!rank) numRemoteRanks = numProcs;
   else       numRemoteRanks = 0;
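
For reference, a hedged sketch of how an application picks the partitioner after this change, assuming the DMPlexDistribute(dm, partitioner, overlap, &dmDist) signature used on this branch; the name can also be overridden at run time with -dm_plex_partitioner chaco|metis via the PetscOptionsGetString() call added above:

  DM             dmDist = NULL;
  PetscErrorCode ierr;

  ierr = DMPlexDistribute(dm, "metis", 0, &dmDist);CHKERRQ(ierr);
  if (dmDist) {               /* dmDist may be NULL when no distribution occurred */
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }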