
Jed Brown committed 8bfbc91

SF: define MPIU_REPLACE to work around missing MPI_REPLACE


Files changed (5)

include/petscsf.h

 PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata)
   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
 
+#if defined(MPI_REPLACE)
+#  define MPIU_REPLACE MPI_REPLACE
+#else
+/* When using an old MPI that does not define MPI_REPLACE, we never pass MPIU_REPLACE to MPI at all.  Instead, we use
+ * it as a flag for our own reducer in the PETSCSFBASIC implementation.  It could be any unique value unlikely to
+ * collide with another MPI_Op, so we just use the value that every version of MPICH since MPICH2-1.0.6 has used. */
+#  define MPIU_REPLACE (MPI_Op)(0x5800000d)
+#endif
+
 #endif
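
With this guard, callers can use MPIU_REPLACE unconditionally: it is the real MPI_REPLACE when the MPI library provides one, and otherwise a private sentinel that PETSc's own reducer recognizes but never hands to MPI. A minimal standalone sketch of the technique (not part of this commit; MY_REPLACE and dispatch are hypothetical names):

#include <mpi.h>
#include <stdio.h>

#if defined(MPI_REPLACE)
#  define MY_REPLACE MPI_REPLACE
#else
#  define MY_REPLACE (MPI_Op)(0x5800000d) /* MPICH's historical value, as above */
#endif

/* Dispatch on the op ourselves; MY_REPLACE never reaches an MPI call. */
static void dispatch(MPI_Op op,int *dst,const int *src,int n)
{
  int i;
  if (op == MY_REPLACE)   for (i=0; i<n; i++) dst[i] = src[i];
  else if (op == MPI_SUM) for (i=0; i<n; i++) dst[i] += src[i];
}

int main(int argc,char **argv)
{
  int dst[2] = {1,2},src[2] = {10,20};
  MPI_Init(&argc,&argv);
  dispatch(MY_REPLACE,dst,src,2);
  printf("%d %d\n",dst[0],dst[1]); /* prints: 10 20 */
  MPI_Finalize();
  return 0;
}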

src/dm/interface/dm.c

     switch (mode) {
     case INSERT_VALUES:
     case INSERT_ALL_VALUES:
-#if defined(PETSC_HAVE_MPI_REPLACE)
-      op = MPI_REPLACE; break;
-#else
-      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
-#endif
+      op = MPIU_REPLACE; break;
     case ADD_VALUES:
     case ADD_ALL_VALUES:
       op = MPI_SUM; break;
     switch (mode) {
     case INSERT_VALUES:
     case INSERT_ALL_VALUES:
-#if defined(PETSC_HAVE_MPI_REPLACE)
-      op = MPI_REPLACE; break;
-#else
-      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
-#endif
+      op = MPIU_REPLACE; break;
     case ADD_VALUES:
     case ADD_ALL_VALUES:
       op = MPI_SUM; break;
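
Both hunks above collapse the same #if/SETERRQ fallback into a single assignment, since MPIU_REPLACE is now always defined. A condensed sketch of the resulting mapping (InsertModeToOp is a hypothetical helper, not code from this commit; the real call sites keep the switch inline and SETERRQ on unsupported modes):

#include <petscsf.h>

/* Hypothetical helper: the InsertMode-to-MPI_Op mapping both call sites now
 * share.  With MPIU_REPLACE always defined, no #if guard or error path for
 * pre-MPI-2 implementations is needed. */
static MPI_Op InsertModeToOp(InsertMode mode)
{
  switch (mode) {
  case INSERT_VALUES:
  case INSERT_ALL_VALUES: return MPIU_REPLACE;
  case ADD_VALUES:
  case ADD_ALL_VALUES:    return MPI_SUM;
  default:                return MPI_OP_NULL; /* real code raises an error here */
  }
}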

src/mat/impls/aij/mpi/mpiaij.c

   ierr = PetscSFSetGraphLayout(rowsf,A->rmap,A->rmap->n,NULL,PETSC_OWN_POINTER,rwant);CHKERRQ(ierr);
   ierr = PetscSFSetFromOptions(rowsf);CHKERRQ(ierr);
   for (i=0; i<m; i++) work[i] = A->rmap->rstart + i;
-  ierr = PetscSFReduceBegin(rowsf,MPIU_INT,work,rdest,MPI_REPLACE);CHKERRQ(ierr);
-  ierr = PetscSFReduceEnd(rowsf,MPIU_INT,work,rdest,MPI_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceBegin(rowsf,MPIU_INT,work,rdest,MPIU_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceEnd(rowsf,MPIU_INT,work,rdest,MPIU_REPLACE);CHKERRQ(ierr);
 
   /* Invert column permutation to find out where my columns should go */
   ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr);
   ierr = PetscSFSetGraphLayout(sf,A->cmap,A->cmap->n,NULL,PETSC_OWN_POINTER,cwant);CHKERRQ(ierr);
   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
   for (i=0; i<n; i++) work[i] = A->cmap->rstart + i;
-  ierr = PetscSFReduceBegin(sf,MPIU_INT,work,cdest,MPI_REPLACE);CHKERRQ(ierr);
-  ierr = PetscSFReduceEnd(sf,MPIU_INT,work,cdest,MPI_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceBegin(sf,MPIU_INT,work,cdest,MPIU_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceEnd(sf,MPIU_INT,work,cdest,MPIU_REPLACE);CHKERRQ(ierr);
   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
 
   ierr = ISRestoreIndices(rowp,&rwant);CHKERRQ(ierr);
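
The reduce-with-replace pattern above inverts a permutation: each leaf pushes its value into the root it references, and since each root has exactly one incoming leaf, "replace" is exact. A self-contained sketch on a serial two-node SF (hypothetical example, not from the commit):

#include <petscsf.h>

int main(int argc,char **argv)
{
  PetscSF        sf;
  PetscSFNode    remote[2];
  PetscInt       leafdata[2],rootdata[2];
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);CHKERRQ(ierr);
  /* Two leaves on rank 0 pointing at roots 1 and 0: a swap */
  remote[0].rank = 0; remote[0].index = 1;
  remote[1].rank = 0; remote[1].index = 0;
  ierr = PetscSFCreate(PETSC_COMM_SELF,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,2,2,NULL,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  leafdata[0] = 10; leafdata[1] = 20;
  rootdata[0] = -1; rootdata[1] = -1;
  /* Each root takes the value of the (unique) leaf that references it */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,MPIU_REPLACE);CHKERRQ(ierr);
  /* rootdata is now {20,10}: the permutation has been applied */
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}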

src/vec/is/sf/impls/basic/sfbasic.c

 {
   PetscFunctionBegin;
   *UnpackOp = NULL;
-  if (op == MPI_REPLACE) *UnpackOp = link->UnpackInsert;
+  if (op == MPIU_REPLACE) *UnpackOp = link->UnpackInsert;
   else if (op == MPI_SUM || op == MPIU_SUM) *UnpackOp = link->UnpackAdd;
   else if (op == MPI_MAX || op == MPIU_MAX) *UnpackOp = link->UnpackMax;
   else if (op == MPI_MIN || op == MPIU_MIN) *UnpackOp = link->UnpackMin;
 {
   PetscFunctionBegin;
   *FetchAndOp = NULL;
-  if (op == MPI_REPLACE) *FetchAndOp = link->FetchAndInsert;
+  if (op == MPIU_REPLACE) *FetchAndOp = link->FetchAndInsert;
   else if (op == MPI_SUM || op == MPIU_SUM) *FetchAndOp = link->FetchAndAdd;
   else if (op == MPI_MAX || op == MPIU_MAX) *FetchAndOp = link->FetchAndMax;
   else if (op == MPI_MIN || op == MPIU_MIN) *FetchAndOp = link->FetchAndMin;
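
These comparisons are the only places that ever see MPIU_REPLACE when the real MPI_REPLACE is missing, which is what makes the sentinel safe. In scalar form, the FetchAndInsert kernel selected above behaves roughly as follows (a sketch of the assumed semantics, not PETSc source):

#include <petscsf.h>

/* Assumed scalar semantics of FetchAndInsert: the root is overwritten with
 * the leaf value and the leaf receives the root's previous value. */
static void FetchAndInsert_Scalar(PetscInt *root,PetscInt *leaf)
{
  PetscInt old = *root;
  *root = *leaf; /* insert (replace) */
  *leaf = old;   /* fetch */
}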

src/vec/is/sf/interface/sf.c

     roots[i].rank  = -1;
     roots[i].index = -1;
   }
-  ierr = PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPI_REPLACE);CHKERRQ(ierr);
-  ierr = PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPI_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);
 
   /* Check whether our leaves are sparse */
   for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
       for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
       ierr = PetscMalloc5(sf->multi->nroots,PetscInt,&inranks,sf->multi->nroots,PetscInt,&newoffset,sf->nleaves,PetscInt,&outranks,sf->nleaves,PetscInt,&newoutoffset,maxdegree,PetscInt,&tmpoffset);CHKERRQ(ierr);
       for (i=0; i<sf->nleaves; i++) outranks[i] = rank;
-      ierr = PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPI_REPLACE);CHKERRQ(ierr);
-      ierr = PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPI_REPLACE);CHKERRQ(ierr);
+      ierr = PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
+      ierr = PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
       /* Sort the incoming ranks at each vertex, build the inverse map */
       for (i=0; i<sf->nroots; i++) {
         PetscInt j;
   PetscFunctionBegin;
   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
   ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
-  ierr = PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPI_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
 
   PetscSFCheckGraphSet(sf,1);
   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
   ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
-  ierr = PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPI_REPLACE);CHKERRQ(ierr);
+  ierr = PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
   PetscFunctionReturn(0);
 }
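
The final two hunks are presumably the bodies of PetscSFGatherBegin/End: gather is implemented as a reduce over the multi-SF with MPIU_REPLACE, and since every leaf owns a distinct multi-root slot, "replace" loses nothing. A usage fragment (hypothetical; assumes sf, leafdata, nroots, and ierr are already set up, and uses old-style PetscMalloc to match the surrounding code):

const PetscInt *degree;
PetscInt       *multirootdata,i,ndata;

/* One multi-root slot per incoming leaf edge */
ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
for (i=0,ndata=0; i<nroots; i++) ndata += degree[i];
ierr = PetscMalloc(ndata*sizeof(PetscInt),&multirootdata);CHKERRQ(ierr);
/* Internally a reduce over the multi-SF with MPIU_REPLACE, as above */
ierr = PetscSFGatherBegin(sf,MPIU_INT,leafdata,multirootdata);CHKERRQ(ierr);
ierr = PetscSFGatherEnd(sf,MPIU_INT,leafdata,multirootdata);CHKERRQ(ierr);
ierr = PetscFree(multirootdata);CHKERRQ(ierr);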