Commits

Anonymous committed 3cd2304

Refactored UGridPartition to include only code shared between the unstructured implicit and explicit grids; placed unique code in unstructured_implicit.F90; fixed format statement for debug output
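
The heart of the change is UGridPartition()'s interface: the routine no longer builds the dual graph itself, so graph construction moves out to its callers. Reconstructed from the hunks below:

    ! Before: UGridPartition() derived Dual_mat from Adj_mat internally
    ! (via MatMeshToCellGraph() and ParMETIS).
    call UGridPartition(ugrid, option, Adj_mat, num_common_vertices, &
                        Dual_mat, is_new, num_cells_local_new)

    ! After: each caller constructs Dual_mat first and passes it in.
    call UGridPartition(ugrid, option, Dual_mat, is_new, num_cells_local_new)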

  • Parent commits 109da55

Files changed (4)

src/pflotran/discretization.F90

       discretization%dm_index_to_ndof(NFLOWDOF) = option%nflowdof
       discretization%dm_index_to_ndof(NTRANDOF) = option%ntrandof
     case(UNSTRUCTURED_GRID)
+
+      ! petsc will call parmetis to calculate the graph/dual
+#if !defined(PETSC_HAVE_PARMETIS)
+      option%io_buffer = &
+        'Must compile with Parmetis in order to use unstructured grids.'
+      call printErrMsg(option)
+#endif
+    
       select case(discretization%grid%itype)
         case(IMPLICIT_UNSTRUCTURED_GRID)
           call UGridDecompose(discretization%grid%unstructured_grid, &

src/pflotran/unstructured_explicit.F90

   type(unstructured_explicit_type), pointer :: explicit_grid
   PetscViewer :: viewer
   
-  Mat :: M_mat
+  Mat :: M_mat,M_mat_loc
   Vec :: M_vec
   Mat :: Adj_mat
   Mat :: Dual_mat
   PetscInt, allocatable :: int_array(:), int_array2(:), int_array3(:)
   PetscInt, allocatable :: int_array4(:)
   PetscInt, allocatable :: int_array2d(:,:)
-  PetscInt :: num_common_faces
   PetscInt :: num_connections_local_old, num_connections_local
   PetscInt :: num_connections_total 
   PetscInt :: num_connections_global, global_connection_offset
   PetscInt :: cell_stride, dual_offset, connection_offset, connection_stride
   PetscInt :: natural_id_offset
   PetscErrorCode :: ierr
+  PetscInt :: icell_up,icell_dn
   
   character(len=MAXSTRINGLENGTH) :: string
 
   call PetscViewerDestroy(viewer,ierr)
 #endif
 
-  call MatConvert(M_mat,MATMPIADJ,MAT_INITIAL_MATRIX,Adj_mat,ierr)
+  ! GB: When MatConvert() is used, the diagonal entries are lost in Adj_mat
+  !call MatConvert(M_mat,MATMPIADJ,MAT_INITIAL_MATRIX,Adj_mat,ierr)
+  !call MatDestroy(M_mat,ierr)
+
+  ! Alternate method of creating Adj_mat
+  if (option%mycommsize>1) then
+    call MatMPIAIJGetLocalMat(M_mat,MAT_INITIAL_MATRIX,M_mat_loc,ierr)
+    call MatGetRowIJF90(M_mat_loc,ZERO_INTEGER,PETSC_FALSE,PETSC_FALSE,num_rows, &
+                        ia_ptr,ja_ptr,success,ierr)
+  else
+    call MatGetRowIJF90(M_mat,ZERO_INTEGER,PETSC_FALSE,PETSC_FALSE,num_rows, &
+                        ia_ptr,ja_ptr,success,ierr)
+  endif
+
+  count=0
+  do icell = 1,num_rows
+    istart = ia_ptr(icell)
+    iend = ia_ptr(icell+1)-1
+    num_cols = iend-istart+1
+    count = count+num_cols
+  enddo
+  allocate(local_connections(count))
+  allocate(local_connection_offsets(num_rows+1))
+  local_connection_offsets(1:num_rows+1) = ia_ptr(1:num_rows+1)
+  local_connections(1:count)             = ja_ptr(1:count)
+
+  call MPI_Barrier(MPI_COMM_WORLD,ierr)
+
+  call MatCreateMPIAdj(option%mycomm,num_cells_local_old, &
+                       num_connections_global, &
+                       local_connection_offsets, &
+                       local_connections,PETSC_NULL_INTEGER,Adj_mat,ierr)
+
+  if (option%mycommsize>1) then
+    call MatRestoreRowIJF90(M_mat_loc,ZERO_INTEGER,PETSC_FALSE,PETSC_FALSE,num_rows, &
+                            ia_ptr,ja_ptr,success,ierr)
+  else
+    call MatRestoreRowIJF90(M_mat,ZERO_INTEGER,PETSC_FALSE,PETSC_FALSE,num_rows, &
+                            ia_ptr,ja_ptr,success,ierr)
+  endif
   call MatDestroy(M_mat,ierr)
 
 #if UGRID_DEBUG
   call PetscViewerDestroy(viewer,ierr)
 #endif
 
-  num_common_faces = 1
-  call UGridPartition(ugrid, &
-                             option, &
-                             Adj_mat, &
-                             num_common_faces, &
-                             Dual_mat,is_new, &
-                             num_cells_local_new)
+  ! Create the Dual matrix. For the implicit ugrid the subroutine
+  ! UGridPartition() creates this Dual matrix, but for the explicit grid
+  ! the dual matrix is created before the call to UGridPartition().
+  call MatCreateAIJ(option%mycomm,num_cells_local_old,PETSC_DECIDE, &
+                    ugrid%nmax,ugrid%nmax, &
+                    ugrid%max_ndual_per_cell,PETSC_NULL_INTEGER, &
+                    ugrid%max_ndual_per_cell,PETSC_NULL_INTEGER, &
+                    M_mat,ierr)
+  do iconn = 1, num_connections_local_old
+    icell_up = explicit_grid%connections(1,iconn)-1
+    icell_dn = explicit_grid%connections(2,iconn)-1
+    call MatSetValue(M_mat,icell_up,icell_dn,1.d0,INSERT_VALUES,ierr)
+    call MatSetValue(M_mat,icell_dn,icell_up,1.d0,INSERT_VALUES,ierr)
+  enddo
+  
+  call MatAssemblyBegin(M_mat,MAT_FINAL_ASSEMBLY,ierr)
+  call MatAssemblyEnd(M_mat,MAT_FINAL_ASSEMBLY,ierr)
+  
+  call MatConvert(M_mat,MATMPIADJ,MAT_INITIAL_MATRIX,Dual_mat,ierr)
+  call MatDestroy(M_mat,ierr)
+
+#if UGRID_DEBUG
+  call PetscViewerASCIIOpen(option%mycomm,'Dual_mat.out',viewer,ierr)
+  call MatView(Dual_mat,viewer,ierr)
+  call PetscViewerDestroy(viewer,ierr)
+#endif
+
+  call UGridPartition(ugrid,option,Dual_mat,is_new, &
+                      num_cells_local_new)
 
   ! second argument of ZERO_INTEGER means to use 0-based indexing
  ! MatGetRowIJF90 returns row and column pointers for compressed matrix data
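
The MatConvert() workaround is the core of this hunk: the CSR arrays of the assembled AIJ matrix are borrowed with MatGetRowIJF90(), copied, and handed to MatCreateMPIAdj() so no entries are silently dropped. A self-contained sketch of that pattern (the subroutine name, argument list, and local ZERO_INTEGER parameter are illustrative, not from the commit; include paths follow PETSc-3.3-era conventions):

    ! Sketch: build an MPIAdj matrix from an assembled AIJ matrix without
    ! MatConvert(), which loses the diagonal entries.
    subroutine AdjFromAIJSketch(comm, commsize, nrows_local, ncols_global, &
                                A_mat, Adj_mat, ierr)

      implicit none

    #include "finclude/petscsys.h"
    #include "finclude/petscmat.h"

      PetscMPIInt :: comm, commsize
      PetscInt :: nrows_local, ncols_global
      Mat :: A_mat, A_loc, Adj_mat
      PetscErrorCode :: ierr

      PetscInt :: num_rows, nnz
      PetscInt, pointer :: ia_ptr(:), ja_ptr(:)
      PetscInt, allocatable :: offsets(:), columns(:)
      PetscBool :: success
      PetscInt, parameter :: ZERO_INTEGER = 0  ! PFLOTRAN defines this elsewhere

      ! In parallel, extract this rank's rows as a sequential matrix first;
      ! the row pointers are only accessible on a local (non-MPI) matrix.
      if (commsize > 1) then
        call MatMPIAIJGetLocalMat(A_mat, MAT_INITIAL_MATRIX, A_loc, ierr)
      else
        A_loc = A_mat
      endif

      ! Borrow the compressed-row arrays; a zero shift gives 0-based indices.
      call MatGetRowIJF90(A_loc, ZERO_INTEGER, PETSC_FALSE, PETSC_FALSE, &
                          num_rows, ia_ptr, ja_ptr, success, ierr)

      ! With 0-based indices the final offset is the local nonzero count.
      nnz = ia_ptr(num_rows+1)
      allocate(offsets(num_rows+1), columns(nnz))
      offsets = ia_ptr(1:num_rows+1)
      columns = ja_ptr(1:nnz)

      call MatRestoreRowIJF90(A_loc, ZERO_INTEGER, PETSC_FALSE, PETSC_FALSE, &
                              num_rows, ia_ptr, ja_ptr, success, ierr)
      if (commsize > 1) call MatDestroy(A_loc, ierr)

      ! Hand the copied CSR arrays to the adjacency matrix; an adjacency
      ! graph carries no numerical values, hence PETSC_NULL_INTEGER.
      call MatCreateMPIAdj(comm, nrows_local, ncols_global, offsets, &
                           columns, PETSC_NULL_INTEGER, Adj_mat, ierr)

    end subroutine AdjFromAIJSketch

The rest of the hunk builds the explicit grid's dual graph directly, one MatSetValue() pair per connection, since each explicit connection already names its two neighboring cells; that is presumably why MatConvert() is acceptable there, as a dual graph has no diagonal entries to lose.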

src/pflotran/unstructured_grid.F90

   call PetscViewerDestroy(viewer,ierr)
 #endif
 
-  call UGridPartition(unstructured_grid, &
-                             option, &
-                             Adj_mat, &
-                             num_common_vertices, &
-                             Dual_mat,is_new, &
-                             num_cells_local_new)
-  
+#if UGRID_DEBUG
+  call printMsg(option,'Dual matrix')
+#endif
+
+  call MatMeshToCellGraph(Adj_mat,num_common_vertices,Dual_mat,ierr)
   call MatDestroy(Adj_mat,ierr)
+  
+#if UGRID_DEBUG
+  if (ugrid%grid_type == THREE_DIM_GRID) then
+    call PetscViewerASCIIOpen(option%mycomm,'Dual_subsurf.out',viewer,ierr)
+  else
+    call PetscViewerASCIIOpen(option%mycomm,'Dual_surf.out',viewer,ierr)
+  endif
+  call MatView(Dual_mat,viewer,ierr)
+  call PetscViewerDestroy(viewer,ierr)
+#endif
+  
+  call UGridPartition(unstructured_grid,option,Dual_mat,is_new, &
+                      num_cells_local_new)
+  
   if (allocated(local_vertices)) deallocate(local_vertices)
   if (allocated(local_vertex_offset)) deallocate(local_vertex_offset)
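
With the graph construction hoisted out of UGridPartition(), the implicit-grid decomposition reads as a three-step pipeline. A condensed view (step 1 sits above this hunk and is a plausible reconstruction with illustrative names such as num_vertices; steps 2-3 are from the diff):

    ! 1. cells-to-vertices adjacency, built from the local vertex lists
    !    (reconstruction; not shown in this hunk)
    call MatCreateMPIAdj(option%mycomm, num_cells_local_old, num_vertices, &
                         local_vertex_offset, local_vertices, &
                         PETSC_NULL_INTEGER, Adj_mat, ierr)

    ! 2. dual graph: cells sharing at least num_common_vertices vertices
    !    become neighbors; PETSc hands this to ParMETIS
    call MatMeshToCellGraph(Adj_mat, num_common_vertices, Dual_mat, ierr)
    call MatDestroy(Adj_mat, ierr)

    ! 3. partition the dual graph; is_new holds the new distribution
    call UGridPartition(unstructured_grid, option, Dual_mat, is_new, &
                        num_cells_local_new)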
   

src/pflotran/unstructured_grid_aux.F90

 ! date: 10/05/12
 !
 ! ************************************************************************** !
-subroutine UGridPartition(ugrid,option, &
-                                 Adj_mat,num_common_vertices, &
-                                 Dual_mat,is_new, &
-                                 num_cells_local_new)
+subroutine UGridPartition(ugrid,option,Dual_mat,is_new, &
+                          num_cells_local_new)
 
   use Option_module
   
   
   type(unstructured_grid_type) :: ugrid
   type(option_type) :: option
-  Mat :: Adj_mat
-  PetscInt :: num_common_vertices
   Mat :: Dual_mat
   IS :: is_new
   PetscInt :: num_cells_local_new
   PetscErrorCode :: ierr
 
 #if UGRID_DEBUG
-  call printMsg(option,'Dual matrix')
-#endif
-
-  ! petsc will call parmetis to calculate the graph/dual
-#if defined(PETSC_HAVE_PARMETIS)
-  call MatMeshToCellGraph(Adj_mat,num_common_vertices,Dual_mat,ierr)
-#else
-  option%io_buffer = 'Must compile with Parmetis in order to use unstructured grids.'
-  call printErrMsg(option)
-#endif
-  
-#if UGRID_DEBUG
-  if (ugrid%grid_type == THREE_DIM_GRID) then
-    call PetscViewerASCIIOpen(option%mycomm,'Dual_subsurf.out',viewer,ierr)
-  else
-    call PetscViewerASCIIOpen(option%mycomm,'Dual_surf.out',viewer,ierr)
-  endif
-  call MatView(Dual_mat,viewer,ierr)
-  call PetscViewerDestroy(viewer,ierr)
-#endif
-
-#if UGRID_DEBUG
   call printMsg(option,'Partitioning')
 #endif
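
After the refactor, UGridPartition() keeps only the step its name promises: partitioning the Dual_mat it is handed. The remainder of the routine is not shown in this diff; for reference, the standard PETSc idiom for partitioning an adjacency/dual graph is:

    MatPartitioning :: part

    call MatPartitioningCreate(option%mycomm, part, ierr)
    call MatPartitioningSetAdjacency(part, Dual_mat, ierr)
    call MatPartitioningSetFromOptions(part, ierr)
    ! is_new maps each locally owned cell to its destination rank
    call MatPartitioningApply(part, is_new, ierr)
    call MatPartitioningDestroy(part, ierr)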