#include "NodePartitioner.hpp"
#include "PetscMatTools.hpp"
#include "TrianglesMeshReader.hpp"
#include "Warnings.hpp"
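// NodePartitioner collects several strategies for assigning mesh nodes to processes:
// a trivial block partition (DumbPartitioning), a serial METIS partition
// (MetisLibraryPartitioning), a PETSc/ParMETIS matrix partition (PetscMatrixPartitioning)
// and a purely geometric partition (GeometricPartitioning).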
#if (PARMETIS_MAJOR_VERSION >= 4) //ParMETIS 4.x and above implies METIS 5.x and above
//Redefine the index type so that we can still use the old name "idxtype"
#define idxtype idx_t
#else
extern void METIS_PartMeshNodal(int*, int*, int*, int*, int*, int*, int*, int*, int*);
#endif
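// DumbPartitioning: each process simply takes the contiguous block of node indices that
// the mesh's DistributedVectorFactory has already assigned to it.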
template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::DumbPartitioning(AbstractMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
                                                               std::set<unsigned>& rNodesOwned)
{
    if (rMesh.GetDistributedVectorFactory()->GetProblemSize() != rMesh.GetNumNodes())
    {
        EXCEPTION("The distributed vector factory size in the mesh doesn't match the total number of nodes.");
    }
    for (unsigned node_index = rMesh.GetDistributedVectorFactory()->GetLow();
         node_index < rMesh.GetDistributedVectorFactory()->GetHigh(); node_index++)
    {
        rNodesOwned.insert(node_index);
    }
}
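// MetisLibraryPartitioning: METIS computes a nodal partition of the element connectivity;
// the result is broadcast to every process and converted into the node ownership set,
// per-process offsets and node permutation used elsewhere in Chaste.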
template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::MetisLibraryPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                       std::vector<unsigned>& rNodePermutation,
                                                                       std::set<unsigned>& rNodesOwned,
                                                                       std::vector<unsigned>& rProcessorsOffset)
{
    WARN_ONCE_ONLY("METIS_LIBRARY partitioning is deprecated and will be removed from later versions of Chaste");
    assert(ELEMENT_DIM==2 || ELEMENT_DIM==3);
    unsigned order_of_elements = 1;
    // ... (for a TrianglesMeshReader, the true order is queried via GetOrderOfElements())
    if (order_of_elements == 2)
    {
        EXCEPTION("Metis cannot partition a quadratic mesh.");
    }
    idxtype nn = rMeshReader.GetNumNodes();
    idxtype* npart = new idxtype[nn];
    assert(npart != NULL);

    idxtype ne = rMeshReader.GetNumElements();
    idxtype* elmnts = new idxtype[ne * (ELEMENT_DIM+1)];
    assert(elmnts != NULL);
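    // Flatten the element->node connectivity into the single array format METIS expects:
    // ELEMENT_DIM+1 node indices per element, stored contiguously.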
    unsigned counter = 0;
    for (unsigned element_number = 0; element_number < rMeshReader.GetNumElements(); element_number++)
    {
        ElementData element_data = rMeshReader.GetNextElementData();
        for (unsigned i=0; i<ELEMENT_DIM+1; i++)
        {
            elmnts[counter++] = element_data.NodeIndices[i];
        }
    }
    idxtype nparts = PetscTools::GetNumProcs();
    idxtype edgecut;
    idxtype* epart = new idxtype[ne];
    assert(epart != NULL);
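    // METIS 5.x and METIS 4.x expose different signatures for METIS_PartMeshNodal,
    // hence the two call sites below.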
#if (PARMETIS_MAJOR_VERSION >= 4) //ParMETIS 4.x and above implies METIS 5.x and above
    // METIS 5.x interface: element connectivity is described by an (eptr, eind)-style pair
    idxtype* eptr = new idxtype[ne+1];
    for (idxtype i=0; i<=ne; i++)
    {
        eptr[i] = i * (ELEMENT_DIM+1);
    }
    METIS_PartMeshNodal(&ne, &nn, eptr, elmnts,
                        NULL /*vwgt*/, NULL /*vsize*/, &nparts, NULL /*tpwgts*/,
                        NULL /*options*/, &edgecut /*objval*/, epart, npart);
    delete[] eptr;
#else
    // METIS 4.x interface
    int numflag = 0;                          // C-style (0-based) numbering
    int etype = (ELEMENT_DIM == 2) ? 1 : 2;   // 1 = triangles, 2 = tetrahedra
    METIS_PartMeshNodal(&ne, &nn, elmnts, &etype, &numflag, &nparts, &edgecut, epart, npart);
#endif
    MPI_Datatype mpi_idxtype = MPI_LONG_LONG_INT;
    if (sizeof(idxtype) == sizeof(int))
    {
        mpi_idxtype = MPI_INT;
    }
    MPI_Bcast(npart /*data*/, nn /*size*/, mpi_idxtype, 0 /*from master*/, PETSC_COMM_WORLD);
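    // Every process now holds the full node-to-partition assignment in npart.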
    assert(rProcessorsOffset.size() == 0);
    rProcessorsOffset.resize(PetscTools::GetNumProcs(), 0);
    for (unsigned node_index=0; node_index<rMeshReader.GetNumNodes(); node_index++)
    {
        unsigned part_read = npart[node_index];
        if (part_read == PetscTools::GetMyRank())
        {
            rNodesOwned.insert(node_index);
        }
        for (unsigned proc = part_read + 1; proc < PetscTools::GetNumProcs(); proc++)
        {
            rProcessorsOffset[proc]++;
        }
    }
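    // Build the permutation: nodes are renumbered so that each process's nodes become a
    // contiguous block starting at that process's offset in the new global numbering.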
    std::vector<unsigned> local_index(PetscTools::GetNumProcs(), 0);
    rNodePermutation.resize(rMeshReader.GetNumNodes());
    for (unsigned node_index=0; node_index<rMeshReader.GetNumNodes(); node_index++)
    {
        unsigned part_read = npart[node_index];
        rNodePermutation[node_index] = rProcessorsOffset[part_read] + local_index[part_read];
        local_index[part_read]++;
    }

    delete[] elmnts;
    delete[] epart;
    delete[] npart;
}
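// PetscMatrixPartitioning: the node connectivity graph is assembled as a distributed PETSc
// matrix, converted to a parallel adjacency (MPIAdj) matrix, and handed to PETSc's
// MatPartitioning interface (typically backed by ParMETIS) to produce the new distribution.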
template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::PetscMatrixPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                      std::vector<unsigned>& rNodePermutation,
                                                                      std::set<unsigned>& rNodesOwned,
                                                                      std::vector<unsigned>& rProcessorsOffset)
{
    assert(ELEMENT_DIM==2 || ELEMENT_DIM==3);
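    // The warning below is only reached when PETSc has been built without ParMETIS support
    // (the surrounding check is elided from this listing).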
    #define COVERAGE_IGNORE
    WARN_ONCE_ONLY("PETSc support for ParMetis is not installed.");
    #undef COVERAGE_IGNORE

    unsigned num_nodes = rMeshReader.GetNumNodes();
    unsigned num_elements = rMeshReader.GetNumElements();
    unsigned num_procs = PetscTools::GetNumProcs();
    unsigned local_proc_index = PetscTools::GetMyRank();

    unsigned num_local_elements = num_elements / num_procs;
    unsigned first_local_element = num_local_elements * local_proc_index;
    if (local_proc_index == num_procs - 1)
    {
        num_local_elements += num_elements - (num_local_elements * num_procs);
    }
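    // Each process takes a contiguous chunk of elements and records their node-to-node
    // connections in a distributed connectivity matrix.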
    Mat connectivity_matrix;
    PetscTools::SetupMat(connectivity_matrix, num_nodes, num_nodes, 54, PETSC_DECIDE, PETSC_DECIDE, false);
    for (unsigned element_index = 0; element_index < first_local_element; element_index++)
    {
        rMeshReader.GetNextElementData(); // advance the reader past elements handled by earlier processes
    }
    assert(SPACE_DIM >= ELEMENT_DIM);
    for (unsigned element_index = 0; element_index < num_local_elements; element_index++)
    {
        ElementData element_data = rMeshReader.GetElementData(first_local_element + element_index);

        for (unsigned i=0; i<element_data.NodeIndices.size(); i++)
        {
            unsigned row = element_data.NodeIndices[i];
            for (unsigned j=0; j<i; j++)
            {
                unsigned col = element_data.NodeIndices[j];
                MatSetValue(connectivity_matrix, row, col, 1.0, INSERT_VALUES);
                MatSetValue(connectivity_matrix, col, row, 1.0, INSERT_VALUES);
            }
        }
    }
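    // (The connectivity matrix is assembled across processes, in code elided from this
    // listing, before its local rows are queried below.)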
    PetscInt connectivity_matrix_lo;
    PetscInt connectivity_matrix_hi;
    MatGetOwnershipRange(connectivity_matrix, &connectivity_matrix_lo, &connectivity_matrix_hi);
    unsigned num_local_nodes = connectivity_matrix_hi - connectivity_matrix_lo;
    MatInfo matrix_info;
    MatGetInfo(connectivity_matrix, MAT_LOCAL, &matrix_info);
    unsigned local_num_nz = (unsigned) matrix_info.nz_used;

    size_t size = (num_local_nodes+1)*sizeof(PetscInt);
    void* ptr;
    PetscMalloc(size, &ptr);
    PetscInt* xadj = (PetscInt*) ptr;

    size = local_num_nz*sizeof(PetscInt);
    PetscMalloc(size, &ptr);
    PetscInt* adjncy = (PetscInt*) ptr;
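    // Extract the locally owned rows of the connectivity matrix into compressed-row
    // (xadj/adjncy) arrays, the format MatCreateMPIAdj expects.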
    PetscInt row_num_nz;
    const PetscInt* column_indices;

    xadj[0] = 0;
    for (PetscInt row_global_index=connectivity_matrix_lo; row_global_index<connectivity_matrix_hi; row_global_index++)
    {
        MatGetRow(connectivity_matrix, row_global_index, &row_num_nz, &column_indices, PETSC_NULL);

        unsigned row_local_index = row_global_index - connectivity_matrix_lo;
        xadj[row_local_index+1] = xadj[row_local_index] + row_num_nz;
        for (PetscInt col_index=0; col_index<row_num_nz; col_index++)
        {
            adjncy[xadj[row_local_index] + col_index] = column_indices[col_index];
        }
        MatRestoreRow(connectivity_matrix, row_global_index, &row_num_nz, &column_indices, PETSC_NULL);
    }
    Mat adj_matrix;
    MatCreateMPIAdj(PETSC_COMM_WORLD, num_local_nodes, num_nodes, xadj, adjncy, PETSC_NULL, &adj_matrix);
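    // Let PETSc's MatPartitioning machinery (e.g. ParMETIS) compute the new partition.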
    MatPartitioning part;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);
    MatPartitioningSetAdjacency(part, adj_matrix);
    MatPartitioningSetFromOptions(part);
    IS new_process_numbers;
    MatPartitioningApply(part, &new_process_numbers);
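    // Count how many nodes each process has been given and turn the counts into offsets.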
    PetscInt* num_nodes_per_process = new PetscInt[num_procs];
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
    ISPartitioningCount(new_process_numbers, num_procs, num_nodes_per_process);
#else
    ISPartitioningCount(new_process_numbers, num_nodes_per_process);
#endif
    rProcessorsOffset.resize(num_procs);
    rProcessorsOffset[0] = 0;
    for (PetscInt i=1; i<num_procs; i++)
    {
        rProcessorsOffset[i] = rProcessorsOffset[i-1] + num_nodes_per_process[i-1];
    }
    unsigned my_num_nodes = num_nodes_per_process[local_proc_index];
    delete[] num_nodes_per_process;
    IS new_global_node_indices;
    ISPartitioningToNumbering(new_process_numbers, &new_global_node_indices);

    AO ordering;
    AOCreateBasicIS(new_global_node_indices, PETSC_NULL /*natural ordering*/, &ordering);
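    // Use the AO (application ordering) to translate between the original node numbering
    // and the new PETSc numbering: first for the locally owned contiguous range, then for
    // the full permutation of all nodes.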
    PetscInt* local_range = new PetscInt[my_num_nodes];
    for (unsigned i=0; i<my_num_nodes; i++)
    {
        local_range[i] = rProcessorsOffset[local_proc_index] + i;
    }
    AOApplicationToPetsc(ordering, my_num_nodes, local_range);

    rNodesOwned.insert(local_range, local_range + my_num_nodes);
    delete[] local_range;
    PetscInt* global_range = new PetscInt[num_nodes];
    for (unsigned i=0; i<num_nodes; i++)
    {
        global_range[i] = i;
    }
    AOPetscToApplication(ordering, num_nodes, global_range);
    rNodePermutation.resize(num_nodes);
    std::copy(global_range, global_range+num_nodes, rNodePermutation.begin());
    delete[] global_range;
    // ... (the AO, IS and matrix objects created above are destroyed before returning)
}
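// GeometricPartitioning: each process claims the nodes that fall inside the axis-aligned
// cuboid pRegion it was given; the regions supplied across processes must cover every node
// without overlapping.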
template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::GeometricPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                    std::vector<unsigned>& rNodePermutation,
                                                                    std::set<unsigned>& rNodesOwned,
                                                                    std::vector<unsigned>& rProcessorsOffset,
                                                                    ChasteCuboid<SPACE_DIM>* pRegion)
{
    unsigned num_nodes = rMeshReader.GetNumNodes();

    std::vector<unsigned> node_ownership(num_nodes, 0);
    std::vector<unsigned> node_index_ownership(num_nodes, UNSIGNED_UNSET);
    for (unsigned node=0; node < num_nodes; node++)
    {
        std::vector<double> location = rMeshReader.GetNextNode();
        assert(location.size() == SPACE_DIM);

        const ChastePoint<SPACE_DIM>& lower = pRegion->rGetLowerCorner();
        const ChastePoint<SPACE_DIM>& upper = pRegion->rGetUpperCorner();

        // The node belongs to this process if it lies inside (or on the lower faces of) pRegion
        bool does_contain = true;
        for (unsigned d=0; d<SPACE_DIM; d++)
        {
            bool boundary_check;
            boundary_check = ((location[d] > lower[d]) || fabs(location[d]-lower[d]) < DBL_EPSILON);
            boundary_check *= (location[d] < upper[d]);
            does_contain *= boundary_check;
        }

        if (does_contain)
        {
            node_ownership[node] = 1;
            rNodesOwned.insert(node);
            node_index_ownership[node] = PetscTools::GetMyRank();
        }
    }
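    // Combine the per-process claims: the logical-XOR reduction flags nodes claimed by no
    // process (or by an even number of processes), and the MIN reduction records which rank
    // owns each node.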
    std::vector<unsigned> global_ownership(num_nodes, 0);
    std::vector<unsigned> global_index_ownership(num_nodes, UNSIGNED_UNSET);

    MPI_Allreduce(&node_ownership[0], &global_ownership[0], num_nodes, MPI_UNSIGNED, MPI_LXOR, PETSC_COMM_WORLD);
    for (unsigned i=0; i<num_nodes; i++)
    {
        if (global_ownership[i] == 0)
        {
            EXCEPTION("A node is either not in geometric region, or the regions are not disjoint.");
        }
    }

    MPI_Allreduce(&node_index_ownership[0], &global_index_ownership[0], num_nodes, MPI_UNSIGNED, MPI_MIN, PETSC_COMM_WORLD);
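    // Concatenate each process's nodes in rank order to obtain the new global numbering and
    // the per-process offsets.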
    for (unsigned proc=0; proc<PetscTools::GetNumProcs(); proc++)
    {
        rProcessorsOffset.push_back(rNodePermutation.size());
        for (unsigned node=0; node<num_nodes; node++)
        {
            if (global_index_ownership[node] == proc)
            {
                rNodePermutation.push_back(node);
            }
        }
    }
    assert(rNodePermutation.size() == num_nodes);
}
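// A minimal usage sketch (not part of this file; names and the mesh path are illustrative
// assumptions): the partitioners are static members, so given a mesh reader they can be
// driven directly, e.g. on several MPI processes under PETSc:
//
//     TrianglesMeshReader<3,3> mesh_reader("mesh/test/data/some_tetrahedral_mesh");
//     std::vector<unsigned> permutation;
//     std::set<unsigned> nodes_owned;
//     std::vector<unsigned> processor_offsets;
//     NodePartitioner<3,3>::MetisLibraryPartitioning(mesh_reader, permutation,
//                                                    nodes_owned, processor_offsets);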