NodePartitioner.cpp

/*

Copyright (c) 2005-2015, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/
#include <cassert>
#include <algorithm>
#include <cfloat> // for DBL_EPSILON
#include <cmath>  // for fabs

#include "Exception.hpp"
#include "NodePartitioner.hpp"
#include "PetscMatTools.hpp"
#include "PetscTools.hpp"
#include "Timer.hpp"
#include "TrianglesMeshReader.hpp"
#include "Warnings.hpp"
#include "petscao.h"
#include "petscmat.h"

/*
 * The following definition fixes an odd incompatibility between METIS 4.0 and Chaste. Since
 * the library is compiled with a plain C compiler, it fails to link when used from a C++ compiler.
 * Note that METIS 4.0 fails to compile with g++ or icpc, so a C compiler should be used.
 *
 * Somebody had this problem before: http://www-users.cs.umn.edu/~karypis/.discus/messages/15/113.html?1119486445
 *
 * Note that it was thought necessary to declare the function prototype before the #include statement.
 */

#include <parmetis.h>

#if (PARMETIS_MAJOR_VERSION >= 4) //ParMETIS 4.x and above implies METIS 5.x and above
//Redefine the index type so that we can still use the old name "idxtype"
#define idxtype idx_t
#else
//Old version of ParMETIS does not need the type redefinition
//Old version of ParMETIS is missing the METIS prototype signature definition
extern "C" {
extern void METIS_PartMeshNodal(int*, int*, int*, int*, int*, int*, int*, int*, int*);
}
#endif


template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::DumbPartitioning(AbstractMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
                                                               std::set<unsigned>& rNodesOwned)
{
    // Note that if there is no DistributedVectorFactory in the mesh, then a dumb partition based
    // on rMesh.GetNumNodes() is applied automatically.
    // If there is a DistributedVectorFactory then that one will be used.
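    // Illustrative example (assuming PETSc's default even layout): with 10 nodes on 3 processes,
    // process 0 would own nodes [0,4), process 1 nodes [4,7) and process 2 nodes [7,10).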
    if (rMesh.GetDistributedVectorFactory()->GetProblemSize() != rMesh.GetNumNodes())
    {
        EXCEPTION("The distributed vector factory size in the mesh doesn't match the total number of nodes.");
    }

    for (unsigned node_index = rMesh.GetDistributedVectorFactory()->GetLow();
         node_index < rMesh.GetDistributedVectorFactory()->GetHigh();
         node_index++)
    {
        rNodesOwned.insert(node_index);
    }
}


template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::MetisLibraryPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                       std::vector<unsigned>& rNodePermutation,
                                                                       std::set<unsigned>& rNodesOwned,
                                                                       std::vector<unsigned>& rProcessorsOffset)
{
    assert(PetscTools::IsParallel());

    WARN_ONCE_ONLY("METIS_LIBRARY partitioning is deprecated and will be removed from later versions of Chaste");
    assert(ELEMENT_DIM==2 || ELEMENT_DIM==3); // Metis works with triangles and tetras

    TrianglesMeshReader<ELEMENT_DIM, SPACE_DIM>* p_mesh_reader = dynamic_cast<TrianglesMeshReader<ELEMENT_DIM, SPACE_DIM>*>(&rMeshReader);

    unsigned order_of_elements = 1;
    if (p_mesh_reader)
    {
        //A triangles mesh reader will let you read with non-linear elements
        order_of_elements = p_mesh_reader->GetOrderOfElements();
    }

    // If it is a quadratic TrianglesMeshReader
    if (order_of_elements == 2)
    {
        //Metis 4.0 cannot partition second order meshes
        EXCEPTION("Metis cannot partition a quadratic mesh.");
    }

    idxtype nn = rMeshReader.GetNumNodes();
    idxtype* npart = new idxtype[nn];
    assert(npart != NULL);

    //Only the master process will access the element data and perform the partitioning
    if (PetscTools::AmMaster())
    {
        idxtype ne = rMeshReader.GetNumElements();
        idxtype* elmnts = new idxtype[ne * (ELEMENT_DIM+1)];
        assert(elmnts != NULL);

        unsigned counter = 0;
        for (unsigned element_number = 0; element_number < rMeshReader.GetNumElements(); element_number++)
        {
            ElementData element_data = rMeshReader.GetNextElementData();

            for (unsigned i=0; i<ELEMENT_DIM+1; i++)
            {
                elmnts[counter++] = element_data.NodeIndices[i];
            }
        }
        rMeshReader.Reset();

        idxtype nparts = PetscTools::GetNumProcs();
        idxtype edgecut;
        idxtype* epart = new idxtype[ne];
        assert(epart != NULL);

        Timer::Reset();
#if (PARMETIS_MAJOR_VERSION >= 4) //ParMETIS 4.x and above implies METIS 5.x and above
        //New interface
        //Where to find information on element i in the elmnts array
        idxtype* eptr = new idxtype[ne+1];
        for (idxtype i=0; i<=ne; i++)
        {
            eptr[i] = i * (ELEMENT_DIM+1);
        }
        METIS_PartMeshNodal(&ne, &nn, eptr, elmnts,
                            NULL /*vwgt*/, NULL /*vsize*/, &nparts, NULL /*tpwgts*/,
                            NULL /*options*/, &edgecut /*aka objval*/, epart, npart);
        delete[] eptr; // eptr is no longer needed once METIS has been called
#else
        //Old interface
        int numflag = 0; //0 means C-style numbering is assumed
        int etype;
        switch (ELEMENT_DIM)
        {
            case 2:
                etype = 1; //1 is Metis speak for triangles
                break;
            case 3:
                etype = 2; //2 is Metis speak for tetrahedra
                break;
            default:
                NEVER_REACHED;
        }

        METIS_PartMeshNodal(&ne, &nn, elmnts, &etype, &numflag, &nparts, &edgecut, epart, npart);
#endif
        delete[] elmnts;
        delete[] epart;
    }

    //Here's the new bottleneck: share all the node ownership data
    //idxtype is normally int (see metis-4.0/Lib/struct.h lines 17-22) but is 64-bit on Windows
    MPI_Datatype mpi_idxtype = MPI_LONG_LONG_INT;
    if (sizeof(idxtype) == sizeof(int))
    {
        mpi_idxtype = MPI_INT;
    }
    MPI_Bcast(npart /*data*/, nn /*size*/, mpi_idxtype, 0 /*From Master*/, PETSC_COMM_WORLD);

    assert(rProcessorsOffset.size() == 0); // Making sure the vector is empty. After calling resize() only newly created memory will be initialised to 0.
    rProcessorsOffset.resize(PetscTools::GetNumProcs(), 0);

    for (unsigned node_index=0; node_index<rMeshReader.GetNumNodes(); node_index++)
    {
        unsigned part_read = npart[node_index];

        // METIS output says I own this node
        if (part_read == PetscTools::GetMyRank())
        {
            rNodesOwned.insert(node_index);
        }

        // Offset is defined as the first node owned by a processor. We compute it incrementally:
        // if node_index belongs to processor 3 (of 6), the offsets of processors 4, 5 and 6
        // must each be shifted up by one position.
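        // Illustrative example (hypothetical numbers): with 3 processes and npart = {0, 1, 0, 2, 1},
        // process 0 owns nodes 0 and 2, process 1 owns nodes 1 and 4, process 2 owns node 3,
        // so after the outer loop rProcessorsOffset = {0, 2, 4}.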
        for (unsigned proc=part_read+1; proc<PetscTools::GetNumProcs(); proc++)
        {
            rProcessorsOffset[proc]++;
        }
    }

    /*
     * Once we know the offsets we can compute the permutation vector
     */
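    // Continuing the illustrative example above: with npart = {0, 1, 0, 2, 1} and
    // rProcessorsOffset = {0, 2, 4}, the loop below gives rNodePermutation = {0, 2, 1, 4, 3},
    // i.e. each node's new global index is its owner's offset plus a per-owner local counter.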
    std::vector<unsigned> local_index(PetscTools::GetNumProcs(), 0);

    rNodePermutation.resize(rMeshReader.GetNumNodes());

    for (unsigned node_index=0; node_index<rMeshReader.GetNumNodes(); node_index++)
    {
        unsigned part_read = npart[node_index];

        rNodePermutation[node_index] = rProcessorsOffset[part_read] + local_index[part_read];

        local_index[part_read]++;
    }

    delete[] npart;
}


template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::PetscMatrixPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                      std::vector<unsigned>& rNodePermutation,
                                                                      std::set<unsigned>& rNodesOwned,
                                                                      std::vector<unsigned>& rProcessorsOffset)
{
    assert(PetscTools::IsParallel());
    assert(ELEMENT_DIM==2 || ELEMENT_DIM==3); // Metis works with triangles and tetras

    if (!PetscTools::HasParMetis()) //We must have ParMetis support compiled into Petsc
    {
#define COVERAGE_IGNORE
        WARN_ONCE_ONLY("PETSc support for ParMetis is not installed.");
#undef COVERAGE_IGNORE
    }

    unsigned num_nodes = rMeshReader.GetNumNodes();
    PetscInt num_procs = PetscTools::GetNumProcs();
    unsigned local_proc_index = PetscTools::GetMyRank();

    unsigned num_elements = rMeshReader.GetNumElements();
    unsigned num_local_elements = num_elements / num_procs;
    unsigned first_local_element = num_local_elements * local_proc_index;
    if (PetscTools::AmTopMost())
    {
        // Take the excess elements
        num_local_elements += num_elements - (num_local_elements * num_procs);
    }
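    // Illustrative example (hypothetical numbers): with 10 elements on 3 processes, processes 0 and 1
    // each read 3 elements (starting at elements 0 and 3) and the top-most process reads the remaining 4.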

    PetscTools::Barrier();
    Timer::Reset();

    /*
     * Create a PETSc matrix which will have a 1 for each pair of adjacent nodes.
     */
    Mat connectivity_matrix;
    PetscTools::SetupMat(connectivity_matrix, num_nodes, num_nodes, 54, PETSC_DECIDE, PETSC_DECIDE, false);

    if (!rMeshReader.IsFileFormatBinary())
    {
        // Advance the file pointer to the first element I own
        for (unsigned element_index = 0; element_index < first_local_element; element_index++)
        {
            rMeshReader.GetNextElementData();
        }
    }

    // In the loop below we assume that there exist edges between any pair of nodes in an element. This is
    // a reasonable assumption for triangles and tetrahedra. This won't be the case for quadratic simplices,
    // squares or hexahedra (or higher order elements), leading to slightly suboptimal partitions in these
    // cases.
    // We allow ELEMENT_DIM smaller than SPACE_DIM in case this is a 2D mesh in a 3D space.
    assert(SPACE_DIM >= ELEMENT_DIM);

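    // Illustrative example (hypothetical node numbers): a tetrahedron with nodes {3, 7, 9, 12} sets
    // entries (7,3), (9,3), (9,7), (12,3), (12,7), (12,9) and their transposes to 1 in the loop below.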
    for (unsigned element_index = 0; element_index < num_local_elements; element_index++)
    {
        ElementData element_data;

        if (rMeshReader.IsFileFormatBinary())
        {
            element_data = rMeshReader.GetElementData(first_local_element + element_index);
        }
        else
        {
            element_data = rMeshReader.GetNextElementData();
        }

        for (unsigned i=0; i<element_data.NodeIndices.size(); i++)
        {
            unsigned row = element_data.NodeIndices[i];
            for (unsigned j=0; j<i; j++)
            {
                unsigned col = element_data.NodeIndices[j];
                MatSetValue(connectivity_matrix, row, col, 1.0, INSERT_VALUES);
                MatSetValue(connectivity_matrix, col, row, 1.0, INSERT_VALUES);
            }
        }
    }

    PetscMatTools::Finalise(connectivity_matrix);

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("Connectivity matrix assembly");
    }

    rMeshReader.Reset();

    PetscInt connectivity_matrix_lo;
    PetscInt connectivity_matrix_hi;
    MatGetOwnershipRange(connectivity_matrix, &connectivity_matrix_lo, &connectivity_matrix_hi);

    unsigned num_local_nodes = connectivity_matrix_hi - connectivity_matrix_lo;

    /* PETSc MatCreateMPIAdj and ParMETIS rely on adjacency arrays,
     * named here:      xadj            adjncy
     * ParMETIS name:   xadj            adjncy    (see S4.2 in the ParMETIS manual)
     * PETSc name:      i               j         (see MatCreateMPIAdj in the PETSc manual)
     *
     * The adjacency information of all nodes is listed in the main array, adjncy. Since each node
     * has a variable number of adjacent nodes, the array xadj is used to store the index (in adjncy) where
     * this information starts. Since xadj[i] is the start of node i's information, xadj[i+1] marks the end.
     */
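    // Illustrative example (hypothetical graph): for a 3-node path 0-1-2 held entirely on one process,
    // adjncy = {1, 0, 2, 1} and xadj = {0, 1, 3, 4}, so node 1's neighbours are adjncy[1..2] = {0, 2}.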
    MatInfo matrix_info;
    MatGetInfo(connectivity_matrix, MAT_LOCAL, &matrix_info);
    unsigned local_num_nz = (unsigned) matrix_info.nz_used;

    size_t size = (num_local_nodes+1)*sizeof(PetscInt);
    void* ptr;
    PetscMalloc(size, &ptr);
    PetscInt* xadj = (PetscInt*) ptr;
    size = local_num_nz*sizeof(PetscInt);
    PetscMalloc(size, &ptr);
    PetscInt* adjncy = (PetscInt*) ptr;

    PetscInt row_num_nz;
    const PetscInt* column_indices;

    xadj[0] = 0;
    for (PetscInt row_global_index=connectivity_matrix_lo; row_global_index<connectivity_matrix_hi; row_global_index++)
    {
        MatGetRow(connectivity_matrix, row_global_index, &row_num_nz, &column_indices, PETSC_NULL);

        unsigned row_local_index = row_global_index - connectivity_matrix_lo;
        xadj[row_local_index+1] = xadj[row_local_index] + row_num_nz;
        for (PetscInt col_index=0; col_index<row_num_nz; col_index++)
        {
            adjncy[xadj[row_local_index] + col_index] = column_indices[col_index];
        }

        MatRestoreRow(connectivity_matrix, row_global_index, &row_num_nz, &column_indices, PETSC_NULL);
    }

    PetscTools::Destroy(connectivity_matrix);

    // Convert to an adjacency matrix
    Mat adj_matrix;
    MatCreateMPIAdj(PETSC_COMM_WORLD, num_local_nodes, num_nodes, xadj, adjncy, PETSC_NULL, &adj_matrix);

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("Adjacency matrix creation");
    }

    // Get PETSc to call ParMETIS
    MatPartitioning part;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);
    MatPartitioningSetAdjacency(part, adj_matrix);
    MatPartitioningSetFromOptions(part);
    IS new_process_numbers;

    MatPartitioningApply(part, &new_process_numbers);
    MatPartitioningDestroy(PETSC_DESTROY_PARAM(part));

    PetscTools::Destroy(adj_matrix);

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("PETSc-ParMETIS call");
    }

    // Figure out who owns what - processor offsets
    PetscInt* num_nodes_per_process = new PetscInt[num_procs];
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
    ISPartitioningCount(new_process_numbers, num_procs, num_nodes_per_process);
#else
    ISPartitioningCount(new_process_numbers, num_nodes_per_process);
#endif

    rProcessorsOffset.resize(num_procs);
    rProcessorsOffset[0] = 0;
    for (PetscInt i=1; i<num_procs; i++)
    {
        rProcessorsOffset[i] = rProcessorsOffset[i-1] + num_nodes_per_process[i-1];
    }
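    // Illustrative example (hypothetical counts): num_nodes_per_process = {4, 3, 3} gives
    // rProcessorsOffset = {0, 4, 7}.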
    unsigned my_num_nodes = num_nodes_per_process[local_proc_index];
    delete[] num_nodes_per_process;

    // Figure out who owns what - new node numbering
    IS new_global_node_indices;
    ISPartitioningToNumbering(new_process_numbers, &new_global_node_indices);

    // Index sets only give local information, we want global
    AO ordering;
    AOCreateBasicIS(new_global_node_indices, PETSC_NULL /* natural ordering */, &ordering);

    // The locally owned range under the new numbering
    PetscInt* local_range = new PetscInt[my_num_nodes];
    for (unsigned i=0; i<my_num_nodes; i++)
    {
        local_range[i] = rProcessorsOffset[local_proc_index] + i;
    }
    AOApplicationToPetsc(ordering, my_num_nodes, local_range);
    //AOView(ordering, PETSC_VIEWER_STDOUT_WORLD);

    // Fill in rNodesOwned
    rNodesOwned.insert(local_range, local_range + my_num_nodes);
    delete[] local_range;

    // Once we know the offsets we can compute the permutation vector
    PetscInt* global_range = new PetscInt[num_nodes];
    for (unsigned i=0; i<num_nodes; i++)
    {
        global_range[i] = i;
    }
    AOPetscToApplication(ordering, num_nodes, global_range);

    rNodePermutation.resize(num_nodes);
    std::copy(global_range, global_range+num_nodes, rNodePermutation.begin());
    delete[] global_range;

    AODestroy(PETSC_DESTROY_PARAM(ordering));
    ISDestroy(PETSC_DESTROY_PARAM(new_process_numbers));
    ISDestroy(PETSC_DESTROY_PARAM(new_global_node_indices));

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("PETSc-ParMETIS output manipulation");
    }
}

template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::GeometricPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                    std::vector<unsigned>& rNodePermutation,
                                                                    std::set<unsigned>& rNodesOwned,
                                                                    std::vector<unsigned>& rProcessorsOffset,
                                                                    ChasteCuboid<SPACE_DIM>* pRegion)
{
    PetscTools::Barrier();

    unsigned num_nodes = rMeshReader.GetNumNodes();

    // Work out where each node should lie.
    std::vector<unsigned> node_ownership(num_nodes, 0);
    std::vector<unsigned> node_index_ownership(num_nodes, UNSIGNED_UNSET);

    for (unsigned node=0; node < num_nodes; node++)
    {
        std::vector<double> location = rMeshReader.GetNextNode();

        // Make sure it is the correct size
        assert(location.size() == SPACE_DIM);

        // Establish whether it lies in the domain. ChasteCuboid::DoesContain is
        // insufficient for this as it treats all boundaries as open.
        ChastePoint<SPACE_DIM> lower = pRegion->rGetLowerCorner();
        ChastePoint<SPACE_DIM> upper = pRegion->rGetUpperCorner();

        bool does_contain = true;

        for (unsigned d=0; d<SPACE_DIM; d++)
        {
            bool boundary_check = (location[d] > lower[d]) || (fabs(location[d] - lower[d]) < DBL_EPSILON);
            boundary_check = boundary_check && (location[d] < upper[d]);
            does_contain = does_contain && boundary_check;
        }
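        // Note: the check above is closed at each lower face (to within DBL_EPSILON) and open at each
        // upper face, so two abutting regions do not both claim a node lying on their shared face.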

        if (does_contain)
        {
            node_ownership[node] = 1;
            rNodesOwned.insert(node);
            node_index_ownership[node] = PetscTools::GetMyRank();
        }
    }

    // Make sure each node will be owned by exactly one process
    std::vector<unsigned> global_ownership(num_nodes, 0);
    std::vector<unsigned> global_index_ownership(num_nodes, UNSIGNED_UNSET);

    MPI_Allreduce(&node_ownership[0], &global_ownership[0], num_nodes, MPI_UNSIGNED, MPI_LXOR, PETSC_COMM_WORLD);
    for (unsigned i=0; i<num_nodes; i++)
    {
        if (global_ownership[i] == 0)
        {
            EXCEPTION("A node is either not in geometric region, or the regions are not disjoint.");
        }
    }

    // Create the permutation and offset vectors.
    MPI_Allreduce(&node_index_ownership[0], &global_index_ownership[0], num_nodes, MPI_UNSIGNED, MPI_MIN, PETSC_COMM_WORLD);

    for (unsigned proc=0; proc<PetscTools::GetNumProcs(); proc++)
    {
        rProcessorsOffset.push_back(rNodePermutation.size());
        for (unsigned node=0; node<num_nodes; node++)
        {
            if (global_index_ownership[node] == proc)
            {
                rNodePermutation.push_back(node);
            }
        }
    }
    assert(rNodePermutation.size() == num_nodes);
}

// Explicit instantiation

template class NodePartitioner<1,1>;
template class NodePartitioner<1,2>;
template class NodePartitioner<1,3>;
template class NodePartitioner<2,2>;
template class NodePartitioner<2,3>;
template class NodePartitioner<3,3>;
