Chaste Release::3.4
NodePartitioner.cpp
/*

Copyright (c) 2005-2016, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/
#include <cassert>
#include <algorithm>

#include "Exception.hpp"
#include "NodePartitioner.hpp"
#include "PetscMatTools.hpp"
#include "PetscTools.hpp"
#include "Timer.hpp"
#include "TrianglesMeshReader.hpp"
#include "Warnings.hpp"
#include "petscao.h"
#include "petscmat.h"

/*
 * The following definition fixes an odd incompatibility between METIS 4.0 and Chaste. Since
 * the library was compiled with a plain C compiler, it fails to link when using a C++ compiler.
 * Note that METIS 4.0 fails to compile with g++ or icpc, so a C compiler should be used.
 *
 * Somebody had this problem before: http://www-users.cs.umn.edu/~karypis/.discus/messages/15/113.html?1119486445
 *
 * Note that it was thought necessary to define the function header before the #include statement.
 */

#include <parmetis.h>

#if (PARMETIS_MAJOR_VERSION >= 4) //ParMETIS 4.x and above implies METIS 5.x and above
// Redefine the index type so that we can still use the old name "idxtype"
#define idxtype idx_t
#else
// Old versions of ParMETIS do not need the type redefinition,
// but they are missing the METIS prototype signature definition
extern "C" {
extern void METIS_PartMeshNodal(int*, int*, int*, int*, int*, int*, int*, int*, int*);
}
#endif

template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::DumbPartitioning(AbstractMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
                                                               std::set<unsigned>& rNodesOwned)
{
    // Note that if there is no DistributedVectorFactory in the mesh, then a dumb partition based
    // on rMesh.GetNumNodes() is applied automatically.
    // If there is already a DistributedVectorFactory then that one will be returned by the call below.
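    // For example, with 10 nodes on 3 processes the default factory layout would typically give
    // the half-open ranges [0,4), [4,7) and [7,10), so process 1 would own nodes {4, 5, 6}.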
    if (rMesh.GetDistributedVectorFactory()->GetProblemSize() != rMesh.GetNumNodes())
    {
        EXCEPTION("The distributed vector factory size in the mesh doesn't match the total number of nodes.");
    }

    for (unsigned node_index = rMesh.GetDistributedVectorFactory()->GetLow();
         node_index < rMesh.GetDistributedVectorFactory()->GetHigh();
         node_index++)
    {
        rNodesOwned.insert(node_index);
    }
}

template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::MetisLibraryPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                       std::vector<unsigned>& rNodePermutation,
                                                                       std::set<unsigned>& rNodesOwned,
                                                                       std::vector<unsigned>& rProcessorsOffset)
{
    assert(PetscTools::IsParallel());

    WARN_ONCE_ONLY("METIS_LIBRARY partitioning is deprecated and will be removed from later versions of Chaste");
    assert(ELEMENT_DIM==2 || ELEMENT_DIM==3); // Metis works with triangles and tetras

    TrianglesMeshReader<ELEMENT_DIM, SPACE_DIM>* p_mesh_reader = dynamic_cast<TrianglesMeshReader<ELEMENT_DIM, SPACE_DIM>*>(&rMeshReader);

    unsigned order_of_elements = 1;
    if (p_mesh_reader)
    {
        // A TrianglesMeshReader will let you read meshes with non-linear elements
        order_of_elements = p_mesh_reader->GetOrderOfElements();
    }

    // If it is a quadratic TrianglesMeshReader
    if (order_of_elements == 2)
    {
        // METIS 4.0 cannot partition second-order meshes
        EXCEPTION("Metis cannot partition a quadratic mesh.");
    }

    idxtype nn = rMeshReader.GetNumNodes();
    idxtype* npart = new idxtype[nn];
    assert(npart != NULL);

    // Only the master process will access the element data and perform the partitioning
    if (PetscTools::AmMaster())
    {
        idxtype ne = rMeshReader.GetNumElements();
        idxtype* elmnts = new idxtype[ne * (ELEMENT_DIM+1)];
        assert(elmnts != NULL);

        unsigned counter = 0;
        for (unsigned element_number = 0; element_number < rMeshReader.GetNumElements(); element_number++)
        {
            ElementData element_data = rMeshReader.GetNextElementData();

            for (unsigned i=0; i<ELEMENT_DIM+1; i++)
            {
                elmnts[counter++] = element_data.NodeIndices[i];
            }
        }
        rMeshReader.Reset();

        idxtype nparts = PetscTools::GetNumProcs();
        idxtype edgecut;
        idxtype* epart = new idxtype[ne];
        assert(epart != NULL);
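        // METIS computes both an element partition (epart) and a node partition (npart); only the
        // node partition is used below: epart is freed without being read, and npart is broadcast
        // to every process.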

        Timer::Reset();
#if (PARMETIS_MAJOR_VERSION >= 4) //ParMETIS 4.x and above implies METIS 5.x and above
        // New interface
        // Where to find information on element i in the elmnts array
        idxtype* eptr = new idxtype[ne+1];
        for (idxtype i=0; i<=ne; i++)
        {
            eptr[i] = i * (ELEMENT_DIM+1);
        }
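        // For triangles (ELEMENT_DIM==2), for example, eptr is {0, 3, 6, ...}, so the nodes of
        // element i are elmnts[3*i], elmnts[3*i+1] and elmnts[3*i+2].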
        METIS_PartMeshNodal(&ne, &nn, eptr, elmnts,
                            NULL /*vwgt*/, NULL /*vsize*/, &nparts, NULL /*tpwgts*/,
                            NULL /*options*/, &edgecut /*aka objval*/, epart, npart);
#else
        // Old interface
        int numflag = 0; // 0 means C-style numbering is assumed
        int etype;
        switch (ELEMENT_DIM)
        {
            case 2:
                etype = 1; // 1 is Metis speak for triangles
                break;
            case 3:
                etype = 2; // 2 is Metis speak for tetrahedra
                break;
            default:
                NEVER_REACHED;
        }

        METIS_PartMeshNodal(&ne, &nn, elmnts, &etype, &numflag, &nparts, &edgecut, epart, npart);
#endif
        delete[] elmnts;
        delete[] epart;
    }

    // Here's the new bottleneck: share all the node ownership data
    // idxtype is normally int (see metis-4.0/Lib/struct.h, lines 17-22) but is 64-bit on Windows
    MPI_Datatype mpi_idxtype = MPI_LONG_LONG_INT;
    if (sizeof(idxtype) == sizeof(int))
    {
        mpi_idxtype = MPI_INT;
    }
    MPI_Bcast(npart /*data*/, nn /*size*/, mpi_idxtype, 0 /*From Master*/, PETSC_COMM_WORLD);

    assert(rProcessorsOffset.size() == 0); // Making sure the vector is empty. After calling resize() only newly created memory will be initialised to 0.
    rProcessorsOffset.resize(PetscTools::GetNumProcs(), 0);

    for (unsigned node_index=0; node_index<rMeshReader.GetNumNodes(); node_index++)
    {
        unsigned part_read = npart[node_index];

        // METIS output says I own this node
        if (part_read == PetscTools::GetMyRank())
        {
            rNodesOwned.insert(node_index);
        }

        // A processor's offset is the index of the first node it owns. We compute the offsets
        // incrementally: if node_index belongs to process 3 (of 6, numbered 0 to 5) then the
        // offsets of processes 4 and 5 must each be shifted up by one.
        for (unsigned proc=part_read+1; proc<PetscTools::GetNumProcs(); proc++)
        {
            rProcessorsOffset[proc]++;
        }
    }

    /*
     * Once we know the offsets we can compute the permutation vector
     */
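    // For example, with 2 processes and npart = {1, 0, 1, 0} the loop above gives
    // rProcessorsOffset = {0, 2}, and the permutation computed below is
    // rNodePermutation = {2, 0, 3, 1}: old nodes 1 and 3 become new nodes 0 and 1 on process 0,
    // and old nodes 0 and 2 become new nodes 2 and 3 on process 1.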
    std::vector<unsigned> local_index(PetscTools::GetNumProcs(), 0);

    rNodePermutation.resize(rMeshReader.GetNumNodes());

    for (unsigned node_index=0; node_index<rMeshReader.GetNumNodes(); node_index++)
    {
        unsigned part_read = npart[node_index];

        rNodePermutation[node_index] = rProcessorsOffset[part_read] + local_index[part_read];

        local_index[part_read]++;
    }

    delete[] npart;
}

template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::PetscMatrixPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                      std::vector<unsigned>& rNodePermutation,
                                                                      std::set<unsigned>& rNodesOwned,
                                                                      std::vector<unsigned>& rProcessorsOffset)
{
    assert(PetscTools::IsParallel());
    assert(ELEMENT_DIM==2 || ELEMENT_DIM==3); // Metis works with triangles and tetras

    if (!PetscTools::HasParMetis()) // We must have ParMetis support compiled into Petsc
    {
#define COVERAGE_IGNORE
        WARN_ONCE_ONLY("PETSc support for ParMetis is not installed.");
#undef COVERAGE_IGNORE
    }

    unsigned num_nodes = rMeshReader.GetNumNodes();
    PetscInt num_procs = PetscTools::GetNumProcs();
    unsigned local_proc_index = PetscTools::GetMyRank();

    unsigned num_elements = rMeshReader.GetNumElements();
    unsigned num_local_elements = num_elements / num_procs;
    unsigned first_local_element = num_local_elements * local_proc_index;
    if (PetscTools::AmTopMost())
    {
        // Take the excess elements
        num_local_elements += num_elements - (num_local_elements * num_procs);
    }
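    // For example, 10 elements on 3 processes gives num_local_elements = 3, so processes 0 and 1
    // read elements [0,3) and [3,6), while the top-most process takes the excess and reads [6,10).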

    PetscTools::Barrier();
    Timer::Reset();

    /*
     * Create a PETSc matrix which will have a 1 for each pair of adjacent nodes.
     */
    Mat connectivity_matrix;
    PetscTools::SetupMat(connectivity_matrix, num_nodes, num_nodes, 54, PETSC_DECIDE, PETSC_DECIDE, false);

    if (!rMeshReader.IsFileFormatBinary())
    {
        // Advance the file pointer to the first element I own
        for (unsigned element_index = 0; element_index < first_local_element; element_index++)
        {
            rMeshReader.GetNextElementData();
        }
    }

    // In the loop below we assume that there exist edges between any pair of nodes in an element. This is
    // a reasonable assumption for triangles and tetrahedra. This won't be the case for quadratic simplices,
    // squares or hexahedra (or higher-order elements), leading to slightly suboptimal partitions in these
    // cases.
    // We allow ELEMENT_DIM smaller than SPACE_DIM in case this is a 2D mesh in a 3D space.
    assert(SPACE_DIM >= ELEMENT_DIM);

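    // For example, a tetrahedron with nodes {3, 7, 9, 12} contributes the six edges
    // (3,7), (3,9), (3,12), (7,9), (7,12) and (9,12), each set symmetrically in the matrix below.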
    for (unsigned element_index = 0; element_index < num_local_elements; element_index++)
    {
        ElementData element_data;

        if (rMeshReader.IsFileFormatBinary())
        {
            element_data = rMeshReader.GetElementData(first_local_element + element_index);
        }
        else
        {
            element_data = rMeshReader.GetNextElementData();
        }

        for (unsigned i=0; i<element_data.NodeIndices.size(); i++)
        {
            unsigned row = element_data.NodeIndices[i];
            for (unsigned j=0; j<i; j++)
            {
                unsigned col = element_data.NodeIndices[j];
                MatSetValue(connectivity_matrix, row, col, 1.0, INSERT_VALUES);
                MatSetValue(connectivity_matrix, col, row, 1.0, INSERT_VALUES);
            }
        }
    }
    PetscMatTools::Finalise(connectivity_matrix);

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("Connectivity matrix assembly");
    }

    rMeshReader.Reset();

    PetscInt connectivity_matrix_lo;
    PetscInt connectivity_matrix_hi;
    MatGetOwnershipRange(connectivity_matrix, &connectivity_matrix_lo, &connectivity_matrix_hi);

    unsigned num_local_nodes = connectivity_matrix_hi - connectivity_matrix_lo;

    /* PETSc MatCreateMPIAdj and ParMETIS rely on adjacency arrays
     * Named here:    xadj  adjncy
     * ParMETIS name: xadj  adjncy  (see section 4.2 in the ParMETIS manual)
     * PETSc name:    i     j       (see MatCreateMPIAdj in the PETSc manual)
     *
     * The adjacency information of all nodes is listed in the main array, adjncy. Since each node
     * has a variable number of adjacent nodes, the array xadj is used to store the index (in adjncy) where
     * this information starts. Since xadj[i] is the start of node i's information, xadj[i+1] marks the end.
     */
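    // For example, for a single triangle with nodes 0, 1 and 2 (each node adjacent to the other two):
    //     adjncy = {1, 2,  0, 2,  0, 1}
    //     xadj   = {0, 2, 4, 6}
    // so node 1's neighbours are adjncy[xadj[1]] .. adjncy[xadj[2]-1], i.e. {0, 2}.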
    MatInfo matrix_info;
    MatGetInfo(connectivity_matrix, MAT_LOCAL, &matrix_info);
    unsigned local_num_nz = (unsigned) matrix_info.nz_used;

    size_t size = (num_local_nodes+1)*sizeof(PetscInt);
    void* ptr;
    PetscMalloc(size, &ptr);
    PetscInt* xadj = (PetscInt*) ptr;
    size = local_num_nz*sizeof(PetscInt);
    PetscMalloc(size, &ptr);
    PetscInt* adjncy = (PetscInt*) ptr;

    PetscInt row_num_nz;
    const PetscInt* column_indices;

    xadj[0] = 0;
    for (PetscInt row_global_index=connectivity_matrix_lo; row_global_index<connectivity_matrix_hi; row_global_index++)
    {
        MatGetRow(connectivity_matrix, row_global_index, &row_num_nz, &column_indices, PETSC_NULL);

        unsigned row_local_index = row_global_index - connectivity_matrix_lo;
        xadj[row_local_index+1] = xadj[row_local_index] + row_num_nz;
        for (PetscInt col_index=0; col_index<row_num_nz; col_index++)
        {
            adjncy[xadj[row_local_index] + col_index] = column_indices[col_index];
        }

        MatRestoreRow(connectivity_matrix, row_global_index, &row_num_nz, &column_indices, PETSC_NULL);
    }

    PetscTools::Destroy(connectivity_matrix);

    // Convert to an adjacency matrix
    Mat adj_matrix;
    MatCreateMPIAdj(PETSC_COMM_WORLD, num_local_nodes, num_nodes, xadj, adjncy, PETSC_NULL, &adj_matrix);

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("Adjacency matrix creation");
    }

    // Get PETSc to call ParMETIS
    MatPartitioning part;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);
    MatPartitioningSetAdjacency(part, adj_matrix);
    MatPartitioningSetFromOptions(part);
    IS new_process_numbers;

    MatPartitioningApply(part, &new_process_numbers);
    MatPartitioningDestroy(PETSC_DESTROY_PARAM(part));

    PetscTools::Destroy(adj_matrix);

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("PETSc-ParMETIS call");
    }

    // Figure out who owns what - processor offsets
    PetscInt* num_nodes_per_process = new PetscInt[num_procs];
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
    ISPartitioningCount(new_process_numbers, num_procs, num_nodes_per_process);
#else
    ISPartitioningCount(new_process_numbers, num_nodes_per_process);
#endif

    rProcessorsOffset.resize(num_procs);
    rProcessorsOffset[0] = 0;
    for (PetscInt i=1; i<num_procs; i++)
    {
        rProcessorsOffset[i] = rProcessorsOffset[i-1] + num_nodes_per_process[i-1];
    }
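    // The offsets are a cumulative sum of the counts: per-process node counts of {3, 4, 3},
    // for example, give rProcessorsOffset = {0, 3, 7}.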
    unsigned my_num_nodes = num_nodes_per_process[local_proc_index];
    delete[] num_nodes_per_process;

    // Figure out who owns what - new node numbering
    IS new_global_node_indices;
    ISPartitioningToNumbering(new_process_numbers, &new_global_node_indices);

    // Index sets only give local information; we want global
    AO ordering;
    AOCreateBasicIS(new_global_node_indices, PETSC_NULL /* natural ordering */, &ordering);

    // The locally owned range under the new numbering
    PetscInt* local_range = new PetscInt[my_num_nodes];
    for (unsigned i=0; i<my_num_nodes; i++)
    {
        local_range[i] = rProcessorsOffset[local_proc_index] + i;
    }
    AOApplicationToPetsc(ordering, my_num_nodes, local_range);
    //AOView(ordering, PETSC_VIEWER_STDOUT_WORLD);

    // Fill in rNodesOwned
    rNodesOwned.insert(local_range, local_range + my_num_nodes);
    delete[] local_range;

    // Once we know the offsets we can compute the permutation vector
    PetscInt* global_range = new PetscInt[num_nodes];
    for (unsigned i=0; i<num_nodes; i++)
    {
        global_range[i] = i;
    }
    AOPetscToApplication(ordering, num_nodes, global_range);

    rNodePermutation.resize(num_nodes);
    std::copy(global_range, global_range+num_nodes, rNodePermutation.begin());
    delete[] global_range;

    AODestroy(PETSC_DESTROY_PARAM(ordering));
    ISDestroy(PETSC_DESTROY_PARAM(new_process_numbers));
    ISDestroy(PETSC_DESTROY_PARAM(new_global_node_indices));

    PetscTools::Barrier();
    if (PetscTools::AmMaster())
    {
        Timer::PrintAndReset("PETSc-ParMETIS output manipulation");
    }
}

template <unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void NodePartitioner<ELEMENT_DIM, SPACE_DIM>::GeometricPartitioning(AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
                                                                    std::vector<unsigned>& rNodePermutation,
                                                                    std::set<unsigned>& rNodesOwned,
                                                                    std::vector<unsigned>& rProcessorsOffset,
                                                                    ChasteCuboid<SPACE_DIM>* pRegion)
{
    unsigned num_nodes = rMeshReader.GetNumNodes();

    // Work out where each node should lie.
    std::vector<unsigned> node_ownership(num_nodes, 0);
    std::vector<unsigned> node_index_ownership(num_nodes, UNSIGNED_UNSET);

    for (unsigned node=0; node < num_nodes; node++)
    {
        std::vector<double> location = rMeshReader.GetNextNode();

        // Make sure it is the correct size
        assert(location.size() == SPACE_DIM);

        // Establish whether it lies in the domain. ChasteCuboid::DoesContain is
        // insufficient for this as it treats all boundaries as open.
        ChastePoint<SPACE_DIM> lower = pRegion->rGetLowerCorner();
        ChastePoint<SPACE_DIM> upper = pRegion->rGetUpperCorner();

        bool does_contain = true;

        for (unsigned d=0; d<SPACE_DIM; d++)
        {
            bool boundary_check;
            boundary_check = ((location[d] > lower[d]) || sqrt((location[d]-lower[d])*(location[d]-lower[d])) < DBL_EPSILON);
            boundary_check *= (location[d] < upper[d]);
            does_contain *= boundary_check;
        }
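        // The box is therefore treated as closed at the lower corner and open at the upper corner,
        // so a node on a face shared by two adjacent regions should be claimed by only one of them.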

        if (does_contain)
        {
            node_ownership[node] = 1;
            rNodesOwned.insert(node);
            node_index_ownership[node] = PetscTools::GetMyRank();
        }
    }

    // Make sure each node will be owned by exactly one process
    std::vector<unsigned> global_ownership(num_nodes, 0);
    std::vector<unsigned> global_index_ownership(num_nodes, UNSIGNED_UNSET);

    MPI_Allreduce(&node_ownership[0], &global_ownership[0], num_nodes, MPI_UNSIGNED, MPI_LXOR, PETSC_COMM_WORLD);
    for (unsigned i=0; i<num_nodes; i++)
    {
        if (global_ownership[i] == 0)
        {
            EXCEPTION("A node is either not in geometric region, or the regions are not disjoint.");
        }
    }

    // Create the permutation and offset vectors.
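    // Non-owning processes report UNSIGNED_UNSET for a node, so the MIN reduction below leaves the
    // rank of the owning process in global_index_ownership.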
    MPI_Allreduce(&node_index_ownership[0], &global_index_ownership[0], num_nodes, MPI_UNSIGNED, MPI_MIN, PETSC_COMM_WORLD);

    for (unsigned proc=0; proc<PetscTools::GetNumProcs(); proc++)
    {
        rProcessorsOffset.push_back(rNodePermutation.size());
        for (unsigned node=0; node<num_nodes; node++)
        {
            if (global_index_ownership[node] == proc)
            {
                rNodePermutation.push_back(node);
            }
        }
    }
    assert(rNodePermutation.size() == num_nodes);
}

// Explicit instantiation

template class NodePartitioner<1,1>;
template class NodePartitioner<1,2>;
template class NodePartitioner<1,3>;
template class NodePartitioner<2,2>;
template class NodePartitioner<2,3>;
template class NodePartitioner<3,3>;