Chaste  Release::3.4
AbstractTetrahedralMeshWriter.cpp
/*

Copyright (c) 2005-2016, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

// Disable PETSc logging of MPI calls (we don't use this, anyway) to fix
// "right-hand operand of comma has no effect" warnings when building with
// PETSc 2.2.1.
#define PETSC_HAVE_BROKEN_RECURSIVE_MACRO

#include "AbstractTetrahedralMeshWriter.hpp"
#include "AbstractTetrahedralMesh.hpp"
#include "DistributedTetrahedralMesh.hpp"
#include "MixedDimensionMesh.hpp"
#include "Version.hpp"
#include "Exception.hpp"

#include <mpi.h> // For MPI_Send, MPI_Recv

const char* MeshEventHandler::EventName[] = { "Tri write","BinTri write","VTK write","PVTK write", "node write", "ele write", "face write", "ncl write", "comm1","comm2","Total"};
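// (These labels name the MeshEventHandler timing events used for instrumentation in this
// file, e.g. the NODE, ELE, FACE and NCL write phases of the parallel write below.)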

/** Convenience struct collecting the iterators used when writing directly from a mesh. */
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
struct MeshWriterIterators
{
    /** Iterator over the mesh's nodes */
    typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator* pNodeIter;

    /** Iterator over the mesh's elements */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator* pElemIter;

    /** Iterator over the mesh's boundary elements */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator* pBoundaryElemIter;
};
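// Note: the iterators are held by pointer so that a writer can be constructed before any
// mesh is attached; they are allocated in WriteFilesUsingMesh() and released in the destructor.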

// Implementation

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AbstractTetrahedralMeshWriter(const std::string& rDirectory,
                                                                                     const std::string& rBaseName,
                                                                                     const bool clearOutputDir)
    : AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>(rDirectory, rBaseName, clearOutputDir),
      mpNodeMap(NULL),
      mNodesPerElement(ELEMENT_DIM+1),
      mNodesPerBoundaryElement(ELEMENT_DIM),
      mpMesh(NULL),
      mpDistributedMesh(NULL),
      mpMixedMesh(NULL),
      mpIters(new MeshWriterIterators<ELEMENT_DIM,SPACE_DIM>),
      mNodeCounterForParallelMesh(0),
      mElementCounterForParallelMesh(0),
      mBoundaryElementCounterForParallelMesh(0),
      mCableElementCounterForParallelMesh(0),
      mFilesAreBinary(false)
{
    mpIters->pNodeIter = NULL;
    mpIters->pElemIter = NULL;
    mpIters->pBoundaryElemIter = NULL;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::~AbstractTetrahedralMeshWriter()
{
    if (mpIters->pNodeIter)
    {
        delete mpIters->pNodeIter;
    }
    if (mpIters->pElemIter)
    {
        delete mpIters->pElemIter;
    }
    if (mpIters->pBoundaryElemIter)
    {
        delete mpIters->pBoundaryElemIter;
    }

    delete mpIters;

    if (mpNodeMap)
    {
        delete mpNodeMap;
    }
}


template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
std::vector<double> AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextNode()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        std::vector<double> coords(SPACE_DIM);

        // Iterate over the locally-owned nodes
        if ((*(mpIters->pNodeIter)) != mpMesh->GetNodeIteratorEnd())
        {
            // Either this is a sequential mesh (and we own it all)
            // or it's parallel (and the master owns the first chunk)
            for (unsigned j=0; j<SPACE_DIM; j++)
            {
                coords[j] = (*(mpIters->pNodeIter))->GetPoint()[j];
            }

            mNodeCounterForParallelMesh = (*(mpIters->pNodeIter))->GetIndex() + 1; // Ready for when we run out of local nodes

            ++(*(mpIters->pNodeIter));
            return coords;
        }

        // If we didn't return then the iterator has reached the end of the local nodes.
        // It must be a parallel mesh and we are expecting messages...

        assert(mpDistributedMesh != NULL);

        MPI_Status status;
        status.MPI_ERROR = MPI_SUCCESS; // For MPICH2
        // Do the receive, and convert to std::vector on the master
        boost::scoped_array<double> raw_coords(new double[SPACE_DIM]);
        MPI_Recv(raw_coords.get(), SPACE_DIM, MPI_DOUBLE, MPI_ANY_SOURCE, mNodeCounterForParallelMesh, PETSC_COMM_WORLD, &status);
        assert(status.MPI_ERROR == MPI_SUCCESS);
        for (unsigned j=0; j<coords.size(); j++)
        {
            coords[j] = raw_coords[j];
        }

        mNodeCounterForParallelMesh++;
        return coords;
    }
    else // Not writing from a mesh: fall back to the mesh-reader-based implementation
    {
        return AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextNode();
    }
}
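
// Message-tag protocol used by the GetNext*() methods and by the matching sends in
// WriteFilesUsingParallelMesh(): each datum is sent to the master with a tag equal to its
// global index, offset so that the tag spaces cannot collide --
//   nodes:             tag = global node index
//   elements:          tag = mNumNodes + element index
//   boundary elements: tag = mNumNodes + mNumElements + face index
//   cable elements:    tag = mNumNodes + mNumElements + mNumBoundaryElements + cable index
// The master therefore receives with MPI_ANY_SOURCE and the expected tag, which yields the
// items in global index order regardless of which process owns them.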

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        ElementData elem_data;
        elem_data.NodeIndices.resize(mNodesPerElement);

        if (mpDistributedMesh == NULL) // Not using a parallel mesh
        {
            // Use the iterator
            assert(this->mNumElements == mpMesh->GetNumElements());

            for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(mpIters->pElemIter))->GetNodeGlobalIndex(j);
                elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            // Set attribute
            elem_data.AttributeValue = (*(mpIters->pElemIter))->GetAttribute();

            ++(*(mpIters->pElemIter));

            return elem_data;
        }
        else // Parallel mesh
        {
            // Use the mElementCounterForParallelMesh variable to identify the next element
            if (mpDistributedMesh->CalculateDesignatedOwnershipOfElement(mElementCounterForParallelMesh) == true)
            {
                // Master owns this element
                Element<ELEMENT_DIM, SPACE_DIM>* p_element = mpDistributedMesh->GetElement(mElementCounterForParallelMesh);
                assert(elem_data.NodeIndices.size() == mNodesPerElement);
                assert(!p_element->IsDeleted());
                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<mNodesPerElement; j++)
                {
                    elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
                }
                elem_data.AttributeValue = p_element->GetAttribute();
            }
            else
            {
                // Master doesn't own this element, so receive it from the owning process
                UnpackElement(elem_data, mElementCounterForParallelMesh, mNodesPerElement, this->mNumNodes + mElementCounterForParallelMesh);
            }
            // Increment the element counter
            mElementCounterForParallelMesh++;

            return elem_data; // Non-master processes will return garbage here - but they should never write to file
        }
    }
    else // Not writing from a mesh: fall back to the mesh-reader-based implementation
    {
        return AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextBoundaryElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        ElementData boundary_elem_data;
        boundary_elem_data.NodeIndices.resize(mNodesPerBoundaryElement);

        if (mpDistributedMesh == NULL) // Not using a parallel mesh
        {
            // Use the iterator
            assert(this->mNumBoundaryElements == mpMesh->GetNumBoundaryElements());

            for (unsigned j=0; j<boundary_elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(*(mpIters->pBoundaryElemIter)))->GetNodeGlobalIndex(j);
                boundary_elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            boundary_elem_data.AttributeValue = (*(*(mpIters->pBoundaryElemIter)))->GetAttribute();

            ++(*(mpIters->pBoundaryElemIter));
            return boundary_elem_data;
        }
        else // Parallel mesh
        {
            // Use the mBoundaryElementCounterForParallelMesh variable to identify the next boundary element
            if (mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement(mBoundaryElementCounterForParallelMesh) == true)
            {
                // Master owns this boundary element
                BoundaryElement<ELEMENT_DIM-1, SPACE_DIM>* p_boundary_element = mpDistributedMesh->GetBoundaryElement(mBoundaryElementCounterForParallelMesh);
                assert(boundary_elem_data.NodeIndices.size() == ELEMENT_DIM);
                assert(!p_boundary_element->IsDeleted());
                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<ELEMENT_DIM; j++)
                {
                    boundary_elem_data.NodeIndices[j] = p_boundary_element->GetNodeGlobalIndex(j);
                }
                boundary_elem_data.AttributeValue = p_boundary_element->GetAttribute();
            }
            else
            {
                // Master doesn't own this boundary element, so receive it from the owning process
                UnpackElement(boundary_elem_data, mBoundaryElementCounterForParallelMesh, ELEMENT_DIM, this->mNumNodes + this->mNumElements + mBoundaryElementCounterForParallelMesh);
            }
            // Increment the boundary element counter
            mBoundaryElementCounterForParallelMesh++;

            return boundary_elem_data; // Non-master processes will return garbage here - but they should never write to file
        }
    }
    else // Not writing from a mesh: fall back to the mesh-reader-based implementation
    {
        return AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextBoundaryElement();
    }
}


template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextCableElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        // Need to be using a MixedDimensionMesh or there will be no cable data
        assert(mpMixedMesh);

        ElementData elem_data;
        elem_data.NodeIndices.resize(2);

        // Use the mCableElementCounterForParallelMesh variable to identify the next cable element
        if (mpMixedMesh->CalculateDesignatedOwnershipOfCableElement(mCableElementCounterForParallelMesh) == true)
        {
            // Master owns this element
            Element<1, SPACE_DIM>* p_element = mpMixedMesh->GetCableElement(mCableElementCounterForParallelMesh);
            assert(!p_element->IsDeleted());
            // Master can use the local data to recover node indices & attribute
            for (unsigned j=0; j<2; j++)
            {
                elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
            }
            elem_data.AttributeValue = p_element->GetAttribute();
        }
        else
        {
            // Master doesn't own this element, so receive it from the owning process
            UnpackElement(elem_data, mCableElementCounterForParallelMesh, 2, this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + mCableElementCounterForParallelMesh);
        }
        // Increment the cable element counter
        mCableElementCounterForParallelMesh++;

        return elem_data; // Non-master processes will return garbage here - but they should never write to file
    }
    else // Not writing from a mesh: fall back to the mesh-reader-based implementation
    {
        return AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextCableElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteNclFile(
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
        bool invertMeshPermutation)
{
    MeshEventHandler::BeginEvent(MeshEventHandler::NCL);
    unsigned max_elements_all;
    if (PetscTools::IsSequential())
    {
        max_elements_all = rMesh.CalculateMaximumContainingElementsPerProcess();
    }
    else
    {
        unsigned max_elements_per_process = rMesh.CalculateMaximumContainingElementsPerProcess();
        MPI_Allreduce(&max_elements_per_process, &max_elements_all, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
    }

    std::string node_connect_list_file_name = this->mBaseName + ".ncl";
    if (invertMeshPermutation && !rMesh.rGetNodePermutation().empty())
    {
        node_connect_list_file_name += "-tmp";
    }

    PetscTools::BeginRoundRobin();
    {
        out_stream p_ncl_file = out_stream(NULL);

        if (PetscTools::AmMaster())
        {
            // Open the file for the first time
            p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::binary | std::ios::trunc);

            // Write the ncl header
            *p_ncl_file << rMesh.GetNumNodes() << "\t";
            *p_ncl_file << max_elements_all << "\t";
            *p_ncl_file << "\tBIN\n";
        }
        else
        {
            // Append to the existing file
            p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::binary | std::ios::app);
        }

        // Write each node's data
        unsigned default_marker = UINT_MAX;

        typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
        for (NodeIterType iter = rMesh.GetNodeIteratorBegin();
             iter != rMesh.GetNodeIteratorEnd();
             ++iter)
        {
            // Get the containing element indices from the node's set and sort them
            std::set<unsigned>& r_elem_set = iter->rGetContainingElementIndices();
            std::vector<unsigned> elem_vector(r_elem_set.begin(), r_elem_set.end());
            std::sort(elem_vector.begin(), elem_vector.end());
            // Pad the vector with unsigned markers
            for (unsigned elem_index=elem_vector.size(); elem_index<max_elements_all; elem_index++)
            {
                elem_vector.push_back(default_marker);
            }
            assert(elem_vector.size() == max_elements_all);
            // Write raw data out of std::vector into the file
            if (max_elements_all > 0u)
            {
                p_ncl_file->write((char*)&elem_vector[0], elem_vector.size()*sizeof(unsigned));
            }
        }

        if (PetscTools::AmTopMost())
        {
            *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
        }

        p_ncl_file->close();
    }
    PetscTools::EndRoundRobin();

    if (invertMeshPermutation && PetscTools::AmMaster() && !rMesh.rGetNodePermutation().empty() && max_elements_all > 0u)
    {
        // Open files
        const std::string real_node_connect_list_file_name = this->mBaseName + ".ncl";
        out_stream p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(real_node_connect_list_file_name, std::ios::binary | std::ios::trunc);
        FileFinder temp_ncl_path = this->mpOutputFileHandler->FindFile(node_connect_list_file_name);
        std::ifstream temp_ncl_file(temp_ncl_path.GetAbsolutePath().c_str(), std::ios::binary);
        // Copy the header
        std::string header_line;
        getline(temp_ncl_file, header_line, '\n');
        (*p_ncl_file) << header_line << "\n";
        const std::streampos data_start = temp_ncl_file.tellg();
        const std::streamoff item_width = max_elements_all * sizeof(unsigned);
        // Copy the binary data, permuted
        std::vector<unsigned> elem_vector(max_elements_all);
        for (unsigned node_index=0; node_index<rMesh.GetNumAllNodes(); node_index++)
        {
            unsigned permuted_index = rMesh.rGetNodePermutation()[node_index];
            temp_ncl_file.seekg(data_start + item_width * permuted_index, std::ios_base::beg);
            temp_ncl_file.read((char*)&elem_vector[0], max_elements_all*sizeof(unsigned));
            p_ncl_file->write((char*)&elem_vector[0], max_elements_all*sizeof(unsigned));
        }
        // Footer
        *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
        p_ncl_file->close();
        // Remove the temporary file
        remove(temp_ncl_path.GetAbsolutePath().c_str());
    }
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteNclFile");
    MeshEventHandler::EndEvent(MeshEventHandler::NCL);
}
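
// Layout of the .ncl ("node connectivity list") file written above, for reference: a text
// header line "<num_nodes>\t<max_containing_elements>\t\tBIN\n", followed by one fixed-width
// binary record per node holding the sorted indices of the elements containing that node,
// padded to max_containing_elements entries with UINT_MAX, and finally a "#"-prefixed
// provenance footer.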

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMesh(
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
        bool keepOriginalElementIndexing)
{
    this->mpMeshReader = NULL;
    mpMesh = &rMesh;

    this->mNumNodes = mpMesh->GetNumNodes();
    this->mNumElements = mpMesh->GetNumElements();
    this->mNumBoundaryElements = mpMesh->GetNumBoundaryElements();
    this->mNumCableElements = mpMesh->GetNumCableElements();

    typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
    mpIters->pNodeIter = new NodeIterType(mpMesh->GetNodeIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElemIterType;
    mpIters->pElemIter = new ElemIterType(mpMesh->GetElementIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElemIterType;
    mpIters->pBoundaryElemIter = new BoundaryElemIterType(mpMesh->GetBoundaryElementIteratorBegin());

    // Use this process's first element to gauge the size of all the elements
    if ((*(mpIters->pElemIter)) != mpMesh->GetElementIteratorEnd())
    {
        mNodesPerElement = (*(mpIters->pElemIter))->GetNumNodes();
    }

    // Use this process's first boundary element to gauge the size of all the boundary elements
    if ((*(mpIters->pBoundaryElemIter)) != mpMesh->GetBoundaryElementIteratorEnd())
    {
        mNodesPerBoundaryElement = (*(*(mpIters->pBoundaryElemIter)))->GetNumNodes();
    }

    // The connectivity file is only written when we write to a binary file (currently only
    // available via TrianglesMeshWriter) and when we are preserving the element order
    if (this->mFilesAreBinary && keepOriginalElementIndexing)
    {
        WriteNclFile(*mpMesh);
    }

    // Have we got a parallel mesh?
    mpDistributedMesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>* >(&rMesh);

    // Have we got a MixedDimensionMesh?
    mpMixedMesh = dynamic_cast<MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>* >(this->mpMesh);

    if (mpDistributedMesh != NULL)
    {
        // It's a parallel mesh
        WriteFilesUsingParallelMesh(keepOriginalElementIndexing);
        return;
    }

    if (!PetscTools::AmMaster())
    {
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with the master process writing files
        return;
    }

    // Set up the node map if we might have deleted nodes
    unsigned node_map_index = 0;
    if (mpMesh->IsMeshChanging())
    {
        mpNodeMap = new NodeMap(mpMesh->GetMaximumNodeIndex());
        for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
        {
            mpNodeMap->SetNewIndex(it->GetIndex(), node_map_index++);
        }
    }

    this->WriteFiles();
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with the waiting slave processes
    delete mpIters->pNodeIter;
    mpIters->pNodeIter = NULL;
    delete mpIters->pElemIter;
    mpIters->pElemIter = NULL;
    delete mpIters->pBoundaryElemIter;
    mpIters->pBoundaryElemIter = NULL;
}
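
// Note on the barriers above: in the sequential-mesh case only the master writes, so the
// slaves hit the first barrier immediately and wait there until the master has finished
// WriteFiles() and reaches the matching barrier; for a DistributedTetrahedralMesh the work
// is instead handed off to WriteFilesUsingParallelMesh() before that point.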

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMeshReaderAndMesh(
        AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh)
{
    WriteNclFile(rMesh, true);
    this->WriteFilesUsingMeshReader(rMeshReader);
}

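// WriteFilesUsingParallelMesh() supports two strategies. With keepOriginalElementIndexing
// (the default), the master writes the whole mesh in global index order: it uses its own
// local data where it is the designated owner and otherwise receives each item from the
// owning process, which posts nodes, elements, faces and cable elements with the message
// tags described earlier. With keepOriginalElementIndexing == false, each process appends
// its own local data in rank order (round robin), with the master writing the headers
// first and the top-most process writing the footer last.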
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingParallelMesh(bool keepOriginalElementIndexing)
{
    if (keepOriginalElementIndexing)
    {
        // Master goes on to write as usual
        if (PetscTools::AmMaster())
        {
            this->WriteFiles();
        }
        else
        {
//            PetscTools::Barrier("DodgyBarrierBeforeNODE");
            MeshEventHandler::BeginEvent(MeshEventHandler::NODE);
            boost::scoped_array<double> raw_coords(new double[SPACE_DIM]);
            // Slaves concentrate the nodes
            typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
            for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
            {
                for (unsigned j=0; j<SPACE_DIM; j++)
                {
                    raw_coords[j] = it->GetPoint()[j];
                }
                MPI_Ssend(raw_coords.get(), SPACE_DIM, MPI_DOUBLE, 0, it->GetIndex(), PETSC_COMM_WORLD); // Nodes sent with positive tags
            }
//            PetscTools::Barrier("DodgyBarrierAfterNODE");
            MeshEventHandler::EndEvent(MeshEventHandler::NODE);

            MeshEventHandler::BeginEvent(MeshEventHandler::ELE);
            // Slaves concentrate the elements for which they are owners
            boost::scoped_array<unsigned> raw_indices(new unsigned[mNodesPerElement]); // Assuming that we don't have parallel quadratic elements
            typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElementIterType;
            for (ElementIterType it = mpMesh->GetElementIteratorBegin(); it != mpMesh->GetElementIteratorEnd(); ++it)
            {
                unsigned index = it->GetIndex();
                if (mpDistributedMesh->CalculateDesignatedOwnershipOfElement(index) == true)
                {
                    for (unsigned j=0; j<mNodesPerElement; j++)
                    {
                        raw_indices[j] = it->GetNodeGlobalIndex(j);
                    }
                    PostElement(index, raw_indices.get(), mNodesPerElement, this->mNumNodes + index, it->GetAttribute());
                }
            }
//            PetscTools::Barrier("DodgyBarrierAfterELE");
            MeshEventHandler::EndEvent(MeshEventHandler::ELE);
            MeshEventHandler::BeginEvent(MeshEventHandler::FACE);
            // Slaves concentrate the faces for which they are owners (not in 1-d)
            if (ELEMENT_DIM != 1)
            {
                boost::scoped_array<unsigned> raw_face_indices(new unsigned[ELEMENT_DIM]); // Assuming that we don't have parallel quadratic meshes
                typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElementIterType;
                for (BoundaryElementIterType it = mpMesh->GetBoundaryElementIteratorBegin(); it != mpMesh->GetBoundaryElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if (mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement(index) == true)
                    {
                        for (unsigned j=0; j<ELEMENT_DIM; j++)
                        {
                            raw_face_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        PostElement(index, raw_face_indices.get(), ELEMENT_DIM, this->mNumNodes + this->mNumElements + index, (*it)->GetAttribute());
                    }
                }
            }
//            PetscTools::Barrier("DodgyBarrierAfterFACE");
            MeshEventHandler::EndEvent(MeshEventHandler::FACE);

            // Slaves concentrate the cable elements for which they are owners
            if (mpMixedMesh)
            {
                typedef typename MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>::CableElementIterator CableElementIterType;
                for (CableElementIterType it = mpMixedMesh->GetCableElementIteratorBegin(); it != mpMixedMesh->GetCableElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if (mpMixedMesh->CalculateDesignatedOwnershipOfCableElement(index) == true)
                    {
                        unsigned raw_cable_element_indices[2];
                        for (unsigned j=0; j<2; j++)
                        {
                            raw_cable_element_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        PostElement(index, raw_cable_element_indices, 2, this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + index, (*it)->GetAttribute());
                    }
                }
            }
        }
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingParallelMesh");
    }
    else
    {
        PetscTools::BeginRoundRobin();

        if (PetscTools::AmMaster())
        {
            // Make sure the headers are written first
            assert(PetscTools::GetMyRank() == 0);
            CreateFilesWithHeaders();
        }

        AppendLocalDataToFiles();

        if (PetscTools::AmTopMost())
        {
            // Make sure the footers are written last
            assert(PetscTools::GetMyRank() == PetscTools::GetNumProcs()-1);
            WriteFilesFooter();
        }

        PetscTools::EndRoundRobin();
    }
}
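
// The following three methods are the hooks used by the keepOriginalElementIndexing==false
// branch above; the base-class versions are never valid and must be overridden by any
// concrete writer that supports that round-robin output path.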

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::CreateFilesWithHeaders()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AppendLocalDataToFiles()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesFooter()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

///////////////////////////////////////////////////////////////////////////////
// Explicit instantiation
///////////////////////////////////////////////////////////////////////////////

template class AbstractTetrahedralMeshWriter<1,1>;
template class AbstractTetrahedralMeshWriter<1,2>;
template class AbstractTetrahedralMeshWriter<1,3>;
template class AbstractTetrahedralMeshWriter<2,2>;
template class AbstractTetrahedralMeshWriter<2,3>;
template class AbstractTetrahedralMeshWriter<3,3>;
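
// Illustrative usage (a sketch, not part of this translation unit): a concrete subclass
// such as TrianglesMeshWriter is normally driven like
//
//     TrianglesMeshWriter<3,3> mesh_writer("output_dir", "mesh_base");  // names are examples
//     mesh_writer.SetWriteFilesAsBinary();   // optional; binary output also triggers WriteNclFile()
//     mesh_writer.WriteFilesUsingMesh(mesh); // 'mesh' is any AbstractTetrahedralMesh<3,3>
//
// which exercises the sequential or parallel write paths implemented above, depending on
// the mesh type.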