AbstractTetrahedralMeshWriter.cpp

/*

Copyright (c) 2005-2015, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

// Disable PETSc logging of MPI calls (we don't use this, anyway) to fix
// "right-hand operand of comma has no effect" warnings when building with
// PETSc 2.2.1.
#define PETSC_HAVE_BROKEN_RECURSIVE_MACRO

#include "AbstractTetrahedralMeshWriter.hpp"
#include "AbstractTetrahedralMesh.hpp"
#include "DistributedTetrahedralMesh.hpp"
#include "MixedDimensionMesh.hpp"
#include "Version.hpp"
#include "Exception.hpp"

#include <mpi.h> // For MPI_Send, MPI_Recv

// Direct includes for symbols used below (they may also arrive transitively via the headers above)
#include <boost/scoped_array.hpp> // boost::scoped_array
#include <climits>                // UINT_MAX
#include <algorithm>              // std::sort
#include <fstream>                // std::ifstream
#include <cstdio>                 // remove()

const char* MeshEventHandler::EventName[] = { "Tri write","BinTri write","VTK write","PVTK write", "node write", "ele write", "face write", "ncl write", "comm1","comm2","Total"};
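// Note: these strings name the MeshEventHandler timing events used in this file; their order is
// assumed (not verified here) to mirror the corresponding event enum in MeshEventHandler.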

/** Helper struct holding heap-allocated mesh iterators, created lazily in WriteFilesUsingMesh(). */
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
struct MeshWriterIterators
{
    /** Iterator over the mesh nodes */
    typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator* pNodeIter;
    /** Iterator over the mesh elements */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator* pElemIter;
    /** Iterator over the boundary elements */
    typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator* pBoundaryElemIter;
};

///////////////////////////////////////////////////////////////////////////////////
// Implementation
///////////////////////////////////////////////////////////////////////////////////

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AbstractTetrahedralMeshWriter(const std::string &rDirectory,
                   const std::string &rBaseName,
                   const bool clearOutputDir)
    : AbstractMeshWriter<ELEMENT_DIM, SPACE_DIM>(rDirectory, rBaseName, clearOutputDir),
      mpNodeMap(NULL),
      mNodesPerElement(ELEMENT_DIM+1),
      mNodesPerBoundaryElement(ELEMENT_DIM),
      mpMesh(NULL),
      mpDistributedMesh(NULL),
      mpMixedMesh(NULL),
      mpIters(new MeshWriterIterators<ELEMENT_DIM,SPACE_DIM>),
      mNodeCounterForParallelMesh(0),
      mElementCounterForParallelMesh(0),
      mBoundaryElementCounterForParallelMesh(0),
      mCableElementCounterForParallelMesh(0),
      mFilesAreBinary(false)
{
    mpIters->pNodeIter = NULL;
    mpIters->pElemIter = NULL;
    mpIters->pBoundaryElemIter = NULL;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::~AbstractTetrahedralMeshWriter()
{
    if (mpIters->pNodeIter)
    {
        delete mpIters->pNodeIter;
    }
    if (mpIters->pElemIter)
    {
        delete mpIters->pElemIter;
    }
    if (mpIters->pBoundaryElemIter)
    {
        delete mpIters->pBoundaryElemIter;
    }

    delete mpIters;

    if (mpNodeMap)
    {
        delete mpNodeMap;
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
std::vector<double> AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextNode()
{
    // Only the master process should be calling this
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        std::vector<double> coords(SPACE_DIM);

        // Iterate over the locally-owned nodes
        if ((*(mpIters->pNodeIter)) != mpMesh->GetNodeIteratorEnd())
        {
            // Either this is a sequential mesh (and we own it all)
            // or it's parallel (and the master owns the first chunk)
            for (unsigned j=0; j<SPACE_DIM; j++)
            {
                coords[j] = (*(mpIters->pNodeIter))->GetPoint()[j];
            }

            mNodeCounterForParallelMesh = (*(mpIters->pNodeIter))->GetIndex() + 1; // Ready for when we run out of local nodes

            ++(*(mpIters->pNodeIter));
            return coords;
        }

        // If we didn't return then the iterator has reached the end of the local nodes.
        // It must be a parallel mesh and we are expecting messages...

        assert( mpDistributedMesh != NULL );

        MPI_Status status;
        status.MPI_ERROR = MPI_SUCCESS; // For MPICH2
        // Do the receive, then convert to std::vector on the master
        boost::scoped_array<double> raw_coords(new double[SPACE_DIM]);
        MPI_Recv(raw_coords.get(), SPACE_DIM, MPI_DOUBLE, MPI_ANY_SOURCE, mNodeCounterForParallelMesh, PETSC_COMM_WORLD, &status);
        assert(status.MPI_ERROR == MPI_SUCCESS);
        for (unsigned j=0; j<coords.size(); j++)
        {
            coords[j] = raw_coords[j];
        }

        mNodeCounterForParallelMesh++;
        return coords;
    }
    else
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextNode();
    }
}
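
// Note on the parallel streaming used by the Get... methods (as read from this file; the
// PostElement/UnpackElement helpers are declared elsewhere): while the master writes, the other
// processes send each item they own with an MPI tag unique to that item, so the master can
// receive them in global order. The tag layout used below is:
//   nodes:             global node index
//   elements:          mNumNodes + element index
//   boundary elements: mNumNodes + mNumElements + boundary element index
//   cable elements:    mNumNodes + mNumElements + mNumBoundaryElements + cable element index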

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        ElementData elem_data;
        elem_data.NodeIndices.resize(mNodesPerElement);

        if (mpDistributedMesh == NULL) // not using a parallel mesh
        {
            // Use the iterator
            assert(this->mNumElements == mpMesh->GetNumElements());

            for (unsigned j=0; j<elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(mpIters->pElemIter))->GetNodeGlobalIndex(j);
                elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            // Set attribute
            elem_data.AttributeValue = (*(mpIters->pElemIter))->GetAttribute();

            ++(*(mpIters->pElemIter));

            return elem_data;
        }
        else // Parallel mesh
        {
            // Use the mElementCounterForParallelMesh variable to identify the next element
            if (mpDistributedMesh->CalculateDesignatedOwnershipOfElement(mElementCounterForParallelMesh) == true)
            {
                // Master owns this element
                Element<ELEMENT_DIM, SPACE_DIM>* p_element = mpDistributedMesh->GetElement(mElementCounterForParallelMesh);
                assert(elem_data.NodeIndices.size() == mNodesPerElement);
                assert(!p_element->IsDeleted());
                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<mNodesPerElement; j++)
                {
                    elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
                }
                elem_data.AttributeValue = p_element->GetAttribute();
            }
            else
            {
                // Master doesn't own this element; receive it from whichever process does
                UnpackElement(elem_data, mElementCounterForParallelMesh, mNodesPerElement, this->mNumNodes + mElementCounterForParallelMesh);
            }
            // Increment element counter
            mElementCounterForParallelMesh++;

            return elem_data; // non-master processes will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextBoundaryElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        ElementData boundary_elem_data;
        boundary_elem_data.NodeIndices.resize(mNodesPerBoundaryElement);

        if (mpDistributedMesh == NULL) // not using a parallel mesh
        {
            // Use the iterator
            assert(this->mNumBoundaryElements == mpMesh->GetNumBoundaryElements());

            for (unsigned j=0; j<boundary_elem_data.NodeIndices.size(); j++)
            {
                unsigned old_index = (*(*(mpIters->pBoundaryElemIter)))->GetNodeGlobalIndex(j);
                boundary_elem_data.NodeIndices[j] = mpMesh->IsMeshChanging() ? mpNodeMap->GetNewIndex(old_index) : old_index;
            }
            boundary_elem_data.AttributeValue = (*(*(mpIters->pBoundaryElemIter)))->GetAttribute();

            ++(*(mpIters->pBoundaryElemIter));
            return boundary_elem_data;
        }
        else // Parallel mesh
        {
            // Use the mBoundaryElementCounterForParallelMesh variable to identify the next boundary element
            if (mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement(mBoundaryElementCounterForParallelMesh) == true)
            {
                // Master owns this boundary element
                BoundaryElement<ELEMENT_DIM-1, SPACE_DIM>* p_boundary_element = mpDistributedMesh->GetBoundaryElement(mBoundaryElementCounterForParallelMesh);
                assert(boundary_elem_data.NodeIndices.size() == ELEMENT_DIM);
                assert(!p_boundary_element->IsDeleted());
                // Master can use the local data to recover node indices & attribute
                for (unsigned j=0; j<ELEMENT_DIM; j++)
                {
                    boundary_elem_data.NodeIndices[j] = p_boundary_element->GetNodeGlobalIndex(j);
                }
                boundary_elem_data.AttributeValue = p_boundary_element->GetAttribute();
            }
            else
            {
                // Master doesn't own this boundary element; receive it from whichever process does
                UnpackElement(boundary_elem_data, mBoundaryElementCounterForParallelMesh, ELEMENT_DIM, this->mNumNodes + this->mNumElements + mBoundaryElementCounterForParallelMesh);
            }
            // Increment boundary element counter
            mBoundaryElementCounterForParallelMesh++;

            return boundary_elem_data; // non-master processes will return garbage here - but they should never write to file
        }
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextBoundaryElement();
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
ElementData AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::GetNextCableElement()
{
    assert(PetscTools::AmMaster());

    // If we are writing from a mesh...
    if (mpMesh)
    {
        // Need to be using a MixedDimensionMesh or there will be no cable data
        assert(mpMixedMesh);

        ElementData elem_data;
        elem_data.NodeIndices.resize(2);

        // Use the mCableElementCounterForParallelMesh variable to identify the next cable element
        if (mpMixedMesh->CalculateDesignatedOwnershipOfCableElement(mCableElementCounterForParallelMesh) == true)
        {
            // Master owns this cable element
            Element<1, SPACE_DIM>* p_element = mpMixedMesh->GetCableElement(mCableElementCounterForParallelMesh);
            assert(!p_element->IsDeleted());
            // Master can use the local data to recover node indices & attribute
            for (unsigned j=0; j<2; j++)
            {
                elem_data.NodeIndices[j] = p_element->GetNodeGlobalIndex(j);
            }
            elem_data.AttributeValue = p_element->GetAttribute();
        }
        else
        {
            // Master doesn't own this cable element; receive it from whichever process does
            UnpackElement(elem_data, mCableElementCounterForParallelMesh, 2, this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + mCableElementCounterForParallelMesh);
        }
        // Increment cable element counter
        mCableElementCounterForParallelMesh++;

        return elem_data; // non-master processes will return garbage here - but they should never write to file
    }
    else // not writing from a mesh
    {
        return AbstractMeshWriter<ELEMENT_DIM,SPACE_DIM>::GetNextCableElement();
    }
}

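// WriteNclFile writes the "node connectivity list" (.ncl) sidecar file. As written below, the
// layout is: a one-line ASCII header (number of nodes, maximum number of containing elements per
// node across all processes, and a "BIN" marker), then for every node a fixed-width binary record
// of that node's containing element indices, padded with UINT_MAX up to the maximum, and finally
// a provenance footer.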
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteNclFile(
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh,
        bool invertMeshPermutation)
{
    MeshEventHandler::BeginEvent(MeshEventHandler::NCL);
    unsigned max_elements_all;
    if (PetscTools::IsSequential())
    {
        max_elements_all = rMesh.CalculateMaximumContainingElementsPerProcess();
    }
    else
    {
        unsigned max_elements_per_process = rMesh.CalculateMaximumContainingElementsPerProcess();
        MPI_Allreduce(&max_elements_per_process, &max_elements_all, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
    }

    std::string node_connect_list_file_name = this->mBaseName + ".ncl";
    if (invertMeshPermutation && !rMesh.rGetNodePermutation().empty())
    {
        node_connect_list_file_name += "-tmp";
    }

    PetscTools::BeginRoundRobin();
    {
        out_stream p_ncl_file = out_stream(NULL);

        if (PetscTools::AmMaster())
        {
            // Open the file for the first time
            p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::binary | std::ios::trunc);

            // Write the ncl header
            *p_ncl_file << rMesh.GetNumNodes() << "\t";
            *p_ncl_file << max_elements_all << "\t";
            *p_ncl_file << "\tBIN\n";
        }
        else
        {
            // Append to the existing file
            p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(node_connect_list_file_name, std::ios::binary | std::ios::app);
        }

        // Write each node's data
        unsigned default_marker = UINT_MAX;

        typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
        for (NodeIterType iter = rMesh.GetNodeIteratorBegin();
             iter != rMesh.GetNodeIteratorEnd();
             ++iter)
        {
            // Get the containing element indices from the node's set and sort them
            std::set<unsigned>& r_elem_set = iter->rGetContainingElementIndices();
            std::vector<unsigned> elem_vector(r_elem_set.begin(), r_elem_set.end());
            std::sort(elem_vector.begin(), elem_vector.end());
            // Pad the vector with unsigned markers
            for (unsigned elem_index=elem_vector.size(); elem_index<max_elements_all; elem_index++)
            {
                elem_vector.push_back(default_marker);
            }
            assert(elem_vector.size() == max_elements_all);
            // Write raw data out of std::vector into the file
            if (max_elements_all > 0u)
            {
                p_ncl_file->write((char*)&elem_vector[0], elem_vector.size()*sizeof(unsigned));
            }
        }

        if (PetscTools::AmTopMost())
        {
            *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
        }

        p_ncl_file->close();
    }
    PetscTools::EndRoundRobin();

    if (invertMeshPermutation && PetscTools::AmMaster() && !rMesh.rGetNodePermutation().empty() && max_elements_all > 0u)
    {
        // Open files
        const std::string real_node_connect_list_file_name = this->mBaseName + ".ncl";
        out_stream p_ncl_file = this->mpOutputFileHandler->OpenOutputFile(real_node_connect_list_file_name, std::ios::binary | std::ios::trunc);
        FileFinder temp_ncl_path = this->mpOutputFileHandler->FindFile(node_connect_list_file_name);
        std::ifstream temp_ncl_file(temp_ncl_path.GetAbsolutePath().c_str(), std::ios::binary);
        // Copy the header
        std::string header_line;
        getline(temp_ncl_file, header_line, '\n');
        (*p_ncl_file) << header_line << "\n";
        const std::streampos data_start = temp_ncl_file.tellg();
        const std::streamoff item_width = max_elements_all * sizeof(unsigned);
        // Copy the binary data, permuted
        std::vector<unsigned> elem_vector(max_elements_all);
        for (unsigned node_index=0; node_index<rMesh.GetNumAllNodes(); node_index++)
        {
            unsigned permuted_index = rMesh.rGetNodePermutation()[node_index];
            temp_ncl_file.seekg(data_start + item_width * permuted_index, std::ios_base::beg);
            temp_ncl_file.read((char*)&elem_vector[0], max_elements_all*sizeof(unsigned));
            p_ncl_file->write((char*)&elem_vector[0], max_elements_all*sizeof(unsigned));
        }
        // Footer
        *p_ncl_file << "#\n# " + ChasteBuildInfo::GetProvenanceString();
        p_ncl_file->close();
        // Remove temp file
        remove(temp_ncl_path.GetAbsolutePath().c_str());
    }
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteNclFile");
    MeshEventHandler::EndEvent(MeshEventHandler::NCL);
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMesh(
      AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>& rMesh,
      bool keepOriginalElementIndexing)
{
    this->mpMeshReader = NULL;
    mpMesh = &rMesh;

    this->mNumNodes = mpMesh->GetNumNodes();
    this->mNumElements = mpMesh->GetNumElements();
    this->mNumBoundaryElements = mpMesh->GetNumBoundaryElements();
    this->mNumCableElements = mpMesh->GetNumCableElements();

    typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
    mpIters->pNodeIter = new NodeIterType(mpMesh->GetNodeIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElemIterType;
    mpIters->pElemIter = new ElemIterType(mpMesh->GetElementIteratorBegin());

    typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElemIterType;
    mpIters->pBoundaryElemIter = new BoundaryElemIterType(mpMesh->GetBoundaryElementIteratorBegin());

    // Use this process's first element to gauge the size of all the elements
    if ((*(mpIters->pElemIter)) != mpMesh->GetElementIteratorEnd())
    {
        mNodesPerElement = (*(mpIters->pElemIter))->GetNumNodes();
    }

    // Use this process's first boundary element to gauge the size of all the boundary elements
    if ((*(mpIters->pBoundaryElemIter)) != mpMesh->GetBoundaryElementIteratorEnd())
    {
        mNodesPerBoundaryElement = (*(*(mpIters->pBoundaryElemIter)))->GetNumNodes();
    }

    // The connectivity file is written when we write to a binary file (only available for TrianglesMeshWriter) and if we are preserving the element order
    if (this->mFilesAreBinary && keepOriginalElementIndexing)
    {
        WriteNclFile(*mpMesh);
    }

    // Have we got a parallel mesh?
    mpDistributedMesh = dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>* >(&rMesh);

    // Have we got a MixedDimensionMesh?
    mpMixedMesh = dynamic_cast<MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>* >(this->mpMesh);

    if (mpDistributedMesh != NULL)
    {
        // It's a parallel mesh
        WriteFilesUsingParallelMesh(keepOriginalElementIndexing);
        return;
    }

    if (!PetscTools::AmMaster())
    {
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with the master process writing files
        return;
    }

    // Set up node map if we might have deleted nodes
    unsigned node_map_index = 0;
    if (mpMesh->IsMeshChanging())
    {
        mpNodeMap = new NodeMap(mpMesh->GetMaximumNodeIndex());
        for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
        {
            mpNodeMap->SetNewIndex(it->GetIndex(), node_map_index++);
        }
    }

    this->WriteFiles();
    PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingMesh"); // Paired with waiting slave processes
    delete mpIters->pNodeIter;
    mpIters->pNodeIter = NULL;
    delete mpIters->pElemIter;
    mpIters->pElemIter = NULL;
    delete mpIters->pBoundaryElemIter;
    mpIters->pBoundaryElemIter = NULL;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingMeshReaderAndMesh(
        AbstractMeshReader<ELEMENT_DIM, SPACE_DIM>& rMeshReader,
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh)
{
    WriteNclFile(rMesh, true);
    this->WriteFilesUsingMeshReader(rMeshReader);
}

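// WriteFilesUsingParallelMesh below takes one of two routes. With keepOriginalElementIndexing,
// the master process writes every file while the other processes stream it the nodes, elements,
// faces and cable elements they own, using the tag scheme noted above. Otherwise, every process
// appends its own chunk to the files in rank order via a round robin, which requires the concrete
// writer to implement CreateFilesWithHeaders(), AppendLocalDataToFiles() and WriteFilesFooter().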
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesUsingParallelMesh(bool keepOriginalElementIndexing)
{
    if (keepOriginalElementIndexing)
    {
        // Master goes on to write as usual
        if (PetscTools::AmMaster())
        {
            this->WriteFiles();
        }
        else
        {
//            PetscTools::Barrier("DodgyBarrierBeforeNODE");
            MeshEventHandler::BeginEvent(MeshEventHandler::NODE);
            boost::scoped_array<double> raw_coords(new double[SPACE_DIM]);
            // Slaves concentrate the Nodes
            typedef typename AbstractMesh<ELEMENT_DIM,SPACE_DIM>::NodeIterator NodeIterType;
            for (NodeIterType it = mpMesh->GetNodeIteratorBegin(); it != mpMesh->GetNodeIteratorEnd(); ++it)
            {
                for (unsigned j=0; j<SPACE_DIM; j++)
                {
                    raw_coords[j] = it->GetPoint()[j];
                }
                MPI_Ssend(raw_coords.get(), SPACE_DIM, MPI_DOUBLE, 0, it->GetIndex(), PETSC_COMM_WORLD); // Nodes sent with positive tags
            }
//            PetscTools::Barrier("DodgyBarrierAfterNODE");
            MeshEventHandler::EndEvent(MeshEventHandler::NODE);

            MeshEventHandler::BeginEvent(MeshEventHandler::ELE);
            // Slaves concentrate the Elements for which they are owners
            boost::scoped_array<unsigned> raw_indices(new unsigned[mNodesPerElement]); // Assuming that we don't have parallel quadratic elements
            typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::ElementIterator ElementIterType;
            for (ElementIterType it = mpMesh->GetElementIteratorBegin(); it != mpMesh->GetElementIteratorEnd(); ++it)
            {
                unsigned index = it->GetIndex();
                if (mpDistributedMesh->CalculateDesignatedOwnershipOfElement(index) == true)
                {
                    for (unsigned j=0; j<mNodesPerElement; j++)
                    {
                        raw_indices[j] = it->GetNodeGlobalIndex(j);
                    }
                    PostElement(index, raw_indices.get(), mNodesPerElement, this->mNumNodes + index, it->GetAttribute());
                }
            }
//            PetscTools::Barrier("DodgyBarrierAfterELE");
            MeshEventHandler::EndEvent(MeshEventHandler::ELE);
            MeshEventHandler::BeginEvent(MeshEventHandler::FACE);
            // Slaves concentrate the Faces for which they are owners (not in 1-d)
            if (ELEMENT_DIM != 1)
            {
                boost::scoped_array<unsigned> raw_face_indices(new unsigned[ELEMENT_DIM]); // Assuming that we don't have parallel quadratic meshes
                typedef typename AbstractTetrahedralMesh<ELEMENT_DIM,SPACE_DIM>::BoundaryElementIterator BoundaryElementIterType;
                for (BoundaryElementIterType it = mpMesh->GetBoundaryElementIteratorBegin(); it != mpMesh->GetBoundaryElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if (mpDistributedMesh->CalculateDesignatedOwnershipOfBoundaryElement(index) == true)
                    {
                        for (unsigned j=0; j<ELEMENT_DIM; j++)
                        {
                            raw_face_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        PostElement(index, raw_face_indices.get(), ELEMENT_DIM, this->mNumNodes + this->mNumElements + index, (*it)->GetAttribute());
                    }
                }
            }
//            PetscTools::Barrier("DodgyBarrierAfterFACE");
            MeshEventHandler::EndEvent(MeshEventHandler::FACE);

            // Slaves concentrate the cable elements for which they are owners
            if (mpMixedMesh)
            {
                typedef typename MixedDimensionMesh<ELEMENT_DIM,SPACE_DIM>::CableElementIterator CableElementIterType;
                for (CableElementIterType it = mpMixedMesh->GetCableElementIteratorBegin(); it != mpMixedMesh->GetCableElementIteratorEnd(); ++it)
                {
                    unsigned index = (*it)->GetIndex();
                    if (mpMixedMesh->CalculateDesignatedOwnershipOfCableElement(index) == true)
                    {
                        unsigned raw_cable_element_indices[2];
                        for (unsigned j=0; j<2; j++)
                        {
                            raw_cable_element_indices[j] = (*it)->GetNodeGlobalIndex(j);
                        }
                        PostElement(index, raw_cable_element_indices, 2, this->mNumNodes + this->mNumElements + this->mNumBoundaryElements + index, (*it)->GetAttribute());
                    }
                }
            }
        }
        PetscTools::Barrier("AbstractTetrahedralMeshWriter::WriteFilesUsingParallelMesh");
    }
    else
    {
        PetscTools::BeginRoundRobin();

        if (PetscTools::AmMaster())
        {
            // Make sure headers are written first
            assert(PetscTools::GetMyRank() == 0);
            CreateFilesWithHeaders();
        }

        AppendLocalDataToFiles();

        if (PetscTools::AmTopMost())
        {
            // Make sure footers are written last
            assert(PetscTools::GetMyRank() == PetscTools::GetNumProcs()-1);
            WriteFilesFooter();
        }

        PetscTools::EndRoundRobin();
    }
}

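// The three methods below are hooks for the non-indexed parallel write path above. The base class
// deliberately aborts in each of them; a concrete writer that supports that path is expected to
// override all three.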
template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::CreateFilesWithHeaders()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::AppendLocalDataToFiles()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void AbstractTetrahedralMeshWriter<ELEMENT_DIM, SPACE_DIM>::WriteFilesFooter()
{
    // If control reaches this line you haven't implemented the optimised
    // parallel write for whichever visualiser you are writing for.
    NEVER_REACHED;
}

///////////////////////////////////////////////////////////////////////////////////
// Explicit instantiation
///////////////////////////////////////////////////////////////////////////////////

template class AbstractTetrahedralMeshWriter<1,1>;
template class AbstractTetrahedralMeshWriter<1,2>;
template class AbstractTetrahedralMeshWriter<1,3>;
template class AbstractTetrahedralMeshWriter<2,2>;
template class AbstractTetrahedralMeshWriter<2,3>;
template class AbstractTetrahedralMeshWriter<3,3>;