Chaste Commit::1fd4e48e3990e67db148bc1bc4cf6991a0049d0c
PetscTools Class Reference

#include <PetscTools.hpp>

Static Public Member Functions

static void ResetCache ()
 
static bool IsInitialised ()
 
static bool IsSequential ()
 
static bool IsParallel ()
 
static bool IsIsolated ()
 
static unsigned GetNumProcs ()
 
static unsigned GetMyRank ()
 
static bool AmMaster ()
 
static bool AmTopMost ()
 
static void Barrier (const std::string callerId="")
 
static void BeginRoundRobin ()
 
static void EndRoundRobin ()
 
static void IsolateProcesses (bool isolate=true)
 
static MPI_Comm GetWorld ()
 
static Vec CreateVec (int size, int localSize=PETSC_DECIDE, bool ignoreOffProcEntries=true)
 
static Vec CreateVec (std::vector< double > data)
 
static Vec CreateAndSetVec (int size, double value)
 
static void SetupMat (Mat &rMat, int numRows, int numColumns, unsigned rowPreallocation, int numLocalRows=PETSC_DECIDE, int numLocalColumns=PETSC_DECIDE, bool ignoreOffProcEntries=true, bool newAllocationError=true)
 
static bool ReplicateBool (bool flag)
 
static void ReplicateException (bool flag)
 
static void DumpPetscObject (const Mat &rMat, const std::string &rOutputFileFullPath)
 
static void DumpPetscObject (const Vec &rVec, const std::string &rOutputFileFullPath)
 
static void ReadPetscObject (Mat &rMat, const std::string &rOutputFileFullPath, Vec rParallelLayout=nullptr)
 
static void ReadPetscObject (Vec &rVec, const std::string &rOutputFileFullPath, Vec rParallelLayout=nullptr)
 
static bool HasParMetis ()
 
static void Destroy (Vec &rVec)
 
static void Destroy (Mat &rMat)
 
static void SetOption (const char *pOptionName, const char *pOptionValue)
 

Static Public Attributes

static const unsigned MASTER_RANK =0
 

Static Private Member Functions

static void CheckCache ()
 

Static Private Attributes

static bool mPetscIsInitialised = false
 
static unsigned mNumProcessors = 0
 
static unsigned mRank = 0
 
static bool mIsolateProcesses = false
 

Detailed Description

A helper class of static methods.

Any PETSc operation that can be performed using the methods in this class should be.

This ensures a consistent interface in Chaste even when PETSc arguments change between PETSc versions. For example, VecDestroy takes different arguments in PETSc 3.2, and using PetscTools::Destroy(vec) takes care of this.

Definition at line 125 of file PetscTools.hpp.
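
As an illustration of this convention, a minimal sketch of creating and cleaning up a PETSc vector through this class (assumes PetscTools.hpp is on the include path):

    #include "PetscTools.hpp"

    Vec v = PetscTools::CreateVec(10);   // collective creation of a distributed vector of global size 10
    // ... use v ...
    PetscTools::Destroy(v);              // version-independent replacement for VecDestroy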

Member Function Documentation

◆ AmMaster()

bool PetscTools::AmMaster ( )
static
Returns
whether it is the master process or not.

If not running in parallel, or if IsolateProcesses has been called, always returns true.
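
A minimal sketch of the usual master-only guard (the message text is illustrative):

    #include <iostream>
    #include "PetscTools.hpp"

    if (PetscTools::AmMaster())
    {
        std::cout << "Only the master process prints this." << std::endl;
    }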

Definition at line 120 of file PetscTools.cpp.

References CheckCache(), MASTER_RANK, mIsolateProcesses, and mRank.

Referenced by ArchiveOpener< Archive, Stream >::ArchiveOpener(), Hdf5ToTxtConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToTxtConverter(), ProgressReporter::ProgressReporter(), LinearSystem::~LinearSystem(), ProgressReporter::~ProgressReporter(), NodeBasedCellPopulation< DIM >::AddReceivedCells(), NodeBasedCellPopulation< DIM >::AddReceivedHaloCells(), ParallelColumnDataWriter::AdvanceAlongUnlimitedDimension(), ParallelColumnDataWriter::Close(), OutputFileHandler::CommonConstructor(), NumericFileComparison::CompareFiles(), FileComparison::CompareFiles(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructCuboid(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructLinearMesh(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructRectangularMesh(), AbstractConvergenceTester< CELL, CARDIAC_PROBLEM, DIM, PROBLEM_DIM >::Converge(), CellMLToSharedLibraryConverter::ConvertCellmlToSo(), OutputFileHandler::CopyFileTo(), OutputDirectoryFifoQueue::CreateNextDir(), CardiacSimulation::CreateResumeXmlFile(), AbstractConvergenceTester< CELL, CARDIAC_PROBLEM, DIM, PROBLEM_DIM >::DisplayRun(), ParallelColumnDataWriter::EndDefineMode(), NodesOnlyMesh< SPACE_DIM >::EnlargeBoxCollection(), ActivationOutputModifier::FinaliseAtEnd(), AbstractCellPopulation< DIM >::GetNeighbouringEdgeIndices(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextBoundaryElement(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextCableElement(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextElement(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextNode(), DistributedBoxCollection< DIM >::GetProcessOwningNode(), DistributedTetrahedralMesh< DIM, DIM >::GetProcessRegion(), NodeBasedCellPopulation< DIM >::GetReceivedCells(), GenericEventHandler< NUM_EVENTS, CONCRETE >::HeadingsImpl(), AbstractBidomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), DistributedBoxCollection< DIM >::LoadBalance(), OutputFileHandler::MakeFoldersAndReturnFullPath(), NodeBasedCellPopulation< DIM >::NonBlockingSendCellsToNeighbourProcesses(), AbstractCellBasedSimulation< ELEMENT_DIM, SPACE_DIM >::OutputSimulationSetup(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), StreeterFibreGenerator< SPACE_DIM >::PreWriteCalculations(), Citations::Print(), ExecutableSupport::Print(), ExecutableSupport::PrintError(), ProgressReporter::PrintFinalising(), ProgressReporter::PrintInitialising(), Hdf5DataWriter::PutUnlimitedVariable(), ParallelColumnDataWriter::PutVariable(), ParallelColumnDataWriter::PutVector(), GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl(), AbstractFileComparison::ResetFiles(), HeartConfig::save(), AbstractTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::save(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), NodeBasedCellPopulation< DIM >::SendCellsToNeighbourProcesses(), AbstractFileComparison::Setup(), AbstractPdeModifier< DIM >::SetupSolve(), ExecutableSupport::ShowCopyright(), AbstractFileComparison::SkipHeaderLines(), LinearSystem::Solve(), ProgressReporter::Update(), AbstractPdeModifier< DIM >::UpdateAtEndOfOutputTimeStep(), AbstractPdeModifier< DIM >::UpdateAtEndOfSolve(), CellMutationStatesCountWriter< ELEMENT_DIM, SPACE_DIM >::Visit(), CellProliferativePhasesCountWriter< ELEMENT_DIM, SPACE_DIM >::Visit(), CellProliferativeTypesCountWriter< ELEMENT_DIM, SPACE_DIM >::Visit(), CellMutationStatesCountWriter< ELEMENT_DIM, SPACE_DIM >::VisitAnyPopulation(), CellProliferativePhasesCountWriter< ELEMENT_DIM, SPACE_DIM 
>::VisitAnyPopulation(), CellProliferativeTypesCountWriter< ELEMENT_DIM, SPACE_DIM >::VisitAnyPopulation(), HeartConfig::Write(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::Write(), Hdf5ToMeshalyzerConverter< ELEMENT_DIM, SPACE_DIM >::Write(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::WriteCmguiScript(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentPressureSolution(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentSpatialSolution(), AbstractPerElementWriter< ELEMENT_DIM, SPACE_DIM, DATA_SIZE >::WriteData(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFiles(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteGenericFileToMeshalyzer(), CellMutationStatesCountWriter< ELEMENT_DIM, SPACE_DIM >::WriteHeader(), BidomainProblem< DIM >::WriteInfo(), ExtendedBidomainProblem< DIM >::WriteInfo(), MonodomainProblem< ELEMENT_DIM, SPACE_DIM >::WriteInfo(), MonodomainPurkinjeProblem< ELEMENT_DIM, SPACE_DIM >::WriteInfo(), AbstractHdf5Converter< ELEMENT_DIM, SPACE_DIM >::WriteInfoFile(), HeartGeometryInformation< SPACE_DIM >::WriteLayerForEachNode(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), PseudoEcgCalculator< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::WritePseudoEcg(), OdeSolution::WriteToFile(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteVariablesOverTimeAtNodes(), OffLatticeSimulation< ELEMENT_DIM, SPACE_DIM >::WriteVisualizerSetupFile(), and XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteXdmfMasterFile().

◆ AmTopMost()

◆ Barrier()

void PetscTools::Barrier ( const std::string  callerId = "")
static

If MPI is set up, perform a barrier synchronisation. If not, or if IsolateProcesses has been called, this is a no-op.

Parameters
callerId    only used in debug mode; printed before & after the barrier call
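
A minimal sketch of a common pattern: the master does some filesystem work (illustrative here), and every process waits at the barrier before relying on the result.

    if (PetscTools::AmMaster())
    {
        // create an output directory or file here (master only)
    }
    PetscTools::Barrier("WaitForMasterOutput"); // the label is only printed in debug mode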

Definition at line 134 of file PetscTools.cpp.

References CHASTE_PETSC_NULLPTR, CheckCache(), GetMyRank(), mIsolateProcesses, and mPetscIsInitialised.

Referenced by Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToCmguiConverter(), Hdf5ToMeshalyzerConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToMeshalyzerConverter(), ParallelColumnDataWriter::AdvanceAlongUnlimitedDimension(), GenericEventHandler< NUM_EVENTS, CONCRETE >::BeginEventImpl(), BeginRoundRobin(), ElectrodesStimulusFactory< DIM >::CheckForElectrodesIntersection(), ParallelColumnDataWriter::Close(), OutputFileHandler::CommonConstructor(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructCuboid(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructLinearMesh(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructRectangularMesh(), OutputFileHandler::CopyFileTo(), OutputDirectoryFifoQueue::CreateNextDir(), GenericEventHandler< NUM_EVENTS, CONCRETE >::EndEventImpl(), EndRoundRobin(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::GeometricPartitioning(), DistributedTetrahedralMesh< DIM, DIM >::GetProcessRegion(), GenericEventHandler< NUM_EVENTS, CONCRETE >::HeadingsImpl(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::InitialiseWriter(), OutputFileHandler::MakeFoldersAndReturnFullPath(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), ExtendedBidomainProblem< DIM >::ProcessExtracellularStimulus(), GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl(), HeartConfig::save(), AbstractTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::save(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), AbstractFileComparison::Setup(), CardiacElectroMechanicsProblem< DIM, ELEC_PROB_DIM >::Solve(), NodeBasedCellPopulation< DIM >::Update(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentPressureSolution(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentSpatialSolution(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), HeartGeometryInformation< SPACE_DIM >::WriteLayerForEachNode(), and AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile().

◆ BeginRoundRobin()

void PetscTools::BeginRoundRobin ( )
static

Call at the start of a block of code that should be executed by each process in turn.

Note that this is not reliable for printing output to stdout in an ordered fashion, since on some systems each process may have a separate stdout buffer, and there's no way to force a flush to the underlying output stream. See e.g. http://stackoverflow.com/questions/5182045/openmpi-mpi-barrier-problems for more info.
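
A minimal sketch of a round-robin block, in which each process appends to the same file in rank order (the file name is illustrative):

    #include <fstream>
    #include "PetscTools.hpp"

    PetscTools::BeginRoundRobin();
    {
        std::ofstream file("results.txt", std::ios::app);
        file << "Data from process " << PetscTools::GetMyRank() << "\n";
    }
    PetscTools::EndRoundRobin();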

Definition at line 154 of file PetscTools.cpp.

References Barrier(), and GetMyRank().

Referenced by ActivationOutputModifier::FinaliseAtEnd(), GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl(), ExecutableSupport::ShowParallelLaunching(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteGenericFileToMeshalyzer(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), and AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::WriteResultsToFiles().

◆ CheckCache()

static void PetscTools::CheckCache ( )
inlinestaticprivate

Private method that makes sure (if this is the first use within a test) that PETSc has been probed.

Definition at line 142 of file PetscTools.hpp.

References mNumProcessors, and ResetCache().

Referenced by AmMaster(), AmTopMost(), Barrier(), GetMyRank(), GetNumProcs(), IsInitialised(), IsParallel(), IsSequential(), and ReplicateBool().

◆ CreateAndSetVec()

Vec PetscTools::CreateAndSetVec ( int  size,
double  value 
)
static

Create a vector of the specified size with all values set to be the given constant. SetFromOptions is called.

Parameters
size     the size of the vector
value    the value to set each entry
Returns
new PETSc vector
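
A minimal sketch of typical usage:

    Vec ones = PetscTools::CreateAndSetVec(100, 1.0); // length 100, every entry 1.0
    // ... use the vector ...
    PetscTools::Destroy(ones);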

Definition at line 258 of file PetscTools.cpp.

References CreateVec().

Referenced by OdeLinearSystemSolver::OdeLinearSystemSolver(), ParabolicBoxDomainPdeModifier< DIM >::SetupInitialSolutionVector(), NodeBasedCellPopulationWithBuskeUpdate< DIM >::UpdateNodeLocations(), ParabolicGrowingDomainPdeModifier< DIM >::UpdateSolutionVector(), and AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::VerifyJacobian().

◆ CreateVec() [1/2]

Vec PetscTools::CreateVec ( int  size,
int  localSize = PETSC_DECIDE,
bool  ignoreOffProcEntries = true 
)
static

◆ CreateVec() [2/2]

Vec PetscTools::CreateVec ( std::vector< double >  data)
static

Create a Vec from the given data.

Parameters
data    some data
Returns
new PETSc vector
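
A minimal sketch of typical usage (the data values are illustrative):

    #include <vector>
    #include "PetscTools.hpp"

    std::vector<double> data = {1.0, 2.0, 3.0};
    Vec vec = PetscTools::CreateVec(data); // global size 3, entries taken from data
    PetscTools::Destroy(vec);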

Definition at line 238 of file PetscTools.cpp.

References CreateVec().

◆ Destroy() [1/2]

static void PetscTools::Destroy ( Mat rMat)
inlinestatic

Destroy method. Note that PETSc 3.1 and earlier destroy based on a PETSc object, whereas PETSc 3.2 and later destroy based on a pointer to a PETSc object.

Parameters
rMat    a reference to the PETSc object

Definition at line 382 of file PetscTools.hpp.

◆ Destroy() [2/2]

static void PetscTools::Destroy ( Vec rVec)
inlinestatic

Destroy method. Note that PETSc 3.1 and earlier destroy based on a PETSc object, whereas PETSc 3.2 and later destroy based on a pointer to a PETSc object.

Parameters
rVec    a reference to the PETSc object

Definition at line 366 of file PetscTools.hpp.

Referenced by DistributedVectorFactory::DistributedVectorFactory(), DistributedVectorFactory::DistributedVectorFactory(), Hdf5ToTxtConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToTxtConverter(), Hdf5ToVtkConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToVtkConverter(), VoltageInterpolaterOntoMechanicsMesh< DIM >::VoltageInterpolaterOntoMechanicsMesh(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::~AbstractCardiacProblem(), AbstractContinuumMechanicsSolver< DIM >::~AbstractContinuumMechanicsSolver(), AbstractPdeModifier< DIM >::~AbstractPdeModifier(), CellVecData::~CellVecData(), ExtendedBidomainSolver< ELEM_DIM, SPACE_DIM >::~ExtendedBidomainSolver(), Hdf5DataWriter::~Hdf5DataWriter(), LinearSystem::~LinearSystem(), MonodomainPurkinjeSolver< ELEMENT_DIM, SPACE_DIM >::~MonodomainPurkinjeSolver(), MonodomainSolver< ELEMENT_DIM, SPACE_DIM >::~MonodomainSolver(), OdeLinearSystemSolver::~OdeLinearSystemSolver(), OperatorSplittingMonodomainSolver< ELEMENT_DIM, SPACE_DIM >::~OperatorSplittingMonodomainSolver(), ParallelColumnDataWriter::~ParallelColumnDataWriter(), PCTwoLevelsBlockDiagonal::~PCTwoLevelsBlockDiagonal(), AbstractContinuumMechanicsSolver< DIM >::AllocateMatrixMemory(), AbstractContinuumMechanicsSolver< DIM >::ApplyDirichletBoundaryConditions(), PetscMatTools::CheckEquality(), PetscMatTools::CheckSymmetry(), AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ComputeJacobianNumerically(), PseudoEcgCalculator< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ComputePseudoEcgAtOneTimeStep(), AbstractBidomainSolver< DIM, DIM >::FinaliseForBath(), HasParMetis(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Initialise(), AbstractLinearPdeSolver< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::InitialiseForSolve(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::load(), ExtendedBidomainProblem< DIM >::load(), PCBlockDiagonal::PCBlockDiagonalCreate(), PCLDUFactorisation::PCLDUFactorisationCreate(), PCTwoLevelsBlockDiagonal::PCTwoLevelsBlockDiagonalCreate(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), Hdf5DataWriter::PutStripedVector(), Hdf5DataWriter::PutVector(), ParallelColumnDataWriter::PutVectorStripe(), ReadPetscObject(), ReplicatableVector::RemovePetscContext(), ReplicatableVector::Replicate(), MonodomainPurkinjeSolver< ELEMENT_DIM, SPACE_DIM >::SetIdentityBlockToLhsMatrix(), PetscVecTools::SetupInterleavedVectorScatterGather(), StokesFlowSolver< DIM >::Solve(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Solve(), CardiacElectroMechanicsProblem< DIM, ELEC_PROB_DIM >::Solve(), AbstractDynamicLinearPdeSolver< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Solve(), SimpleNewtonNonlinearSolver::Solve(), SimplePetscNonlinearSolver::Solve(), LinearSystem::Solve(), LinearParabolicPdeSystemWithCoupledOdeSystemSolver< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::SolveAndWriteResultsToFile(), OdeLinearSystemSolver::SolveOneTimeStep(), AbstractNonlinearElasticitySolver< DIM >::SolveSnes(), AbstractNonlinearElasticitySolver< DIM >::TakeNewtonStep(), EllipticBoxDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), EllipticGrowingDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), ParabolicBoxDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), ParabolicGrowingDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), NodeBasedCellPopulationWithBuskeUpdate< DIM >::UpdateNodeLocations(), ParabolicGrowingDomainPdeModifier< DIM >::UpdateSolutionVector(), AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM 
>::VerifyJacobian(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::Write(), Hdf5ToMeshalyzerConverter< ELEMENT_DIM, SPACE_DIM >::Write(), ExtendedBidomainProblem< DIM >::WriteOneStep(), and PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteOutputDataToHdf5().

◆ DumpPetscObject() [1/2]

void PetscTools::DumpPetscObject ( const Mat rMat,
const std::string &  rOutputFileFullPath 
)
static

Dumps a given PETSc object to disk.

Parameters
rMat                   a matrix
rOutputFileFullPath    where to dump the matrix to disk

Definition at line 337 of file PetscTools.cpp.

References PETSC_DESTROY_PARAM.

◆ DumpPetscObject() [2/2]

void PetscTools::DumpPetscObject ( const Vec rVec,
const std::string &  rOutputFileFullPath 
)
static

Dumps a given PETSc object to disk.

Parameters
rVec                   a vector
rOutputFileFullPath    where to dump the vector to disk
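
A minimal sketch of a dump-and-reload round trip (the file path is illustrative; ReadPetscObject is documented below):

    Vec v = PetscTools::CreateAndSetVec(50, 3.14);
    PetscTools::DumpPetscObject(v, "/tmp/saved_vector.dat");
    PetscTools::Destroy(v);

    Vec v_reloaded;
    PetscTools::ReadPetscObject(v_reloaded, "/tmp/saved_vector.dat");
    PetscTools::Destroy(v_reloaded);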

Definition at line 351 of file PetscTools.cpp.

References PETSC_DESTROY_PARAM.

◆ EndRoundRobin()

◆ GetMyRank()

unsigned PetscTools::GetMyRank ( )
static
Returns
our rank.

If PETSc has not been initialized, returns 0.

Definition at line 114 of file PetscTools.cpp.

References CheckCache(), and mRank.

Referenced by ArchiveOpener< Archive, Stream >::ArchiveOpener(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::AddPointData(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::AddPointData(), CompressibleNonlinearElasticitySolver< DIM >::AssembleSystem(), IncompressibleNonlinearElasticitySolver< DIM >::AssembleSystem(), CellId::AssignCellId(), Barrier(), BeginRoundRobin(), AbstractContinuumMechanicsAssembler< DIM, CAN_ASSEMBLE_VECTOR, CAN_ASSEMBLE_MATRIX >::DoAssemble(), EndRoundRobin(), ActivationOutputModifier::FinaliseAtEnd(), FormDebugHead(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::GeometricPartitioning(), NodesOnlyMesh< 2 >::GetAllNodeIndices(), NodesOnlyMesh< SPACE_DIM >::GetMaximumNodeIndex(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::GetNodeOrHaloNode(), DistributedBoxCollection< DIM >::GetProcessOwningNode(), NodesOnlyMesh< 2 >::IsOwned(), DistributedBoxCollection< DIM >::LoadBalance(), CardiacSimulationArchiver< PROBLEM_CLASS >::Migrate(), NodeBasedCellPopulation< DIM >::NonBlockingSendCellsToNeighbourProcesses(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ParMetisLibraryNodeAndElementPartitioning(), DistributedTetrahedralMesh< DIM, DIM >::ParMetisLibraryNodeAndElementPartitioning(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), Timer::Print(), ExecutableSupport::PrintError(), GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl(), NodeBasedCellPopulation< DIM >::SendCellsToNeighbourProcesses(), ExecutableSupport::ShowParallelLaunching(), StokesFlowSolver< DIM >::Solve(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::SolveBoundaryElementMapping(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::SolveElementMapping(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::SolveNodeMapping(), NodesOnlyMesh< SPACE_DIM >::SolveNodeMapping(), AbstractNonlinearElasticitySolver< DIM >::TakeNewtonStep(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::UpdateQueueFromRemote(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), ExecutableSupport::WriteMachineInfoFile(), ExecutableSupport::WriteProvenanceInfoFile(), NodeBasedCellPopulation< DIM >::WriteVtkResultsToFile(), and NodeBasedCellPopulationWithParticles< DIM >::WriteVtkResultsToFile().

◆ GetNumProcs()

unsigned PetscTools::GetNumProcs ( )
static
Returns
total number of processors.

Definition at line 108 of file PetscTools.cpp.

References CheckCache(), and mNumProcessors.

Referenced by DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::DistanceMapCalculator(), DistributedBoxCollection< DIM >::DistributedBoxCollection(), ObjectCommunicator< CLASS >::ObjectCommunicator(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::AddPointData(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::AddPointData(), CellId::AssignCellId(), AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::CalculateHaloNodesFromNodeExchange(), AbstractTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::CalculateNodeExchange(), DistributedVectorFactory::CalculateOwnership(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::ComputeDistanceMap(), EndRoundRobin(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::GeometricPartitioning(), NodesOnlyMesh< 2 >::GetAllNodeIndices(), NodesOnlyMesh< SPACE_DIM >::GetMaximumNodeIndex(), DistributedBoxCollection< DIM >::GetProcessOwningNode(), CardiacSimulationArchiver< PROBLEM_CLASS >::Migrate(), NodeBasedCellPopulation< DIM >::NonBlockingSendCellsToNeighbourProcesses(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ParMetisLibraryNodeAndElementPartitioning(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl(), DistributedVectorFactory::rGetGlobalLows(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), NodeBasedCellPopulation< DIM >::SendCellsToNeighbourProcesses(), AbstractMesh< ELEMENT_DIM, SPACE_DIM >::SetDistributedVectorFactory(), PeriodicNodesOnlyMesh< SPACE_DIM >::SetUpBoxCollection(), ExecutableSupport::ShowParallelLaunching(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::UpdateQueueFromRemote(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::WorkOnLocalQueue(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), and ExecutableSupport::WriteMachineInfoFile().

◆ GetWorld()

MPI_Comm PetscTools::GetWorld ( )
static

Get the MPI Communicator representing the whole set of running processes. This will normally be PETSC_COMM_WORLD, unless IsolateProcesses has been called, in which case it will be PETSC_COMM_SELF.

Returns
the MPI Communicator representing the whole set of running processes
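
A minimal sketch of using the returned communicator for a raw MPI call, so that the code automatically respects IsolateProcesses:

    int num_procs;
    MPI_Comm_size(PetscTools::GetWorld(), &num_procs);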

Definition at line 178 of file PetscTools.cpp.

References mIsolateProcesses.

Referenced by NodeBasedCellPopulation< DIM >::GetSizeOfCellPopulation(), ObjectCommunicator< CLASS >::IRecvObject(), NodesOnlyMesh< SPACE_DIM >::IsANodeCloseToDomainBoundary(), ObjectCommunicator< CLASS >::ISendObject(), ObjectCommunicator< CLASS >::RecvObject(), GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl(), AbstractCellPopulation< DIM >::rGetMesh(), ObjectCommunicator< CLASS >::SendObject(), and ObjectCommunicator< CLASS >::SendRecvObject().

◆ HasParMetis()

bool PetscTools::HasParMetis ( )
static

Checks if PETSc has been configured with ParMetis partitioning support.

Returns
true If ParMetis partitioning (via PETSc) is available

Definition at line 453 of file PetscTools.cpp.

References Destroy(), PETSC_DESTROY_PARAM, and SetupMat().

Referenced by DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ComputeMeshPartitioning(), and NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning().

◆ IsInitialised()

bool PetscTools::IsInitialised ( )
static
Returns
whether PETSc has been initialised yet.

Definition at line 85 of file PetscTools.cpp.

References CheckCache(), and mPetscIsInitialised.

Referenced by Citations::Register().

◆ IsIsolated()

bool PetscTools::IsIsolated ( )
static
Returns
whether this process is isolated, i.e. IsolateProcesses has been called.

Definition at line 103 of file PetscTools.cpp.

References mIsolateProcesses.

Referenced by GenericEventHandler< NUM_EVENTS, CONCRETE >::ReportImpl().

◆ IsolateProcesses()

void PetscTools::IsolateProcesses ( bool  isolate = true)
static

Where work can be split between isolated processes, it would be nice to be able to do so easily without worrying about collective calls made inside classes such as OutputFileHandler leading to deadlock. This method attempts to enable this behaviour. If the flag is set then AmMaster and AmTopMost always return true, Barrier becomes a no-op, and ReplicateBool doesn't replicate.

Parameters
isolate    whether to consider processes as isolated
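
A minimal sketch (the per-process work is illustrative and must avoid collective PETSc calls while isolated):

    PetscTools::IsolateProcesses(true);
    // ... each process works independently; AmMaster() returns true everywhere ...
    PetscTools::IsolateProcesses(false); // restore normal collective behaviour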

Definition at line 173 of file PetscTools.cpp.

References mIsolateProcesses.

◆ IsParallel()

◆ IsSequential()

bool PetscTools::IsSequential ( )
static

◆ ReadPetscObject() [1/2]

void PetscTools::ReadPetscObject ( Mat rMat,
const std::string &  rOutputFileFullPath,
Vec  rParallelLayout = nullptr 
)
static

Read a previously dumped PETSc object from disk.

Parameters
rMat                   a matrix
rOutputFileFullPath    where to read the matrix from
rParallelLayout        If provided, rMat will have the same parallel layout. Its content is irrelevant.
Todo:
: #1082 work out appropriate nz allocation.

Definition at line 365 of file PetscTools.cpp.

References Destroy(), PETSC_DESTROY_PARAM, and SetupMat().

◆ ReadPetscObject() [2/2]

void PetscTools::ReadPetscObject ( Vec rVec,
const std::string &  rOutputFileFullPath,
Vec  rParallelLayout = nullptr 
)
static

Read a previously dumped PETSc object from disk.

Parameters
rVec                   a vector
rOutputFileFullPath    where to read the vector from
rParallelLayout        If provided, rVec will have the same parallel layout. Its content is irrelevant.

Definition at line 420 of file PetscTools.cpp.

References PETSC_DESTROY_PARAM.

◆ ReplicateBool()

◆ ReplicateException()

◆ ResetCache()

void PetscTools::ResetCache ( )
static

Reset our cached values: whether PETSc is initialised, how many processors there are, and which one we are.

Definition at line 58 of file PetscTools.cpp.

References mNumProcessors, mPetscIsInitialised, and mRank.

Referenced by CheckCache(), PetscSetupUtils::InitialisePetsc(), and PetscSetupUtils::ResetStatusCache().

◆ SetOption()

static void PetscTools::SetOption ( const char *  pOptionName,
const char *  pOptionValue 
)
inlinestatic

Set a PETSc option. This is a wrapper for PetscOptionsSetValue, which changed signature in PETSc 3.7.

Parameters
pOptionName     the option name
pOptionValue    the option value
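
A minimal sketch using two standard PETSc KSP options for illustration:

    PetscTools::SetOption("-ksp_type", "cg");
    PetscTools::SetOption("-ksp_rtol", "1e-8");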

Definition at line 398 of file PetscTools.hpp.

Referenced by PCBlockDiagonal::PCBlockDiagonalSetUp(), PCLDUFactorisation::PCLDUFactorisationSetUp(), PCTwoLevelsBlockDiagonal::PCTwoLevelsBlockDiagonalSetUp(), LinearSystem::ResetKspSolver(), AbstractNonlinearElasticitySolver< DIM >::SetKspSolverAndPcType(), LinearSystem::Solve(), and AbstractNonlinearElasticitySolver< DIM >::SolveSnes().

◆ SetupMat()

void PetscTools::SetupMat ( Mat rMat,
int  numRows,
int  numColumns,
unsigned  rowPreallocation,
int  numLocalRows = PETSC_DECIDE,
int  numLocalColumns = PETSC_DECIDE,
bool  ignoreOffProcEntries = true,
bool  newAllocationError = true 
)
static

Set up a matrix - set the size using the given parameters. The number of local rows and columns is by default PETSC_DECIDE. SetFromOptions is called.

Parameters
rMat                    the matrix
numRows                 the number of rows in the matrix
numColumns              the number of columns in the matrix
rowPreallocation        the maximum number of nonzero entries expected on a row. A value of 0 is allowed: no preallocation is then done and the user must preallocate the memory for the matrix themselves.
numLocalRows            the number of local rows (defaults to PETSC_DECIDE)
numLocalColumns         the number of local columns (defaults to PETSC_DECIDE)
ignoreOffProcEntries    tells PETSc to drop off-processor entries
newAllocationError      tells PETSc whether to set MAT_NEW_NONZERO_ALLOCATION_ERR (currently only used in PETSc 3.3 and later; in PETSc 3.2 and earlier MAT_NEW_NONZERO_ALLOCATION_ERR defaults to false, whereas in PETSc 3.3 it defaults to true)
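
A minimal sketch: create a 100x100 distributed matrix expecting at most 5 nonzeros per row, fill the locally owned diagonal, and assemble (the fill is illustrative):

    Mat mat;
    PetscTools::SetupMat(mat, 100, 100, 5);

    PetscInt lo, hi;
    MatGetOwnershipRange(mat, &lo, &hi);
    for (PetscInt row = lo; row < hi; row++)
    {
        MatSetValue(mat, row, row, 1.0, INSERT_VALUES); // diagonal entry
    }
    MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
    PetscTools::Destroy(mat);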

Definition at line 272 of file PetscTools.cpp.

References CHASTE_PETSC_NULLPTR, and IsSequential().

Referenced by LinearSystem::LinearSystem(), LinearSystem::LinearSystem(), AbstractContinuumMechanicsSolver< DIM >::AllocateMatrixMemory(), Hdf5DataWriter::ApplyPermutation(), HasParMetis(), BidomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), ExtendedBidomainSolver< ELEM_DIM, SPACE_DIM >::InitialiseForSolve(), MonodomainPurkinjeSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), MonodomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), OperatorSplittingMonodomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), ReadPetscObject(), LinearSystem::SetPrecondMatrixIsDifferentFromLhs(), SimplePetscNonlinearSolver::Solve(), and AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::VerifyJacobian().

Member Data Documentation

◆ MASTER_RANK

const unsigned PetscTools::MASTER_RANK =0
static

As a convention, we consider processor 0 the master process.

Definition at line 155 of file PetscTools.hpp.

Referenced by AmMaster().

◆ mIsolateProcesses

bool PetscTools::mIsolateProcesses = false
staticprivate

Whether to pretend that we're just running many master processes independently.

Definition at line 139 of file PetscTools.hpp.

Referenced by AmMaster(), AmTopMost(), Barrier(), GetWorld(), IsIsolated(), IsolateProcesses(), IsSequential(), and ReplicateBool().

◆ mNumProcessors

unsigned PetscTools::mNumProcessors = 0
staticprivate

The total number of processors.

Definition at line 133 of file PetscTools.hpp.

Referenced by AmTopMost(), CheckCache(), GetNumProcs(), IsParallel(), IsSequential(), and ResetCache().

◆ mPetscIsInitialised

bool PetscTools::mPetscIsInitialised = false
staticprivate

Whether PETSc has been initialised.

Definition at line 130 of file PetscTools.hpp.

Referenced by Barrier(), IsInitialised(), ReplicateBool(), and ResetCache().

◆ mRank

unsigned PetscTools::mRank = 0
staticprivate

Which processor we are.

Definition at line 136 of file PetscTools.hpp.

Referenced by AmMaster(), AmTopMost(), GetMyRank(), and ResetCache().


The documentation for this class was generated from the following files: