Chaste
Release::2017.1
PetscTools Class Reference
#include <PetscTools.hpp>
Static Public Member Functions
static void | ResetCache () |
static bool | IsInitialised () |
static bool | IsSequential () |
static bool | IsParallel () |
static bool | IsIsolated () |
static unsigned | GetNumProcs () |
static unsigned | GetMyRank () |
static bool | AmMaster () |
static bool | AmTopMost () |
static void | Barrier (const std::string callerId="") |
static void | BeginRoundRobin () |
static void | EndRoundRobin () |
static void | IsolateProcesses (bool isolate=true) |
static MPI_Comm | GetWorld () |
static Vec | CreateVec (int size, int localSize=PETSC_DECIDE, bool ignoreOffProcEntries=true) |
static Vec | CreateVec (std::vector< double > data) |
static Vec | CreateAndSetVec (int size, double value) |
static void | SetupMat (Mat &rMat, int numRows, int numColumns, unsigned rowPreallocation, int numLocalRows=PETSC_DECIDE, int numLocalColumns=PETSC_DECIDE, bool ignoreOffProcEntries=true, bool newAllocationError=true) |
static bool | ReplicateBool (bool flag) |
static void | ReplicateException (bool flag) |
static void | DumpPetscObject (const Mat &rMat, const std::string &rOutputFileFullPath) |
static void | DumpPetscObject (const Vec &rVec, const std::string &rOutputFileFullPath) |
static void | ReadPetscObject (Mat &rMat, const std::string &rOutputFileFullPath, Vec rParallelLayout=nullptr) |
static void | ReadPetscObject (Vec &rVec, const std::string &rOutputFileFullPath, Vec rParallelLayout=nullptr) |
static bool | HasParMetis () |
static void | Destroy (Vec &rVec) |
static void | Destroy (Mat &rMat) |
static void | SetOption (const char *pOptionName, const char *pOptionValue) |
Static Public Attributes
static const unsigned | MASTER_RANK =0 |
Static Private Member Functions
static void | CheckCache () |
Static Private Attributes
static bool | mPetscIsInitialised = false |
static unsigned | mNumProcessors = 0 |
static unsigned | mRank = 0 |
static bool | mIsolateProcesses = false |
A helper class of static methods.
Any PETSc operation that can be performed using the methods in this class should be.
This ensures a consistent interface in Chaste even when PETSc arguments change between PETSc versions. For example, VecDestroy takes different arguments in PETSc 3.2; using PetscTools::Destroy(vec) takes care of this.
Definition at line 111 of file PetscTools.hpp.
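For example, a minimal create/use/destroy cycle written against this wrapper, so the same code compiles against any supported PETSc version (a sketch, assuming PETSc has already been initialised by the test infrastructure):

    #include "PetscTools.hpp"

    Vec v = PetscTools::CreateVec(100); // distributed vector; PETSc chooses the local sizes
    // ... fill and use v ...
    PetscTools::Destroy(v);             // calls VecDestroy correctly whatever the PETSc version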
bool PetscTools::AmMaster ( ) [static]
If not running in parallel, or if IsolateProcesses has been called, always returns true.
Definition at line 120 of file PetscTools.cpp.
References CheckCache(), MASTER_RANK, mIsolateProcesses, and mRank.
Referenced by NodeBasedCellPopulation< DIM >::AddReceivedCells(), NodeBasedCellPopulation< DIM >::AddReceivedHaloCells(), ParallelColumnDataWriter::AdvanceAlongUnlimitedDimension(), ArchiveOpener< Archive, Stream >::ArchiveOpener(), ParallelColumnDataWriter::Close(), OutputFileHandler::CommonConstructor(), NumericFileComparison::CompareFiles(), FileComparison::CompareFiles(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ComputeMeshPartitioning(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructCuboid(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructLinearMesh(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructRectangularMesh(), AbstractConvergenceTester< CELL, CARDIAC_PROBLEM, DIM, PROBLEM_DIM >::Converge(), CellMLToSharedLibraryConverter::ConvertCellmlToSo(), OutputFileHandler::CopyFileTo(), OutputDirectoryFifoQueue::CreateNextDir(), CellMLToSharedLibraryConverter::CreateOptionsFile(), CardiacSimulation::CreateResumeXmlFile(), AbstractConvergenceTester< CELL, CARDIAC_PROBLEM, DIM, PROBLEM_DIM >::DisplayRun(), ParallelColumnDataWriter::EndDefineMode(), NodesOnlyMesh< SPACE_DIM >::EnlargeBoxCollection(), ActivationOutputModifier::FinaliseAtEnd(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextBoundaryElement(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextCableElement(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextElement(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::GetNextNode(), NodeBasedCellPopulation< DIM >::GetReceivedCells(), Hdf5ToTxtConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToTxtConverter(), GenericEventHandler< 11, MeshEventHandler >::HeadingsImpl(), AbstractBidomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), DistributedBoxCollection< DIM >::LoadBalance(), OutputFileHandler::MakeFoldersAndReturnFullPath(), NodeBasedCellPopulation< DIM >::NonBlockingSendCellsToNeighbourProcesses(), AbstractCellBasedSimulation< ELEMENT_DIM, SPACE_DIM >::OutputSimulationSetup(), PCBlockDiagonal::PCBlockDiagonal(), PCLDUFactorisation::PCLDUFactorisation(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), StreeterFibreGenerator< SPACE_DIM >::PreWriteCalculations(), Citations::Print(), ExecutableSupport::Print(), ExecutableSupport::PrintError(), ProgressReporter::PrintFinalising(), ProgressReporter::PrintInitialising(), ProgressReporter::ProgressReporter(), Hdf5DataWriter::PutUnlimitedVariable(), ParallelColumnDataWriter::PutVariable(), ParallelColumnDataWriter::PutVector(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), AbstractFileComparison::ResetFiles(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), HeartConfig::save(), AbstractTetrahedralMesh< SPACE_DIM, SPACE_DIM >::save(), NodeBasedCellPopulation< DIM >::SendCellsToNeighbourProcesses(), DistributedBoxCollection< SPACE_DIM >::serialize(), AbstractFileComparison::Setup(), DistributedBoxCollection< DIM >::SetupHaloBoxes(), AbstractPdeModifier< DIM >::SetupSolve(), ExecutableSupport::ShowCopyright(), AbstractFileComparison::SkipHeaderLines(), LinearSystem::Solve(), AbstractCellBasedSimulation< ELEMENT_DIM, SPACE_DIM >::Solve(), ProgressReporter::Update(), AbstractPdeModifier< DIM >::UpdateAtEndOfOutputTimeStep(), AbstractPdeModifier< DIM >::UpdateAtEndOfSolve(), CellProliferativeTypesCountWriter< ELEMENT_DIM, SPACE_DIM >::Visit(), CellProliferativePhasesCountWriter< ELEMENT_DIM, SPACE_DIM >::Visit(), CellMutationStatesCountWriter< ELEMENT_DIM, SPACE_DIM >::Visit(), 
CellProliferativeTypesCountWriter< ELEMENT_DIM, SPACE_DIM >::VisitAnyPopulation(), CellProliferativePhasesCountWriter< ELEMENT_DIM, SPACE_DIM >::VisitAnyPopulation(), CellMutationStatesCountWriter< ELEMENT_DIM, SPACE_DIM >::VisitAnyPopulation(), Hdf5ToMeshalyzerConverter< ELEMENT_DIM, SPACE_DIM >::Write(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::Write(), HeartConfig::Write(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::WriteCmguiScript(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentPressureSolution(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentSpatialSolution(), AbstractPerElementWriter< SPACE_DIM, SPACE_DIM, SPACE_DIM *SPACE_DIM >::WriteData(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFiles(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMeshReader(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteGenericFileToMeshalyzer(), CellMutationStatesCountWriter< ELEMENT_DIM, SPACE_DIM >::WriteHeader(), MonodomainProblem< ELEMENT_DIM, SPACE_DIM >::WriteInfo(), MonodomainPurkinjeProblem< ELEMENT_DIM, SPACE_DIM >::WriteInfo(), BidomainProblem< DIM >::WriteInfo(), ExtendedBidomainProblem< DIM >::WriteInfo(), AbstractHdf5Converter< ELEMENT_DIM, SPACE_DIM >::WriteInfoFile(), HeartGeometryInformation< SPACE_DIM >::WriteLayerForEachNode(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), PseudoEcgCalculator< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::WritePseudoEcg(), AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::WriteResultsToFiles(), OdeSolution::WriteToFile(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteVariablesOverTimeAtNodes(), OffLatticeSimulation< ELEMENT_DIM, SPACE_DIM >::WriteVisualizerSetupFile(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteXdmfMasterFile(), LinearSystem::~LinearSystem(), and ProgressReporter::~ProgressReporter().
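A typical use of AmMaster is to guard work that must happen exactly once, followed by a barrier so the other processes wait for it (a sketch; the directory-creation step is illustrative):

    if (PetscTools::AmMaster())
    {
        // only rank 0 (MASTER_RANK) creates the output directory
    }
    PetscTools::Barrier("CreateOutputDir"); // everyone waits until the master is done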
bool PetscTools::AmTopMost ( ) [static]
If not running in parallel, or if IsolateProcesses has been called, always returns true.
Definition at line 126 of file PetscTools.cpp.
References CheckCache(), mIsolateProcesses, mNumProcessors, and mRank.
Referenced by NodeBasedCellPopulation< DIM >::AddReceivedCells(), NodeBasedCellPopulation< DIM >::AddReceivedHaloCells(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructFromMeshReader(), NodesOnlyMesh< SPACE_DIM >::EnlargeBoxCollection(), NodeBasedCellPopulation< DIM >::GetReceivedCells(), DistributedBoxCollection< DIM >::LoadBalance(), NodeBasedCellPopulation< DIM >::NonBlockingSendCellsToNeighbourProcesses(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), NodeBasedCellPopulation< DIM >::SendCellsToNeighbourProcesses(), DistributedBoxCollection< DIM >::SetupHaloBoxes(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteGenericFileToMeshalyzer(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), and AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::WriteResultsToFiles().
void PetscTools::Barrier ( const std::string callerId = "" ) [static]
If MPI is set up, perform a barrier synchronisation. If not, or if IsolateProcesses has been called, it is a no-op.
callerId | only used in debug mode; printed before & after the barrier call |
Definition at line 134 of file PetscTools.cpp.
References CheckCache(), GetMyRank(), mIsolateProcesses, and mPetscIsInitialised.
Referenced by ParallelColumnDataWriter::AdvanceAlongUnlimitedDimension(), ArchiveOpener< Archive, Stream >::ArchiveOpener(), GenericEventHandler< 11, MeshEventHandler >::BeginEventImpl(), BeginRoundRobin(), ElectrodesStimulusFactory< DIM >::CheckForElectrodesIntersection(), ParallelColumnDataWriter::Close(), OutputFileHandler::CommonConstructor(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ComputeMeshPartitioning(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructCuboid(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructFromMeshReader(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructLinearMesh(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructRectangularMesh(), OutputFileHandler::CopyFileTo(), OutputDirectoryFifoQueue::CreateNextDir(), CellMLToSharedLibraryConverter::CreateOptionsFile(), GenericEventHandler< 11, MeshEventHandler >::EndEventImpl(), EndRoundRobin(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::GeometricPartitioning(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToCmguiConverter(), Hdf5ToMeshalyzerConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToMeshalyzerConverter(), GenericEventHandler< 11, MeshEventHandler >::HeadingsImpl(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::InitialiseWriter(), OutputFileHandler::MakeFoldersAndReturnFullPath(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), ExtendedBidomainProblem< DIM >::ProcessExtracellularStimulus(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), HeartConfig::save(), AbstractTetrahedralMesh< SPACE_DIM, SPACE_DIM >::save(), AbstractFileComparison::Setup(), CardiacElectroMechanicsProblem< DIM, ELEC_PROB_DIM >::Solve(), NodeBasedCellPopulation< DIM >::Update(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentPressureSolution(), AbstractContinuumMechanicsSolver< DIM >::WriteCurrentSpatialSolution(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), HeartGeometryInformation< SPACE_DIM >::WriteLayerForEachNode(), and AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile().
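For example (the caller ID is an arbitrary label, printed either side of the barrier only in debug mode):

    PetscTools::Barrier("BeforeAssembly"); // all processes synchronise here; no-op if sequential or isolated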
void PetscTools::BeginRoundRobin ( ) [static]
Call at the start of a block of code that should be executed by each process in turn.
Note that this is not reliable for printing output to stdout in an ordered fashion, since on some systems each process may have a separate stdout buffer, and there's no way to force a flush to the underlying output stream. See e.g. http://stackoverflow.com/questions/5182045/openmpi-mpi-barrier-problems for more info.
Definition at line 150 of file PetscTools.cpp.
References Barrier(), and GetMyRank().
Referenced by ActivationOutputModifier::FinaliseAtEnd(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), ExecutableSupport::ShowParallelLaunching(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteGenericFileToMeshalyzer(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), and AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::WriteResultsToFiles().
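A minimal sketch of the intended pattern, with EndRoundRobin (documented below) closing the block:

    PetscTools::BeginRoundRobin(); // processes enter this block one at a time, in rank order
    {
        // e.g. append this process's portion of the results to a shared file
    }
    PetscTools::EndRoundRobin();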
|
inlinestaticprivate |
Private method makes sure that (if this is the first use within a test) then PETSc has been probed.
Definition at line 128 of file PetscTools.hpp.
References ResetCache().
Referenced by AmMaster(), AmTopMost(), Barrier(), GetMyRank(), GetNumProcs(), IsInitialised(), IsParallel(), IsSequential(), and ReplicateBool().
Vec PetscTools::CreateAndSetVec ( int size, double value ) [static]
Create a vector of the specified size with all values set to be the given constant. SetFromOptions is called.
size | the size of the vector |
value | the value to set each entry |
Definition at line 254 of file PetscTools.cpp.
References CreateVec().
Referenced by OdeLinearSystemSolver::OdeLinearSystemSolver(), ParabolicBoxDomainPdeModifier< DIM >::SetupInitialSolutionVector(), NodeBasedCellPopulationWithBuskeUpdate< DIM >::UpdateNodeLocations(), ParabolicGrowingDomainPdeModifier< DIM >::UpdateSolutionVector(), and AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::VerifyJacobian().
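For example (a sketch):

    Vec ones = PetscTools::CreateAndSetVec(50, 1.0); // 50 entries, each set to 1.0
    // ... use ones ...
    PetscTools::Destroy(ones);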
Vec PetscTools::CreateVec ( int size, int localSize = PETSC_DECIDE, bool ignoreOffProcEntries = true ) [static]
Create a vector of the specified size. SetFromOptions is called.
size | the size of the vector |
localSize | the local number of items owned by this process |
ignoreOffProcEntries | whether to ignore entries destined to be stored on a separate processor when assembling (eliminates global reductions). |
Definition at line 214 of file PetscTools.cpp.
Referenced by AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ComputeJacobianNumerically(), PseudoEcgCalculator< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ComputePseudoEcgAtOneTimeStep(), CreateAndSetVec(), DistributedVectorFactory::CreateVec(), CreateVec(), DistributedVectorFactory::DistributedVectorFactory(), PetscMatTools::GetMatrixRowDistributed(), LinearSystem::LinearSystem(), PCBlockDiagonal::PCBlockDiagonalCreate(), PCLDUFactorisation::PCLDUFactorisationCreate(), PCTwoLevelsBlockDiagonal::PCTwoLevelsBlockDiagonalCreate(), Hdf5DataWriter::PutStripedVector(), Hdf5DataWriter::PutVector(), CellVecData::serialize(), PetscVecTools::SetupInterleavedVectorScatterGather(), and VoltageInterpolaterOntoMechanicsMesh< DIM >::VoltageInterpolaterOntoMechanicsMesh().
Vec PetscTools::CreateVec ( std::vector< double > data ) [static]
Create a Vec from the given data.
data | the data to put in the vector |
Definition at line 234 of file PetscTools.cpp.
References CreateVec().
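For example (a sketch; the values are arbitrary):

    std::vector<double> data;
    data.push_back(0.0);
    data.push_back(1.5);
    data.push_back(3.0);
    Vec v = PetscTools::CreateVec(data); // global entry i holds data[i]
    PetscTools::Destroy(v);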
void PetscTools::Destroy ( Vec & rVec ) [inline, static]
Destroy method. Note that PETSc 3.1 and earlier destroy based on a PETSc object, whereas PETSc 3.2 and later destroy based on a pointer to a PETSc object.
rVec | a reference to the PETSc object |
Definition at line 352 of file PetscTools.hpp.
Referenced by AbstractContinuumMechanicsSolver< DIM >::AllocateMatrixMemory(), AbstractContinuumMechanicsSolver< DIM >::ApplyDirichletBoundaryConditions(), BoundaryConditionsContainer< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ApplyDirichletToLinearProblem(), BidomainSolver< ELEMENT_DIM, SPACE_DIM >::BidomainSolver(), PetscMatTools::CheckEquality(), PetscMatTools::CheckSymmetry(), AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ComputeJacobianNumerically(), PseudoEcgCalculator< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::ComputePseudoEcgAtOneTimeStep(), DistributedVectorFactory::DistributedVectorFactory(), AbstractBidomainSolver< ELEMENT_DIM, SPACE_DIM >::FinaliseLinearSystem(), AbstractExtendedBidomainSolver< ELEMENT_DIM, SPACE_DIM >::FinaliseLinearSystem(), HasParMetis(), Hdf5ToTxtConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToTxtConverter(), Hdf5ToVtkConverter< ELEMENT_DIM, SPACE_DIM >::Hdf5ToVtkConverter(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Initialise(), AbstractLinearPdeSolver< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::InitialiseForSolve(), ExtendedBidomainProblem< DIM >::load(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, 2 >::load(), PCBlockDiagonal::PCBlockDiagonal(), PCBlockDiagonal::PCBlockDiagonalCreate(), PCLDUFactorisation::PCLDUFactorisation(), PCLDUFactorisation::PCLDUFactorisationCreate(), PCTwoLevelsBlockDiagonal::PCTwoLevelsBlockDiagonalCreate(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), Hdf5DataWriter::PutStripedVector(), Hdf5DataWriter::PutVector(), ParallelColumnDataWriter::PutVectorStripe(), ReadPetscObject(), ReplicatableVector::RemovePetscContext(), ReplicatableVector::Replicate(), MonodomainPurkinjeSolver< ELEMENT_DIM, SPACE_DIM >::SetIdentityBlockToLhsMatrix(), PetscVecTools::SetupInterleavedVectorScatterGather(), SimplePetscNonlinearSolver::Solve(), StokesFlowSolver< DIM >::Solve(), SimpleNewtonNonlinearSolver::Solve(), AbstractDynamicLinearPdeSolver< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Solve(), CardiacElectroMechanicsProblem< DIM, ELEC_PROB_DIM >::Solve(), LinearSystem::Solve(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Solve(), LinearParabolicPdeSystemWithCoupledOdeSystemSolver< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::SolveAndWriteResultsToFile(), OdeLinearSystemSolver::SolveOneTimeStep(), AbstractNonlinearElasticitySolver< DIM >::SolveSnes(), AbstractNonlinearElasticitySolver< DIM >::TakeNewtonStep(), ParabolicGrowingDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), EllipticGrowingDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), ParabolicBoxDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), EllipticBoxDomainPdeModifier< DIM >::UpdateAtEndOfTimeStep(), NodeBasedCellPopulationWithBuskeUpdate< DIM >::UpdateNodeLocations(), ParabolicGrowingDomainPdeModifier< DIM >::UpdateSolutionVector(), AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::VerifyJacobian(), VoltageInterpolaterOntoMechanicsMesh< DIM >::VoltageInterpolaterOntoMechanicsMesh(), Hdf5ToMeshalyzerConverter< ELEMENT_DIM, SPACE_DIM >::Write(), Hdf5ToCmguiConverter< ELEMENT_DIM, SPACE_DIM >::Write(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::WriteExtraVariablesOneStep(), ExtendedBidomainProblem< DIM >::WriteOneStep(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteOutputDataToHdf5(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::~AbstractCardiacProblem(), AbstractContinuumMechanicsSolver< DIM >::~AbstractContinuumMechanicsSolver(), 
AbstractPdeModifier< DIM >::~AbstractPdeModifier(), CellVecData::~CellVecData(), ExtendedBidomainSolver< ELEM_DIM, SPACE_DIM >::~ExtendedBidomainSolver(), Hdf5DataWriter::~Hdf5DataWriter(), LinearSystem::~LinearSystem(), MonodomainPurkinjeSolver< ELEMENT_DIM, SPACE_DIM >::~MonodomainPurkinjeSolver(), MonodomainSolver< ELEMENT_DIM, SPACE_DIM >::~MonodomainSolver(), OdeLinearSystemSolver::~OdeLinearSystemSolver(), OperatorSplittingMonodomainSolver< ELEMENT_DIM, SPACE_DIM >::~OperatorSplittingMonodomainSolver(), ParallelColumnDataWriter::~ParallelColumnDataWriter(), and PCTwoLevelsBlockDiagonal::~PCTwoLevelsBlockDiagonal().
void PetscTools::Destroy ( Mat & rMat ) [inline, static]
Destroy method. Note that PETSc 3.1 and earlier destroy based on a PETSc object, whereas PETSc 3.2 and later destroy based on a pointer to a PETSc object.
rMat | a reference to the PETSc object |
Definition at line 368 of file PetscTools.hpp.
void PetscTools::DumpPetscObject ( const Mat & rMat, const std::string & rOutputFileFullPath ) [static]
Dumps a given PETSc object to disk.
rMat | a matrix |
rOutputFileFullPath | where to dump the matrix to disk |
Definition at line 333 of file PetscTools.cpp.
References PETSC_DESTROY_PARAM.
Referenced by LinearSystem::AddRhsMultipleValues(), ParabolicGrowingDomainPdeModifier< DIM >::serialize(), EllipticGrowingDomainPdeModifier< DIM >::serialize(), ParabolicBoxDomainPdeModifier< DIM >::serialize(), and EllipticBoxDomainPdeModifier< DIM >::serialize().
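For example (a sketch; the output path is hypothetical):

    Mat m;
    PetscTools::SetupMat(m, 10, 10, 3);
    // ... fill and assemble m ...
    PetscTools::DumpPetscObject(m, "/tmp/jacobian.mat"); // can be re-loaded later with ReadPetscObject
    PetscTools::Destroy(m);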
void PetscTools::DumpPetscObject ( const Vec & rVec, const std::string & rOutputFileFullPath ) [static]
Dumps a given PETSc object to disk.
rVec | a vector |
rOutputFileFullPath | where to dump the vector to disk |
Definition at line 347 of file PetscTools.cpp.
References PETSC_DESTROY_PARAM.
void PetscTools::EndRoundRobin ( ) [static]
Call at the end of a block of code that should be executed by each process in turn.
Definition at line 160 of file PetscTools.cpp.
References Barrier(), GetMyRank(), and GetNumProcs().
Referenced by ActivationOutputModifier::FinaliseAtEnd(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), ExecutableSupport::ShowParallelLaunching(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), PostProcessingWriter< ELEMENT_DIM, SPACE_DIM >::WriteGenericFileToMeshalyzer(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), and AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::WriteResultsToFiles().
unsigned PetscTools::GetMyRank ( ) [static]
Return our rank. If PETSc has not been initialised, returns 0.
Definition at line 114 of file PetscTools.cpp.
References CheckCache(), and mRank.
Referenced by VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::AddPointData(), ArchiveOpener< Archive, Stream >::ArchiveOpener(), CompressibleNonlinearElasticitySolver< DIM >::AssembleSystem(), IncompressibleNonlinearElasticitySolver< DIM >::AssembleSystem(), CellId::AssignCellId(), Barrier(), BeginRoundRobin(), NodesOnlyMesh< SPACE_DIM >::CalculateNodesOutsideLocalDomain(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructFromMeshReader(), AbstractContinuumMechanicsAssembler< DIM, CAN_ASSEMBLE_VECTOR, CAN_ASSEMBLE_MATRIX >::DoAssemble(), EndRoundRobin(), ActivationOutputModifier::FinaliseAtEnd(), FormDebugHead(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::GeometricPartitioning(), MixedDimensionMesh< ELEMENT_DIM, SPACE_DIM >::GetCableElement(), AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::GetCardiacCellOrHaloCell(), NodesOnlyMesh< SPACE_DIM >::GetMaximumNodeIndex(), NodesOnlyMesh< SPACE_DIM >::GetNextAvailableIndex(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::GetNodeOrHaloNode(), DistributedBoxCollection< DIM >::GetProcessOwningNode(), DistributedBoxCollection< DIM >::LoadBalance(), CardiacSimulationArchiver< PROBLEM_CLASS >::Migrate(), NodeBasedCellPopulation< DIM >::NonBlockingSendCellsToNeighbourProcesses(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ParMetisLibraryNodeAndElementPartitioning(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), Timer::Print(), ExecutableSupport::PrintError(), TetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ReadNodesPerProcessorFile(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), NodeBasedCellPopulation< DIM >::SendCellsToNeighbourProcesses(), DistributedBoxCollection< SPACE_DIM >::serialize(), ExecutableSupport::ShowParallelLaunching(), StokesFlowSolver< DIM >::Solve(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::SolveBoundaryElementMapping(), AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::SolveCellSystems(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::SolveElementMapping(), NodesOnlyMesh< SPACE_DIM >::SolveNodeMapping(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::SolveNodeMapping(), AbstractNonlinearElasticitySolver< DIM >::TakeNewtonStep(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::UpdateQueueFromRemote(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), ExecutableSupport::WriteMachineInfoFile(), ExecutableSupport::WriteProvenanceInfoFile(), NodeBasedCellPopulationWithParticles< DIM >::WriteVtkResultsToFile(), and NodeBasedCellPopulation< DIM >::WriteVtkResultsToFile().
unsigned PetscTools::GetNumProcs ( ) [static]
Returns the total number of processors.
Definition at line 108 of file PetscTools.cpp.
References CheckCache(), and mNumProcessors.
Referenced by VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::AddPointData(), CellId::AssignCellId(), AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::CalculateHaloNodesFromNodeExchange(), AbstractTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::CalculateNodeExchange(), DistributedVectorFactory::CalculateOwnership(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::ComputeDistanceMap(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::DistanceMapCalculator(), DistributedBoxCollection< DIM >::DistributedBoxCollection(), EndRoundRobin(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::GeometricPartitioning(), NodesOnlyMesh< SPACE_DIM >::GetMaximumNodeIndex(), NodesOnlyMesh< SPACE_DIM >::GetNextAvailableIndex(), CardiacSimulationArchiver< PROBLEM_CLASS >::Migrate(), ObjectCommunicator< CLASS >::ObjectCommunicator(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ParMetisLibraryNodeAndElementPartitioning(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), TetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ReadNodesPerProcessorFile(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), DistributedVectorFactory::rGetGlobalLows(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), DistributedQuadraticMesh< DIM >::serialize(), DistributedVectorFactory::serialize(), DistributedBoxCollection< SPACE_DIM >::serialize(), AbstractMesh< ELEMENT_DIM, SPACE_DIM >::SetDistributedVectorFactory(), DistributedVectorFactory::SetOriginalFactory(), ExecutableSupport::ShowParallelLaunching(), AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::SolveCellSystems(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::UpdateQueueFromRemote(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::WorkOnLocalQueue(), XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingParallelMesh(), and ExecutableSupport::WriteMachineInfoFile().
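GetMyRank and GetNumProcs together support simple static work division, for example (a sketch; num_tasks is a hypothetical count of independent tasks):

    for (unsigned i = PetscTools::GetMyRank(); i < num_tasks; i += PetscTools::GetNumProcs())
    {
        // process task i; each task is handled by exactly one process
    }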
MPI_Comm PetscTools::GetWorld ( ) [static]
Get the MPI Communicator representing the whole set of running processes. This will normally be PETSC_COMM_WORLD, unless IsolateProcesses has been called, in which case it will be PETSC_COMM_SELF.
Definition at line 174 of file PetscTools.cpp.
References mIsolateProcesses.
Referenced by AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::GetCellCyclePhaseCount(), AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::GetCellMutationStateCount(), AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::GetCellProliferativeTypeCount(), NodeBasedCellPopulation< DIM >::GetSizeOfCellPopulation(), NodesOnlyMesh< SPACE_DIM >::GetWidth(), ObjectCommunicator< CLASS >::IRecvObject(), NodesOnlyMesh< SPACE_DIM >::IsANodeCloseToDomainBoundary(), ObjectCommunicator< CLASS >::ISendObject(), ObjectCommunicator< CLASS >::RecvObject(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), ObjectCommunicator< CLASS >::SendObject(), and ObjectCommunicator< CLASS >::SendRecvObject().
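The returned communicator can be used directly in MPI calls, for example (a sketch):

    double local_sum = 1.0; // this process's contribution
    double global_sum;
    MPI_Allreduce(&local_sum, &global_sum, 1, MPI_DOUBLE, MPI_SUM, PetscTools::GetWorld());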
bool PetscTools::HasParMetis ( ) [static]
Checks if PETSc has been configured with ParMETIS partitioning support.
Definition at line 445 of file PetscTools.cpp.
References Destroy(), PETSC_DESTROY_PARAM, and SetupMat().
Referenced by DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ComputeMeshPartitioning(), and NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning().
bool PetscTools::IsInitialised ( ) [static]
Returns whether PETSc has been initialised yet.
Definition at line 85 of file PetscTools.cpp.
References CheckCache(), and mPetscIsInitialised.
Referenced by Citations::Register().
bool PetscTools::IsIsolated ( ) [static]
Returns whether IsolateProcesses has been called, i.e. whether this process is considered isolated.
Definition at line 103 of file PetscTools.cpp.
References mIsolateProcesses.
Referenced by GenericEventHandler< 11, MeshEventHandler >::ReportImpl().
void PetscTools::IsolateProcesses ( bool isolate = true ) [static]
Where work can be split between isolated processes, it would be nice to be able to do so easily without worrying about collective calls made inside classes such as OutputFileHandler leading to deadlock. This method attempts to enable this behaviour. If the flag is set then AmMaster and AmTopMost always return true, Barrier becomes a no-op, and ReplicateBool doesn't replicate.
isolate | whether to consider processes as isolated |
Definition at line 169 of file PetscTools.cpp.
References mIsolateProcesses.
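A sketch of the intended usage:

    PetscTools::IsolateProcesses(true);  // every process now behaves as a lone master
    // ... embarrassingly parallel work, with no collective calls required ...
    PetscTools::IsolateProcesses(false); // restore normal collective behaviour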
bool PetscTools::IsParallel ( ) [static]
Returns whether there is more than one process.
Definition at line 97 of file PetscTools.cpp.
References CheckCache(), and mNumProcessors.
Referenced by DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ComputeMeshPartitioning(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructFromMeshReader(), FormDebugHead(), AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::GetCellCyclePhaseCount(), AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::GetCellMutationStateCount(), AbstractCellPopulation< ELEMENT_DIM, SPACE_DIM >::GetCellProliferativeTypeCount(), AbstractMesh< ELEMENT_DIM, SPACE_DIM >::GetDistributedVectorFactory(), GenericEventHandler< 11, MeshEventHandler >::HeadingsImpl(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Initialise(), DistributedBoxCollection< DIM >::IsHaloBox(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ParMetisLibraryNodeAndElementPartitioning(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), Timer::Print(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ReorderNodes(), GenericEventHandler< 11, MeshEventHandler >::ReportImpl(), CellId::serialize(), ExecutableSupport::ShowParallelLaunching(), and XdmfMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh().
bool PetscTools::IsSequential ( ) [static]
Returns whether there is just one process, or whether processes have been isolated.
Definition at line 91 of file PetscTools.cpp.
References CheckCache(), mIsolateProcesses, and mNumProcessors.
Referenced by AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::AbstractCardiacTissue(), AbstractContinuumMechanicsSolver< DIM >::AllocateMatrixMemory(), CylindricalHoneycombMeshGenerator::CylindricalHoneycombMeshGenerator(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::DistanceMapCalculator(), NodeBasedCellPopulation< DIM >::GetTetrahedralMeshForPdeModifier(), VertexBasedCellPopulation< DIM >::GetTetrahedralMeshForPdeModifier(), HoneycombMeshGenerator::HoneycombMeshGenerator(), DistributedBoxCollection< DIM >::IsInteriorBox(), NodeBasedCellPopulationWithParticles< DIM >::NodeBasedCellPopulationWithParticles(), PCTwoLevelsBlockDiagonal::PCTwoLevelsBlockDiagonalCreate(), AbstractNonlinearElasticitySolver< DIM >::SetKspSolverAndPcType(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::SetParallelFiles(), SetupMat(), FineCoarseMeshPair< DIM >::ShareCoarseElementData(), FineCoarseMeshPair< DIM >::ShareFineElementData(), VtkMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteFilesUsingMesh(), AbstractTetrahedralMeshWriter< ELEMENT_DIM, SPACE_DIM >::WriteNclFile(), NodeBasedCellPopulationWithParticles< DIM >::WriteVtkResultsToFile(), and NodeBasedCellPopulation< DIM >::WriteVtkResultsToFile().
void PetscTools::ReadPetscObject ( Mat & rMat, const std::string & rOutputFileFullPath, Vec rParallelLayout = nullptr ) [static]
Read a previously dumped PETSc object from disk.
rMat | a matrix |
rOutputFileFullPath | where to read the matrix from |
rParallelLayout | If provided, rMat will have the same parallel layout. Its content is irrelevant. |
Definition at line 361 of file PetscTools.cpp.
References Destroy(), PETSC_DESTROY_PARAM, and SetupMat().
Referenced by LinearSystem::AddRhsMultipleValues(), ParabolicGrowingDomainPdeModifier< DIM >::serialize(), EllipticGrowingDomainPdeModifier< DIM >::serialize(), ParabolicBoxDomainPdeModifier< DIM >::serialize(), and EllipticBoxDomainPdeModifier< DIM >::serialize().
void PetscTools::ReadPetscObject ( Vec & rVec, const std::string & rOutputFileFullPath, Vec rParallelLayout = nullptr ) [static]
Read a previously dumped PETSc object from disk.
rVec | a vector |
rOutputFileFullPath | where to read the vector from |
rParallelLayout | If provided, rVec will have the same parallel layout. Its content is irrelevant. |
Definition at line 412 of file PetscTools.cpp.
References PETSC_DESTROY_PARAM.
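For example, reading back a vector dumped with DumpPetscObject (a sketch; the path is hypothetical):

    Vec v;
    PetscTools::ReadPetscObject(v, "/tmp/solution.vec"); // PETSc decides the layout when rParallelLayout is omitted
    // ... use v ...
    PetscTools::Destroy(v);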
bool PetscTools::ReplicateBool ( bool flag ) [static]
Boolean OR of flags between processes.
flag | is set to true on this process. |
Definition at line 186 of file PetscTools.cpp.
References CheckCache(), mIsolateProcesses, and mPetscIsInitialised.
Referenced by AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::AbstractCardiacTissue(), BidomainProblem< DIM >::AnalyseMeshForBath(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::ComputeDistanceMap(), BoundaryConditionsContainer< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::DefineConstantDirichletOnMeshBoundary(), AbstractBoundaryConditionsContainer< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::HasDirichletBoundaryConditions(), ReplicateException(), CardiacSimulationArchiver< PROBLEM_CLASS >::Save(), DistanceMapCalculator< ELEMENT_DIM, SPACE_DIM >::UpdateQueueFromRemote(), and Hdf5DataWriter::WriteCache().
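For example (a sketch):

    bool found_here = false;
    // ... set found_here if this process located the item in its local data ...
    bool found_anywhere = PetscTools::ReplicateBool(found_here); // logical OR across all processes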
void PetscTools::ReplicateException ( bool flag ) [static]
Ensure exceptions are handled cleanly in parallel code, by causing all processes to throw if any one does.
flag | is set to true if this process has thrown. |
Definition at line 198 of file PetscTools.cpp.
References EXCEPTION, and ReplicateBool().
Referenced by AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::AbstractCardiacTissue(), AbstractFunctionalCalculator< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Calculate(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::CalculateBoundingBox(), DistributedTetrahedralMesh< ELEMENT_DIM, SPACE_DIM >::ConstructFromMeshReader(), CellMLToSharedLibraryConverter::ConvertCellmlToSo(), BidomainTissue< SPACE_DIM >::CreateExtracellularConductivityTensors(), ExtendedBidomainTissue< SPACE_DIM >::CreateExtracellularConductivityTensors(), ExtendedBidomainTissue< SPACE_DIM >::CreateGGapConductivities(), AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::CreateIntracellularConductivityTensor(), ExtendedBidomainTissue< SPACE_DIM >::CreateIntracellularConductivityTensorSecondCell(), ExtendedBidomainTissue< SPACE_DIM >::ExtendedBidomainTissue(), ReplicatableVector::Resize(), ElectrodesStimulusFactory< DIM >::SetCompatibleExtracellularStimulus(), AbstractCardiacProblem< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::Solve(), ExtendedBidomainTissue< SPACE_DIM >::SolveCellSystems(), and AbstractCardiacTissue< ELEMENT_DIM, SPACE_DIM >::SolveCellSystems().
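The usual pattern, assuming Chaste's Exception class (a sketch):

    bool threw = false;
    try
    {
        // work that may throw on only some processes
    }
    catch (Exception&)
    {
        threw = true;
    }
    PetscTools::ReplicateException(threw); // now every process throws if any process did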
void PetscTools::ResetCache ( ) [static]
Reset our cached values: whether PETSc is initialised, how many processors there are, and which one we are.
Definition at line 58 of file PetscTools.cpp.
References mNumProcessors, mPetscIsInitialised, and mRank.
Referenced by CheckCache(), PetscSetupUtils::InitialisePetsc(), and PetscSetupUtils::ResetStatusCache().
void PetscTools::SetOption ( const char * pOptionName, const char * pOptionValue ) [inline, static]
Set a PETSc option. This is a wrapper for PetscOptionsSetValue, which changed signature in PETSc 3.7.
pOptionName | the option name |
pOptionValue | the option value |
Definition at line 384 of file PetscTools.hpp.
Referenced by PCBlockDiagonal::PCBlockDiagonalSetUp(), PCLDUFactorisation::PCLDUFactorisationSetUp(), PCTwoLevelsBlockDiagonal::PCTwoLevelsBlockDiagonalSetUp(), LinearSystem::ResetKspSolver(), AbstractNonlinearElasticitySolver< DIM >::SetKspSolverAndPcType(), LinearSystem::Solve(), and AbstractNonlinearElasticitySolver< DIM >::SolveSnes().
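For example, selecting a Krylov method via a standard PETSc option name (a sketch):

    PetscTools::SetOption("-ksp_type", "gmres"); // same call whether PETSc is older or newer than 3.7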
void PetscTools::SetupMat ( Mat & rMat, int numRows, int numColumns, unsigned rowPreallocation, int numLocalRows = PETSC_DECIDE, int numLocalColumns = PETSC_DECIDE, bool ignoreOffProcEntries = true, bool newAllocationError = true ) [static]
Set up a matrix - set the size using the given parameters. The number of local rows and columns is by default PETSC_DECIDE. SetFromOptions is called.
rMat | the matrix |
numRows | the number of rows in the matrix |
numColumns | the number of columns in the matrix |
rowPreallocation | the max number of nonzero entries expected on a row A value of 0 is allowed: no preallocation is then done and the user must preallocate the memory for the matrix themselves. |
numLocalRows | the number of local rows (defaults to PETSC_DECIDE) |
numLocalColumns | the number of local columns (defaults to PETSC_DECIDE) |
ignoreOffProcEntries | tells PETSc to drop off-processor entries |
newAllocationError | tells PETSc whether to set MAT_NEW_NONZERO_ALLOCATION_ERR (currently only used in PETSc 3.3 and later; MAT_NEW_NONZERO_ALLOCATION_ERR defaults to false in PETSc 3.2 and earlier, and to true in PETSc 3.3) |
Definition at line 268 of file PetscTools.cpp.
References IsSequential().
Referenced by AbstractContinuumMechanicsSolver< DIM >::AllocateMatrixMemory(), Hdf5DataWriter::ApplyPermutation(), Hdf5DataWriter::DefineFixedDimensionUsingMatrix(), HasParMetis(), ExtendedBidomainSolver< ELEM_DIM, SPACE_DIM >::InitialiseForSolve(), BidomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), OperatorSplittingMonodomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), MonodomainSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), MonodomainPurkinjeSolver< ELEMENT_DIM, SPACE_DIM >::InitialiseForSolve(), LinearSystem::LinearSystem(), NodePartitioner< ELEMENT_DIM, SPACE_DIM >::PetscMatrixPartitioning(), ReadPetscObject(), LinearSystem::SetPrecondMatrixIsDifferentFromLhs(), SimplePetscNonlinearSolver::Solve(), and AbstractNonlinearAssemblerSolverHybrid< ELEMENT_DIM, SPACE_DIM, PROBLEM_DIM >::VerifyJacobian().
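For example (a sketch):

    Mat m;
    PetscTools::SetupMat(m, 1000, 1000, 9); // 1000x1000, expecting at most 9 nonzeros per row
    // ... fill and assemble m ...
    PetscTools::Destroy(m);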
const unsigned PetscTools::MASTER_RANK = 0 [static]
As a convention, we consider processor 0 the master process.
Definition at line 141 of file PetscTools.hpp.
Referenced by AmMaster().
bool PetscTools::mIsolateProcesses = false [static, private]
Whether to pretend that we're just running many master processes independently.
Definition at line 125 of file PetscTools.hpp.
Referenced by AmMaster(), AmTopMost(), Barrier(), GetWorld(), IsIsolated(), IsolateProcesses(), IsSequential(), and ReplicateBool().
unsigned PetscTools::mNumProcessors = 0 [static, private]
The total number of processors.
Definition at line 119 of file PetscTools.hpp.
Referenced by AmTopMost(), GetNumProcs(), IsParallel(), IsSequential(), and ResetCache().
bool PetscTools::mPetscIsInitialised = false [static, private]
Whether PETSc has been initialised.
Definition at line 116 of file PetscTools.hpp.
Referenced by Barrier(), IsInitialised(), ReplicateBool(), and ResetCache().
unsigned PetscTools::mRank = 0 [static, private]
Which processor we are.
Definition at line 122 of file PetscTools.hpp.
Referenced by AmMaster(), AmTopMost(), GetMyRank(), and ResetCache().