Chaste Commit::55a4425755baea6ba62bb0acadb0aebd7f12fcd8
PetscTools.cpp
/*

Copyright (c) 2005-2025, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "PetscTools.hpp"
#include <cassert>
#include <cstring> // For strcmp etc. Needed in gcc-4.3
#include <iostream>
#include <sstream>
#include "Exception.hpp"
#include "Warnings.hpp"

bool PetscTools::mPetscIsInitialised = false;
unsigned PetscTools::mNumProcessors = 0;
unsigned PetscTools::mRank = 0;
bool PetscTools::mIsolateProcesses = false;

#ifndef NDEBUG
// Uncomment this to trace calls to PetscTools::Barrier
//#define DEBUG_BARRIERS
#endif

#ifdef DEBUG_BARRIERS
static unsigned mNumBarriers = 0u;
#endif

void PetscTools::ResetCache()
{
    PetscBool is_there;
    PetscInitialized(&is_there);
    if (is_there)
    {
        mPetscIsInitialised = true;

        PetscInt num_procs;
        MPI_Comm_size(PETSC_COMM_WORLD, &num_procs);
        mNumProcessors = (unsigned)num_procs;

        PetscInt my_rank;
        MPI_Comm_rank(PETSC_COMM_WORLD, &my_rank);
        mRank = (unsigned)my_rank;
    }
    else
    {
        // No PETSc
        mPetscIsInitialised = false;
        mNumProcessors = 1;
        mRank = 0;
    }
}

// Information methods

bool PetscTools::IsInitialised()
{
    CheckCache();
    return mPetscIsInitialised;
}

bool PetscTools::IsSequential()
{
    CheckCache();
    return (mIsolateProcesses || mNumProcessors == 1);
}

bool PetscTools::IsParallel()
{
    CheckCache();
    return (mNumProcessors > 1);
}

bool PetscTools::IsIsolated()
{
    return mIsolateProcesses;
}

unsigned PetscTools::GetNumProcs()
{
    CheckCache();
    return mNumProcessors;
}

unsigned PetscTools::GetMyRank()
{
    CheckCache();
    return mRank;
}

bool PetscTools::AmMaster()
{
    CheckCache();
    return (mRank == MASTER_RANK || mIsolateProcesses);
}

bool PetscTools::AmTopMost()
{
    CheckCache();
    return (mRank == mNumProcessors - 1 || mIsolateProcesses);
}

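// Example (a sketch, not part of the original file): typical rank-guarded output,
// so that only one process writes to the console:
//     if (PetscTools::AmMaster())
//     {
//         std::cout << "Running on " << PetscTools::GetNumProcs() << " processes" << std::endl;
//     }
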
// Little utility methods

void PetscTools::Barrier(const std::string callerId)
{
    CheckCache();
    if (mPetscIsInitialised)
    {
#ifdef DEBUG_BARRIERS
        // "Before" is alphabetically before "Post" so that one can sort the output on process/event/barrier
        std::cout << "DEBUG: proc " << PetscTools::GetMyRank() << ": Before "
                  << "Barrier " << mNumBarriers << " \"" << callerId << "\"." << std::endl
                  << std::flush;
#endif
        PetscBarrier(CHASTE_PETSC_NULLPTR);
#ifdef DEBUG_BARRIERS
        std::cout << "DEBUG: proc " << PetscTools::GetMyRank() << ": Post "
                  << "Barrier " << mNumBarriers++ << " \"" << callerId << "\"." << std::endl
                  << std::flush;
#endif
    }
}

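// Example (a sketch, not part of the original file): pass a caller ID so that,
// with DEBUG_BARRIERS defined above, the trace output identifies the call site:
//     PetscTools::Barrier("MyClass::MyMethod");
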
void PetscTools::BeginRoundRobin()
{
    Barrier("PetscTools::RoundRobin"); // We want barriers both before all and after all, just in case
    const unsigned my_rank = GetMyRank();
    for (unsigned turn = 0; turn < my_rank; turn++)
    {
        Barrier("PetscTools::RoundRobin");
    }
}

void PetscTools::EndRoundRobin()
{
    const unsigned num_procs = GetNumProcs();
    for (unsigned turn = GetMyRank(); turn < num_procs; turn++)
    {
        Barrier("PetscTools::RoundRobin");
    }
}

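// Example (a sketch, not part of the original file): code between the two calls
// runs on one process at a time, in rank order, e.g. for serialised file access:
//     PetscTools::BeginRoundRobin();
//     AppendResultsToSharedFile(); // hypothetical per-process work
//     PetscTools::EndRoundRobin();
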
void PetscTools::IsolateProcesses(bool isolate)
{
    mIsolateProcesses = isolate;
}

MPI_Comm PetscTools::GetWorld()
{
    if (mIsolateProcesses)
    {
        return PETSC_COMM_SELF;
    }
    else
    {
        return PETSC_COMM_WORLD;
    }
}
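
// Example (a sketch, not part of the original file): collective operations should
// use GetWorld() rather than PETSC_COMM_WORLD directly, so that isolated
// processes skip genuine inter-process communication:
//     double local_value = 1.0, global_sum;
//     MPI_Allreduce(&local_value, &global_sum, 1, MPI_DOUBLE, MPI_SUM, PetscTools::GetWorld());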

bool PetscTools::ReplicateBool(bool flag)
{
    CheckCache();
    unsigned my_flag = (unsigned)flag;
    unsigned anyones_flag_is_true = my_flag;
    if (mPetscIsInitialised && !mIsolateProcesses)
    {
        MPI_Allreduce(&my_flag, &anyones_flag_is_true, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
    }
    return (anyones_flag_is_true == 1);
}

void PetscTools::ReplicateException(bool flag)
{
    bool anyones_error = ReplicateBool(flag);
    if (flag)
    {
        // Return control to exception thrower
        return;
    }
    if (anyones_error)
    {
        EXCEPTION("Another process threw an exception; bailing out.");
    }
}

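// Example (a sketch, not part of the original file): the usual pattern pairs a
// ReplicateException(true) on the throwing process with ReplicateException(false)
// on the others, so every process either continues or bails out together:
//     try
//     {
//         DoSomethingThatMightThrow(); // hypothetical work that may fail on one process
//         PetscTools::ReplicateException(false);
//     }
//     catch (Exception& e)
//     {
//         PetscTools::ReplicateException(true);
//         throw e;
//     }
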
// Vector & matrix creation routines

Vec PetscTools::CreateVec(int size, int localSize, bool ignoreOffProcEntries)
{
    assert(size >= 0); // There is one test where we create a zero-sized vector
    Vec ret;
    VecCreate(PETSC_COMM_WORLD, &ret);
    VecSetSizes(ret, localSize, size); // localSize usually defaults to PETSC_DECIDE
    VecSetFromOptions(ret);

    if (ignoreOffProcEntries)
    {
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
        VecSetOption(ret, VEC_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE);
#else
        VecSetOption(ret, VEC_IGNORE_OFF_PROC_ENTRIES);
#endif
    }

    return ret;
}
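
// Example (a sketch, not part of the original file): a distributed vector of
// global size 100, with PETSc choosing the local distribution:
//     Vec v = PetscTools::CreateVec(100);
//     PetscTools::Destroy(v);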

Vec PetscTools::CreateVec(std::vector<double> data)
{
    assert(data.size() > 0);
    Vec ret = CreateVec(data.size());

    double* p_ret;
    VecGetArray(ret, &p_ret);
    int lo, hi;
    VecGetOwnershipRange(ret, &lo, &hi);

    for (int global_index = lo; global_index < hi; global_index++)
    {
        int local_index = global_index - lo;
        p_ret[local_index] = data[global_index];
    }
    VecRestoreArray(ret, &p_ret);

    return ret;
}

Vec PetscTools::CreateAndSetVec(int size, double value)
{
    assert(size > 0);
    Vec ret = CreateVec(size);

#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    VecSet(&value, ret);
#else
    VecSet(ret, value);
#endif

    return ret;
}
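
// Example (a sketch, not part of the original file): a 10-entry parallel vector
// initialised to 1.0 in every entry:
//     Vec ones = PetscTools::CreateAndSetVec(10, 1.0);
//     PetscTools::Destroy(ones);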

void PetscTools::SetupMat(Mat& rMat, int numRows, int numColumns,
                          unsigned rowPreallocation,
                          int numLocalRows,
                          int numLocalColumns,
                          bool ignoreOffProcEntries,
                          bool newAllocationError)
{
    assert(numRows > 0);
    assert(numColumns > 0);
    if ((int)rowPreallocation > numColumns)
    {
        WARNING("Preallocation failure: requested number of nonzeros per row greater than number of columns"); //+rowPreallocation+">"+numColumns);
        rowPreallocation = numColumns;
    }

#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    MatCreate(PETSC_COMM_WORLD, numLocalRows, numLocalColumns, numRows, numColumns, &rMat);
#else //New API
    MatCreate(PETSC_COMM_WORLD, &rMat);
    MatSetSizes(rMat, numLocalRows, numLocalColumns, numRows, numColumns);
#endif

    if (PetscTools::IsSequential())
    {
        MatSetType(rMat, MATSEQAIJ);
        if (rowPreallocation > 0)
        {
            MatSeqAIJSetPreallocation(rMat, rowPreallocation, CHASTE_PETSC_NULLPTR);
        }
    }
    else
    {
        MatSetType(rMat, MATMPIAIJ);
        if (rowPreallocation > 0)
        {
            MatMPIAIJSetPreallocation(rMat, rowPreallocation, CHASTE_PETSC_NULLPTR, rowPreallocation, CHASTE_PETSC_NULLPTR);
        }
    }

    MatSetFromOptions(rMat);

    if (ignoreOffProcEntries) //&& IsParallel())
    {
        if (rowPreallocation == 0)
        {
            // We aren't allowed to do non-zero allocation after setting MAT_IGNORE_OFF_PROC_ENTRIES
            WARNING("Ignoring MAT_IGNORE_OFF_PROC_ENTRIES flag because we might set non-zeroes later");
        }
        else
        {
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
            MatSetOption(rMat, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE);
#else
            MatSetOption(rMat, MAT_IGNORE_OFF_PROC_ENTRIES);
#endif
        }
    }
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 3) //PETSc 3.3 or later
    if (newAllocationError == false)
    {
        MatSetOption(rMat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
    }
#endif
}
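
// Example (a sketch, not part of the original file): a square 100x100 parallel
// matrix preallocated for roughly 9 nonzeros per row, with the local row and
// column distribution left to PETSc:
//     Mat m;
//     PetscTools::SetupMat(m, 100, 100, 9);
//     // ... assemble and use the matrix ...
//     PetscTools::Destroy(m);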

void PetscTools::DumpPetscObject(const Mat& rMat, const std::string& rOutputFileFullPath)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_CREATE;
#else
    PetscFileMode type = FILE_MODE_WRITE;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(), type, &view);
    MatView(rMat, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}

void PetscTools::DumpPetscObject(const Vec& rVec, const std::string& rOutputFileFullPath)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_CREATE;
#else
    PetscFileMode type = FILE_MODE_WRITE;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(), type, &view);
    VecView(rVec, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}
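
// Example round-trip (a sketch, not part of the original file; the path is illustrative):
//     PetscTools::DumpPetscObject(v, "/tmp/saved_vec.bin");
//     Vec v_reloaded;
//     PetscTools::ReadPetscObject(v_reloaded, "/tmp/saved_vec.bin");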

void PetscTools::ReadPetscObject(Mat& rMat, const std::string& rOutputFileFullPath, Vec rParallelLayout)
{

#if PETSC_VERSION_GE(3, 11, 2) // PETSc 3.11.2 or newer
    // All modern versions of PETSc allow us to directly read into a non-standard parallel layout
    PetscViewer view;
    PetscFileMode type = FILE_MODE_READ;
    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);

    if (rParallelLayout != nullptr)
    {
        // Create an empty matrix with non-default parallel layout
        PetscInt num_rows, num_local_rows;
        VecGetSize(rParallelLayout, &num_rows);
        VecGetLocalSize(rParallelLayout, &num_local_rows);
        PetscTools::SetupMat(rMat, num_rows, num_rows, 100, num_local_rows, num_local_rows, false);
    }
    else
    {
        // Create an empty matrix with default parallel layout
        MatCreate(PETSC_COMM_WORLD, &rMat);
        MatSetType(rMat, MATMPIAIJ);
    }

    // Read into parallel matrix
    MatLoad(rMat, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
#else
    // Code below is for unsupported versions of PETSc
    /*
     * PETSc (as of 3.1) doesn't provide any method for loading a Mat object with a user-defined parallel
     * layout, i.e. there's no equivalent to VecLoadIntoVector for Mat's.
     *
     * It seems to be in their future plans though: http://lists.mcs.anl.gov/pipermail/petsc-users/2010-March/006062.html
     */

    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_RDONLY;
#else
    PetscFileMode type = FILE_MODE_READ;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);

#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
    MatCreate(PETSC_COMM_WORLD, &rMat);
    MatSetType(rMat, MATMPIAIJ);
    MatLoad(rMat, view);
#else
    MatLoad(view, MATMPIAIJ, &rMat);
#endif

    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));

    if (rParallelLayout != nullptr)
    {
        /*
         * The idea is to copy rMat into a matrix that has the appropriate
         * parallel layout. Inefficient...
         */
        PetscInt num_rows, num_local_rows;

        VecGetSize(rParallelLayout, &num_rows);
        VecGetLocalSize(rParallelLayout, &num_local_rows);

        Mat temp_mat;
        PetscTools::SetupMat(temp_mat, num_rows, num_rows, 100, num_local_rows, num_local_rows, false);

        MatCopy(rMat, temp_mat, DIFFERENT_NONZERO_PATTERN);

        rMat = temp_mat;
    }
#endif
}

void PetscTools::ReadPetscObject(Vec& rVec, const std::string& rOutputFileFullPath, Vec rParallelLayout)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_RDONLY;
#else
    PetscFileMode type = FILE_MODE_READ;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);
    if (rParallelLayout == nullptr)
    {
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
        VecCreate(PETSC_COMM_WORLD, &rVec);
        VecSetType(rVec, VECMPI);
        VecLoad(rVec, view);
#else
        VecLoad(view, VECMPI, &rVec);
#endif
    }
    else
    {
        VecDuplicate(rParallelLayout, &rVec);
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
        VecLoad(rVec, view);
#else
        VecLoadIntoVector(view, rVec);
#endif
    }
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}

bool PetscTools::HasParMetis()
{
#ifdef __INTEL_COMPILER
    // Old versions of the Intel compiler can result in a PETSC ERROR and the program aborting if ParMETIS is checked for
    // before some other calls to PETSc are made. This nasty hack ensures that the HasParMetis method always works on Intel
    Mat mat;
    PetscTools::SetupMat(mat, 1, 1, 1);
    PetscTools::Destroy(mat);
#endif

    MatPartitioning part;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);

    // We are expecting an error from PETSc on systems that don't have the interface, so suppress it
    // in case it aborts
    PetscPushErrorHandler(PetscIgnoreErrorHandler, nullptr);

#if (PETSC_VERSION_MAJOR == 2 || (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR < 2))
    PetscErrorCode parmetis_installed_error = MatPartitioningSetType(part, MAT_PARTITIONING_PARMETIS);
#else
    PetscErrorCode parmetis_installed_error = MatPartitioningSetType(part, MATPARTITIONINGPARMETIS);
#endif

    // Stop suppressing errors
    PetscPopErrorHandler();

    // Note that this method probably leaks memory inside PETSc, because if MatPartitioningCreate fails
    // then there isn't a proper handle to destroy.
    MatPartitioningDestroy(PETSC_DESTROY_PARAM(part));

    // Get-out-of-jail-free card for Windows, where the latest configuration of the integration machine shows that our implementation doesn't work as expected.
#ifdef _MSC_VER
    //\todo #2016 (or similar ticket). The method NodePartitioner::PetscMatrixPartitioning is not working in parallel
    if (parmetis_installed_error == 0)
    {
        WARN_ONCE_ONLY("The PETSc/parMETIS interface is correctly installed but does not yet work in Windows so matrix-based partitioning will be turned off.");
    }
    return false;
#endif

    return (parmetis_installed_error == 0);
}