PetscTools.cpp

/*

Copyright (c) 2005-2015, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "PetscTools.hpp"
#include "Exception.hpp"
#include "Warnings.hpp"
#include <iostream>
#include <sstream>
#include <cassert>
#include <cstring> // For strcmp etc. Needed in gcc-4.3

bool PetscTools::mPetscIsInitialised = false;
unsigned PetscTools::mNumProcessors = 0;
unsigned PetscTools::mRank = 0;
bool PetscTools::mIsolateProcesses = false;

#ifndef NDEBUG
// Uncomment this to trace calls to PetscTools::Barrier
//#define DEBUG_BARRIERS
#endif

#ifdef DEBUG_BARRIERS
static unsigned mNumBarriers = 0u;
#endif

void PetscTools::ResetCache()
{
    PetscBool is_there;
    PetscInitialized(&is_there);
    if (is_there)
    {
        mPetscIsInitialised = true;

        PetscInt num_procs;
        MPI_Comm_size(PETSC_COMM_WORLD, &num_procs);
        mNumProcessors = (unsigned) num_procs;

        PetscInt my_rank;
        MPI_Comm_rank(PETSC_COMM_WORLD, &my_rank);
        mRank = (unsigned) my_rank;
    }
    else
    {
        // No PETSc
        mPetscIsInitialised = false;
        mNumProcessors = 1;
        mRank = 0;
    }
}

// Information methods

bool PetscTools::IsInitialised()
{
    CheckCache();
    return mPetscIsInitialised;
}

bool PetscTools::IsSequential()
{
    CheckCache();
    return (mIsolateProcesses || mNumProcessors == 1);
}

bool PetscTools::IsParallel()
{
    CheckCache();
    return (mNumProcessors > 1);
}

bool PetscTools::IsIsolated()
{
    return mIsolateProcesses;
}

unsigned PetscTools::GetNumProcs()
{
    CheckCache();
    return mNumProcessors;
}

unsigned PetscTools::GetMyRank()
{
    CheckCache();
    return mRank;
}

bool PetscTools::AmMaster()
{
    CheckCache();
    return (mRank == MASTER_RANK || mIsolateProcesses);
}

bool PetscTools::AmTopMost()
{
    CheckCache();
    return (mRank == mNumProcessors - 1 || mIsolateProcesses);
}

// Little utility methods

void PetscTools::Barrier(const std::string callerId)
{
    CheckCache();
    if (mPetscIsInitialised && !mIsolateProcesses)
    {
#ifdef DEBUG_BARRIERS
        std::cout << "DEBUG: proc " << PetscTools::GetMyRank() << ": Pre " << callerId << " Barrier " << mNumBarriers << "." << std::endl << std::flush;
#endif
        PetscBarrier(PETSC_NULL);
#ifdef DEBUG_BARRIERS
        std::cout << "DEBUG: proc " << PetscTools::GetMyRank() << ": Post " << callerId << " Barrier " << mNumBarriers++ << "." << std::endl << std::flush;
#endif
    }
}

void PetscTools::BeginRoundRobin()
{
    Barrier("PetscTools::RoundRobin"); // We want barriers both before all and after all, just in case
    const unsigned my_rank = GetMyRank();
    for (unsigned turn=0; turn<my_rank; turn++)
    {
        Barrier("PetscTools::RoundRobin");
    }
}

void PetscTools::EndRoundRobin()
{
    const unsigned num_procs = GetNumProcs();
    for (unsigned turn=GetMyRank(); turn<num_procs; turn++)
    {
        Barrier("PetscTools::RoundRobin");
    }
}
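
/*
 * Illustrative usage sketch (editorial addition, not part of the original file):
 * the round-robin helpers above are typically used to serialise a block of code,
 * such as writing to a shared output file, so that each process runs it in turn
 * in rank order:
 *
 *     PetscTools::BeginRoundRobin();
 *     // ... code executed by exactly one process at a time, in rank order ...
 *     PetscTools::EndRoundRobin();
 */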

void PetscTools::IsolateProcesses(bool isolate)
{
    mIsolateProcesses = isolate;
}

MPI_Comm PetscTools::GetWorld()
{
    if (mIsolateProcesses)
    {
        return PETSC_COMM_SELF;
    }
    else
    {
        return PETSC_COMM_WORLD;
    }
}

bool PetscTools::ReplicateBool(bool flag)
{
    CheckCache();
    unsigned my_flag = (unsigned) flag;
    unsigned anyones_flag_is_true = my_flag;
    if (mPetscIsInitialised && !mIsolateProcesses)
    {
        MPI_Allreduce(&my_flag, &anyones_flag_is_true, 1, MPI_UNSIGNED, MPI_MAX, PETSC_COMM_WORLD);
    }
    return (anyones_flag_is_true == 1);
}

void PetscTools::ReplicateException(bool flag)
{
    bool anyones_error = ReplicateBool(flag);
    if (flag)
    {
        // Return control to exception thrower
        return;
    }
    if (anyones_error)
    {
        EXCEPTION("Another process threw an exception; bailing out.");
    }
}
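
/*
 * Illustrative usage sketch (editorial addition, not part of the original file):
 * ReplicateException() must be reached by every process, so that a process which
 * throws can tell the others to bail out rather than deadlock at the next
 * collective call. An assumed typical pattern, with a hypothetical local
 * operation DoSomethingThatMightThrow(), is:
 *
 *     try
 *     {
 *         DoSomethingThatMightThrow();
 *         PetscTools::ReplicateException(false);
 *     }
 *     catch (Exception& e)
 *     {
 *         PetscTools::ReplicateException(true);
 *         throw e;
 *     }
 */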

// Vector & matrix creation routines

Vec PetscTools::CreateVec(int size, int localSize, bool ignoreOffProcEntries)
{
    assert(size >= 0); // There is one test where we create a zero-sized vector
    Vec ret;
    VecCreate(PETSC_COMM_WORLD, &ret);
    VecSetSizes(ret, localSize, size); // localSize usually defaults to PETSC_DECIDE
    VecSetFromOptions(ret);

    if (ignoreOffProcEntries)
    {
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
        VecSetOption(ret, VEC_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE);
#else
        VecSetOption(ret, VEC_IGNORE_OFF_PROC_ENTRIES);
#endif
    }

    return ret;
}

Vec PetscTools::CreateVec(std::vector<double> data)
{
    assert(data.size() > 0);
    Vec ret = CreateVec(data.size());

    double* p_ret;
    VecGetArray(ret, &p_ret);
    int lo, hi;
    VecGetOwnershipRange(ret, &lo, &hi);

    for (int global_index=lo; global_index<hi; global_index++)
    {
        int local_index = global_index - lo;
        p_ret[local_index] = data[global_index];
    }
    VecRestoreArray(ret, &p_ret);

    return ret;
}

Vec PetscTools::CreateAndSetVec(int size, double value)
{
    assert(size > 0);
    Vec ret = CreateVec(size);

#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    VecSet(&value, ret);
#else
    VecSet(ret, value);
#endif

    return ret;
}
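
/*
 * Illustrative usage sketch (editorial addition, not part of the original file):
 * the helpers above create PETSc Vecs distributed across PETSC_COMM_WORLD.
 * For example:
 *
 *     std::vector<double> data(10, 1.0);
 *     Vec v1 = PetscTools::CreateVec(data);          // entries copied from data
 *     Vec v2 = PetscTools::CreateAndSetVec(20, 3.0); // 20 entries, all set to 3.0
 *     // ... use the vectors ...
 *     PetscTools::Destroy(v1);
 *     PetscTools::Destroy(v2);
 */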

void PetscTools::SetupMat(Mat& rMat, int numRows, int numColumns,
                          unsigned rowPreallocation,
                          int numLocalRows,
                          int numLocalColumns,
                          bool ignoreOffProcEntries,
                          bool newAllocationError)
{
    assert(numRows > 0);
    assert(numColumns > 0);
    if ((int) rowPreallocation > numColumns)
    {
        WARNING("Preallocation failure: requested number of nonzeros per row greater than number of columns");//+rowPreallocation+">"+numColumns);
        rowPreallocation = numColumns;
    }

#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    MatCreate(PETSC_COMM_WORLD,numLocalRows,numLocalColumns,numRows,numColumns,&rMat);
#else //New API
    MatCreate(PETSC_COMM_WORLD,&rMat);
    MatSetSizes(rMat,numLocalRows,numLocalColumns,numRows,numColumns);
#endif

    if (PetscTools::IsSequential())
    {
        MatSetType(rMat, MATSEQAIJ);
        if (rowPreallocation > 0)
        {
            MatSeqAIJSetPreallocation(rMat, rowPreallocation, PETSC_NULL);
        }
    }
    else
    {
        MatSetType(rMat, MATMPIAIJ);
        if (rowPreallocation > 0)
        {
            MatMPIAIJSetPreallocation(rMat, rowPreallocation, PETSC_NULL, rowPreallocation, PETSC_NULL);
        }
    }

    MatSetFromOptions(rMat);

    if (ignoreOffProcEntries)//&& IsParallel())
    {
        if (rowPreallocation == 0)
        {
            // We aren't allowed to do non-zero allocation after setting MAT_IGNORE_OFF_PROC_ENTRIES
            WARNING("Ignoring MAT_IGNORE_OFF_PROC_ENTRIES flag because we might set non-zeroes later");
        }
        else
        {
#if (PETSC_VERSION_MAJOR == 3) //PETSc 3.x.x
            MatSetOption(rMat, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE);
#else
            MatSetOption(rMat, MAT_IGNORE_OFF_PROC_ENTRIES);
#endif
        }
    }
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 3) //PETSc 3.3 or later
    if (newAllocationError == false)
    {
        MatSetOption(rMat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
    }
#endif
}
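
/*
 * Illustrative usage sketch (editorial addition, not part of the original file):
 * SetupMat() creates a sparse AIJ matrix (sequential or MPI, depending on the
 * number of processes) with a fixed preallocation per row, e.g.
 *
 *     Mat m;
 *     PetscTools::SetupMat(m, 100, 100, 9); // 100x100, up to 9 nonzeros per row
 *     // ... assemble and use the matrix ...
 *     PetscTools::Destroy(m);
 */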

void PetscTools::DumpPetscObject(const Mat& rMat, const std::string& rOutputFileFullPath)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_CREATE;
#else
    PetscFileMode type = FILE_MODE_WRITE;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(), type, &view);
    MatView(rMat, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}

void PetscTools::DumpPetscObject(const Vec& rVec, const std::string& rOutputFileFullPath)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_CREATE;
#else
    PetscFileMode type = FILE_MODE_WRITE;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(), type, &view);
    VecView(rVec, view);
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}

void PetscTools::ReadPetscObject(Mat& rMat, const std::string& rOutputFileFullPath, Vec rParallelLayout)
{
    /*
     * PETSc (as of 3.1) doesn't provide any method for loading a Mat object with a user-defined parallel
     * layout, i.e. there's no equivalent to VecLoadIntoVector for Mat's.
     *
     * It seems to be in their future plans though: http://lists.mcs.anl.gov/pipermail/petsc-users/2010-March/006062.html
     */

    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_RDONLY;
#else
    PetscFileMode type = FILE_MODE_READ;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);

#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
    MatCreate(PETSC_COMM_WORLD,&rMat);
    MatSetType(rMat,MATMPIAIJ);
    MatLoad(rMat,view);
#else
    MatLoad(view, MATMPIAIJ, &rMat);
#endif

    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));

    if (rParallelLayout != NULL)
    {
        /*
         * The idea is to copy rMat into a matrix that has the appropriate
         * parallel layout. Inefficient...
         */
        PetscInt num_rows, num_local_rows;

        VecGetSize(rParallelLayout, &num_rows);
        VecGetLocalSize(rParallelLayout, &num_local_rows);

        Mat temp_mat;
        PetscTools::SetupMat(temp_mat, num_rows, num_rows, 100, num_local_rows, num_local_rows, false);

        MatCopy(rMat, temp_mat, DIFFERENT_NONZERO_PATTERN);

        PetscTools::Destroy(rMat);
        rMat = temp_mat;
    }
}

void PetscTools::ReadPetscObject(Vec& rVec, const std::string& rOutputFileFullPath, Vec rParallelLayout)
{
    PetscViewer view;
#if (PETSC_VERSION_MAJOR == 2 && PETSC_VERSION_MINOR == 2) //PETSc 2.2
    PetscViewerFileType type = PETSC_FILE_RDONLY;
#else
    PetscFileMode type = FILE_MODE_READ;
#endif

    PetscViewerBinaryOpen(PETSC_COMM_WORLD, rOutputFileFullPath.c_str(),
                          type, &view);
    if (rParallelLayout == NULL)
    {
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
        VecCreate(PETSC_COMM_WORLD,&rVec);
        VecSetType(rVec,VECMPI);
        VecLoad(rVec,view);
#else
        VecLoad(view, VECMPI, &rVec);
#endif
    }
    else
    {
        VecDuplicate(rParallelLayout, &rVec);
#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR >= 2) //PETSc 3.2 or later
        VecLoad(rVec,view);
#else
        VecLoadIntoVector(view, rVec);
#endif
    }
    PetscViewerDestroy(PETSC_DESTROY_PARAM(view));
}
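
/*
 * Illustrative usage sketch (editorial addition, not part of the original file):
 * DumpPetscObject() and ReadPetscObject() write and read PETSc binary files.
 * Assuming a Vec v and a writable path (the file name below is hypothetical),
 * and relying on the rParallelLayout argument having a default of NULL:
 *
 *     PetscTools::DumpPetscObject(v, "output/saved_vector.dat");
 *     Vec v_reloaded;
 *     PetscTools::ReadPetscObject(v_reloaded, "output/saved_vector.dat");
 *
 * Passing a Vec as the third argument to ReadPetscObject() makes the loaded
 * object copy that vector's parallel layout.
 */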

bool PetscTools::HasParMetis()
{
#ifdef __INTEL_COMPILER
    // Old versions of the Intel compiler can cause a PETSc ERROR and abort the program if ParMETIS is
    // checked for before some other calls to PETSc have been made. This nasty hack ensures that the
    // HasParMetis method always works on Intel.
    Mat mat;
    PetscTools::SetupMat(mat, 1, 1, 1);
    PetscTools::Destroy(mat);
#endif

    MatPartitioning part;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);

    // We are expecting an error from PETSc on systems that don't have the ParMETIS interface,
    // so suppress it in case it aborts
    PetscPushErrorHandler(PetscIgnoreErrorHandler, NULL);

#if (PETSC_VERSION_MAJOR == 2 || (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR < 2))
    PetscErrorCode parmetis_installed_error = MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
#else
    PetscErrorCode parmetis_installed_error = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
#endif

    // Stop suppressing errors
    PetscPopErrorHandler();

    // Note that this method probably leaks memory inside PETSc, because if MatPartitioningCreate fails
    // there isn't a proper handle to destroy.
    MatPartitioningDestroy(PETSC_DESTROY_PARAM(part));
    return (parmetis_installed_error == 0);
}
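
/*
 * Illustrative usage sketch (editorial addition, not part of the original file):
 *
 *     if (PetscTools::HasParMetis())
 *     {
 *         // safe to request a ParMETIS-based matrix partitioning
 *     }
 */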
