Chaste Commit::1fd4e48e3990e67db148bc1bc4cf6991a0049d0c
DistanceMapCalculator.cpp
/*

Copyright (c) 2005-2024, University of Oxford.
All rights reserved.

University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.

This file is part of Chaste.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the University of Oxford nor the names of its
   contributors may be used to endorse or promote products derived from this
   software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "DistanceMapCalculator.hpp"
#include "DistributedTetrahedralMesh.hpp" // For dynamic cast

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::DistanceMapCalculator(
        AbstractTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>& rMesh)
    : mrMesh(rMesh),
      mWorkOnEntireMesh(true),
      mNumHalosPerProcess(nullptr),
      mRoundCounter(0u),
      mPopCounter(0u),
      mTargetNodeIndex(UINT_MAX),
      mSingleTarget(false)
{
    mNumNodes = mrMesh.GetNumNodes();

    // Determine whether this is a distributed mesh (this is what the include above is needed for)
    DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>* p_distributed_mesh =
            dynamic_cast<DistributedTetrahedralMesh<ELEMENT_DIM, SPACE_DIM>*>(&mrMesh);
    if (PetscTools::IsSequential() || p_distributed_mesh == nullptr)
    {
        // It's a non-distributed mesh
        mLo = 0;
        mHi = mNumNodes;
    }
    else
    {
        // It's a parallel (distributed) mesh (p_distributed_mesh is non-null and we are running in parallel)
        mWorkOnEntireMesh = false;
        mLo = mrMesh.GetDistributedVectorFactory()->GetLow();
        mHi = mrMesh.GetDistributedVectorFactory()->GetHigh();

        // Get local halo information
        p_distributed_mesh->GetHaloNodeIndices(mHaloNodeIndices);

        // Share information on the number of halo nodes
        unsigned my_size = mHaloNodeIndices.size();
        mNumHalosPerProcess = new unsigned[PetscTools::GetNumProcs()];
        MPI_Allgather(&my_size, 1, MPI_UNSIGNED, mNumHalosPerProcess, 1, MPI_UNSIGNED, PETSC_COMM_WORLD);
    }
}
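
/*
 * Note on halo nodes: in a distributed mesh each process owns the nodes in
 * [mLo, mHi) but also sees "halo" copies of off-process nodes belonging to
 * elements it touches. Gathering every process's halo count up-front lets
 * UpdateQueueFromRemote size its broadcast buffers without any further
 * communication.
 */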

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
void DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::ComputeDistanceMap(
        const std::vector<unsigned>& rSourceNodeIndices,
        std::vector<double>& rNodeDistances)
{
    rNodeDistances.resize(mNumNodes);
    for (unsigned index=0; index<mNumNodes; index++)
    {
        rNodeDistances[index] = DBL_MAX;
    }
    assert(mActivePriorityNodeIndexQueue.empty());
    if (mSingleTarget)
    {
        assert(rSourceNodeIndices.size() == 1);
        unsigned source_node_index = rSourceNodeIndices[0];

        // We need to make sure this is local, so that we can use the geometry
        if (mLo<=source_node_index && source_node_index<mHi)
        {
            double heuristic_correction = norm_2(mrMesh.GetNode(source_node_index)->rGetLocation()-mTargetNodePoint);
            PushLocal(heuristic_correction, source_node_index);
            rNodeDistances[source_node_index] = heuristic_correction;
        }
    }
    else
    {
        for (unsigned source_index=0; source_index<rSourceNodeIndices.size(); source_index++)
        {
            unsigned source_node_index = rSourceNodeIndices[source_index];
            PushLocal(0.0, source_node_index);
            rNodeDistances[source_node_index] = 0.0;
        }
    }

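    /*
     * Main loop: each round drains this process's local queue and then
     * exchanges halo-node distances with the other processes, so that
     * improvements found on one side of a partition boundary propagate to
     * the other. The loop ends when every process's queue is empty (or, for
     * a single-target search, as soon as any process reaches the target).
     */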
    bool non_empty_queue = true;
    mRoundCounter = 0;
    mPopCounter = 0;
    while (non_empty_queue)
    {
        bool termination = WorkOnLocalQueue(rNodeDistances);

        // Sanity check that we aren't repeating this round an excessive number of times
        if (mRoundCounter++ > 10 * PetscTools::GetNumProcs())
        {
            // This line will be hit if there's a parallel distributed mesh with a really bad partition
            NEVER_REACHED;
        }
        if (mSingleTarget && PetscTools::ReplicateBool(termination))
        {
            // A single process found the target already
            break;
        }
        non_empty_queue = UpdateQueueFromRemote(rNodeDistances);
    }

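    /*
     * Note: MPI forbids aliasing the send and receive buffers of a reduction
     * (unless MPI_IN_PLACE is used), which is why a local copy is taken
     * before the MPI_MIN reduction below combines each process's best-known
     * distances into everyone's vector.
     */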
    if (mWorkOnEntireMesh == false)
    {
        // Update all processes with the best values from everywhere
        // Take a local copy
        std::vector<double> local_distances = rNodeDistances;

        // Share it back into the vector
        MPI_Allreduce(&local_distances[0], &rNodeDistances[0], mNumNodes, MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);
    }
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::UpdateQueueFromRemote(std::vector<double>& rNodeDistances)
{
    if (mWorkOnEntireMesh)
    {
        // This update does nowt
        return !mActivePriorityNodeIndexQueue.empty();
    }
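
    /*
     * Each process takes a turn as broadcaster, sending the distances it
     * currently holds for its halo nodes; every receiver adopts any value
     * that is strictly better than its own and re-queues that node locally.
     */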
    for (unsigned bcast_process=0; bcast_process<PetscTools::GetNumProcs(); bcast_process++)
    {
        // The broadcasting process packs its halo distances and indices into flat 1-d arrays
        double* dist_exchange = new double[mNumHalosPerProcess[bcast_process]];
        unsigned* index_exchange = new unsigned[mNumHalosPerProcess[bcast_process]];
        if (PetscTools::GetMyRank() == bcast_process)
        {
            // Broadcaster fills the arrays
            for (unsigned index=0; index<mHaloNodeIndices.size(); index++)
            {
                dist_exchange[index] = rNodeDistances[mHaloNodeIndices[index]];
                index_exchange[index] = mHaloNodeIndices[index];
            }
        }

        /*
         * Broadcast - this could be done by casting indices to double and
         * packing everything into a single array. That would be better for
         * latency, but this is probably more readable.
         */
        MPI_Bcast(dist_exchange, mNumHalosPerProcess[bcast_process], MPI_DOUBLE,
                  bcast_process, PETSC_COMM_WORLD);
        MPI_Bcast(index_exchange, mNumHalosPerProcess[bcast_process], MPI_UNSIGNED,
                  bcast_process, PETSC_COMM_WORLD);
        if (PetscTools::GetMyRank() != bcast_process)
        {
            // Receiving processes take updates
            for (unsigned index=0; index<mNumHalosPerProcess[bcast_process]; index++)
            {
                unsigned global_index = index_exchange[index];
                // Is it a better answer?  (The relative tolerance stops a node being
                // re-queued over a round-off-level "improvement".)
                if (dist_exchange[index] < rNodeDistances[global_index]*(1.0-2*DBL_EPSILON))
                {
                    // Copy across - this may be unnecessary when PushLocal isn't going to push because it's not local
                    rNodeDistances[global_index] = dist_exchange[index];
                    PushLocal(rNodeDistances[global_index], global_index);
                }
            }
        }
        delete[] dist_exchange;
        delete[] index_exchange;
    }
    // Is any queue non-empty?
    bool non_empty_queue = PetscTools::ReplicateBool(!mActivePriorityNodeIndexQueue.empty());
    return non_empty_queue;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
bool DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::WorkOnLocalQueue(std::vector<double>& rNodeDistances)
{
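    /*
     * For single-target (A*) searches we periodically bail out of the local
     * queue so that the caller can check whether another process has already
     * found the target; pop_stop sets how many pops happen between these
     * checks, scaled to this process's share of the mesh.
     */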
    unsigned pop_stop = mNumNodes/(PetscTools::GetNumProcs()*20);
    if (pop_stop == 0u) pop_stop = 1u; // Guard the modulo below against division by zero on very small meshes
    while (!mActivePriorityNodeIndexQueue.empty())
    {
        // Get the next index in the queue (stored negated, so the max-heap yields the smallest distance)
        unsigned current_node_index = mActivePriorityNodeIndexQueue.top().second;
        double distance_when_queued = -mActivePriorityNodeIndexQueue.top().first;
        mActivePriorityNodeIndexQueue.pop();

        // Only act on nodes which haven't been acted on already
        // (It's possible that a better distance has been found and already been dealt with)
        if (distance_when_queued == rNodeDistances[current_node_index])
        {
            mPopCounter++;
            Node<SPACE_DIM>* p_current_node = mrMesh.GetNode(current_node_index);
            double current_heuristic = 0.0;
            if (mSingleTarget)
            {
                current_heuristic = norm_2(p_current_node->rGetLocation()-mTargetNodePoint);
            }

            // Loop over the elements containing the given node
            for (typename Node<SPACE_DIM>::ContainingElementIterator element_iterator = p_current_node->ContainingElementsBegin();
                 element_iterator != p_current_node->ContainingElementsEnd();
                 ++element_iterator)
            {
                // Get a pointer to the containing element
                Element<ELEMENT_DIM, SPACE_DIM>* p_containing_element = mrMesh.GetElement(*element_iterator);

                // Loop over the nodes of the element
                for (unsigned node_local_index=0;
                     node_local_index<p_containing_element->GetNumNodes();
                     node_local_index++)
                {
                    Node<SPACE_DIM>* p_neighbour_node = p_containing_element->GetNode(node_local_index);
                    unsigned neighbour_node_index = p_neighbour_node->GetIndex();

                    // Avoid revisiting the active node
                    if (neighbour_node_index != current_node_index)
                    {
                        double neighbour_heuristic = 0.0;
                        if (mSingleTarget)
                        {
                            neighbour_heuristic = norm_2(p_neighbour_node->rGetLocation()-mTargetNodePoint);
                        }
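                        /*
                         * A* bookkeeping: for queued nodes rNodeDistances holds
                         * the key g + h (h is zero when there is no single
                         * target). Subtracting the current node's heuristic
                         * recovers the true path length g; adding the
                         * neighbour's heuristic turns it back into that node's
                         * key. The Euclidean heuristic is consistent (triangle
                         * inequality), so keys are non-decreasing along paths
                         * and the Dijkstra-style expansion remains valid.
                         */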
                        // Test whether we have found a shorter path from the source to the neighbour through the current node
                        double updated_distance = rNodeDistances[current_node_index] +
                                                  norm_2(p_neighbour_node->rGetLocation() - p_current_node->rGetLocation())
                                                  - current_heuristic + neighbour_heuristic;
                        if (updated_distance < rNodeDistances[neighbour_node_index] * (1.0-2*DBL_EPSILON))
                        {
                            rNodeDistances[neighbour_node_index] = updated_distance;
                            PushLocal(updated_distance, neighbour_node_index);
                        }
                    }
                }
            }
            if (mSingleTarget)
            {
                if (current_node_index == mTargetNodeIndex)
                {
                    // Premature termination if there is a single goal in mind (and we found it)
                    return true;
                }
                if (mPopCounter%pop_stop == 0)
                {
                    // Premature termination -- in case the work has already been done by another process
                    return false;
                }
            }
        }
    }
    return false;
}

template<unsigned ELEMENT_DIM, unsigned SPACE_DIM>
double DistanceMapCalculator<ELEMENT_DIM, SPACE_DIM>::SingleDistance(unsigned sourceNodeIndex, unsigned targetNodeIndex)
{
    std::vector<unsigned> source_node_index_vector;
    source_node_index_vector.push_back(sourceNodeIndex);

    mTargetNodeIndex = targetNodeIndex; // For premature termination
    mSingleTarget = true;

    // Set up information on target (for A* guidance)
    c_vector<double, SPACE_DIM> target_point = zero_vector<double>(SPACE_DIM);
    if (mrMesh.GetDistributedVectorFactory()->IsGlobalIndexLocal(mTargetNodeIndex))
    {
        // Owner of target node sets target_point (others have zero)
        target_point = mrMesh.GetNode(mTargetNodeIndex)->rGetLocation();
    }

    // Communicate the target location from wherever it lives to everyone:
    // non-owners contributed zero above, so a SUM reduction recovers the owner's values
    MPI_Allreduce(&target_point[0], &mTargetNodePoint[0], SPACE_DIM, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD);

    std::vector<double> distances;
    ComputeDistanceMap(source_node_index_vector, distances);

    // Reset target, so we don't terminate early next time
    mSingleTarget = false;

    // Premature termination may have left the queue non-empty; clear it so the next calculation starts fresh
    if (!mActivePriorityNodeIndexQueue.empty())
    {
        mActivePriorityNodeIndexQueue = std::priority_queue<std::pair<double, unsigned> >();
    }

    return distances[targetNodeIndex];
}

// Explicit instantiation
template class DistanceMapCalculator<1, 1>;
template class DistanceMapCalculator<1, 2>;
template class DistanceMapCalculator<2, 2>;
template class DistanceMapCalculator<1, 3>;
//template class DistanceMapCalculator<2, 3>;
template class DistanceMapCalculator<3, 3>;
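
/*
 * Usage sketch (illustrative only, not part of this file). Assuming a mesh
 * loaded in the usual way, e.g. via TrianglesMeshReader ("cube_136_elements"
 * is just an example test mesh name):
 *
 *     TrianglesMeshReader<3,3> reader("mesh/test/data/cube_136_elements");
 *     TetrahedralMesh<3,3> mesh;
 *     mesh.ConstructFromMeshReader(reader);
 *
 *     DistanceMapCalculator<3,3> calculator(mesh);
 *     std::vector<unsigned> sources(1, 0u);   // geodesic sources: node 0
 *     std::vector<double> distances;
 *     calculator.ComputeDistanceMap(sources, distances); // one entry per node
 *     double d = calculator.SingleDistance(0u, 42u);     // A*-guided point-to-point query
 *
 * In parallel these calls are collective: every process must make them, and
 * each ends up with the full, reduced distance vector.
 */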