IPPL (Independent Parallel Particle Layer)
IPPL
Loading...
Searching...
No Matches
FieldLayout.hpp
Go to the documentation of this file.
1//
2// Class FieldLayout
3// FieldLayout describes how a given index space (represented by an NDIndex
4// object) is distributed among MPI ranks. It performs the initial
5// partitioning. The user may request that a particular dimension not be
6// partitioned by flagging that axis as 'SERIAL' (instead of 'PARALLEL').
7//
8#include "Ippl.h"
9
10#include <cstdlib>
11#include <limits>
12
14#include "Utility/IpplTimings.h"
15#include "Utility/PAssert.h"
16
18
19namespace ippl {
20
21 template <unsigned Dim>
23 // If we are touching another boundary component, that component must be parallel to us,
24 // so any 2s are unchanged. All other digits must be swapped because otherwise we would
25 // share a higher dimension boundary region with that other component and the digit
26 // would be 2
27 static const int digit_swap[3] = {1, 0, 2};
28 int match = 0;
29 for (unsigned d = 1; d < detail::countHypercubes(Dim); d *= 3) {
30 match += digit_swap[index % 3] * d;
31 index /= 3;
32 }
33 return match;
34 }
35
 36 template <unsigned Dim>
 // Default constructor: binds the layout to `communicator`, creates an empty
 // device view for the per-rank local domains plus its host mirror, and zeroes
 // the cached per-dimension minimum widths. No partitioning happens here; call
 // initialize() (or use the other constructor) to perform it.
 // NOTE(review): the constructor's signature line (37) was stripped by the doc
 // extractor; per the class member list it is
 // `FieldLayout(const mpi::Communicator& = MPI_COMM_WORLD)`.
 38 : comm(communicator)
 39 , dLocalDomains_m("local domains (device)", 0)
 40 , hLocalDomains_m(Kokkos::create_mirror_view(dLocalDomains_m)) {
 41 for (unsigned int d = 0; d < Dim; ++d) {
 // minWidth_m is only meaningful after calcWidths(); start from zero.
 42 minWidth_m[d] = 0;
 43 }
 44 }
45
 46 template <unsigned Dim>
 // Convenience constructor: delegates to the default constructor and then
 // immediately partitions `domain` over the communicator's ranks according to
 // `isParallel` (per-dimension PARALLEL/SERIAL flags) and `isAllPeriodic`.
 // NOTE(review): the first signature line (47) was stripped by the doc
 // extractor; the visible continuation shows the remaining parameters.
 48 std::array<bool, Dim> isParallel, bool isAllPeriodic)
 49 : FieldLayout(communicator) {
 50 initialize(domain, isParallel, isAllPeriodic);
 51 }
52
53 template <unsigned Dim>
54 void FieldLayout<Dim>::updateLayout(const std::vector<NDIndex<Dim>>& domains) {
55 if (domains.empty()) {
56 return;
57 }
58
59 for (unsigned int i = 0; i < domains.size(); i++) {
60 hLocalDomains_m(i) = domains[i];
61 }
62
64
65 Kokkos::deep_copy(dLocalDomains_m, hLocalDomains_m);
66
67 calcWidths();
68 }
69
70 template <unsigned Dim>
71 void FieldLayout<Dim>::initialize(const NDIndex<Dim>& domain, std::array<bool, Dim> isParallel,
72 bool isAllPeriodic) {
73 int nRanks = comm.size();
74
75 gDomain_m = domain;
76
77 isAllPeriodic_m = isAllPeriodic;
78
80
81 if (nRanks < 2) {
82 Kokkos::resize(dLocalDomains_m, nRanks);
83 Kokkos::resize(hLocalDomains_m, nRanks);
84 hLocalDomains_m(0) = domain;
85 Kokkos::deep_copy(dLocalDomains_m, hLocalDomains_m);
86 return;
87 }
88
89 /* Check in to how many local domains we can partition the given domain.
90 If there are more ranks than local domains, we throw an error, ippl does not allow this.
91 */
92 long totparelems = 1;
93 for (unsigned d = 0; d < Dim; ++d) {
94 totparelems *= isParallelDim_m[d] ? domain[d].length() : 1;
95 }
96
97 if (totparelems < nRanks) {
98 throw std::runtime_error("FieldLayout:initialize: domain can only be partitioned in to "
99 + std::to_string(totparelems) + " local domains, but there are " + std::to_string(nRanks) + " ranks, decrease the number of ranks or increase the domain.");
100 }
101
102 Kokkos::resize(dLocalDomains_m, nRanks);
103 Kokkos::resize(hLocalDomains_m, nRanks);
104
105 detail::Partitioner<Dim> partitioner;
106 partitioner.split(domain, hLocalDomains_m, isParallel, nRanks);
107
109
110 Kokkos::deep_copy(dLocalDomains_m, hLocalDomains_m);
111
112 calcWidths();
113 }
114
 115 template <unsigned Dim>
 // Returns the calling rank's own subdomain, looked up in the host mirror by
 // comm.rank().
 // NOTE(review): the signature line (116) was stripped by the doc extractor;
 // per the member list it is `const NDIndex_t& getLocalNDIndex() const`.
 117 return hLocalDomains_m(comm.rank());
 118 }
119
 120 template <unsigned Dim>
 // Returns the subdomain owned by `rank` from the host mirror.
 // NOTE(review): the signature line (121) was stripped by the doc extractor;
 // presumably the rank-taking overload of getLocalNDIndex — confirm against
 // the original header.
 122 // Rank must be valid for the communicator
 123 PAssert(rank >= 0 && rank < comm.size());
 124
 125 return hLocalDomains_m(rank);
 126 }
127
 128 template <unsigned Dim>
 // NOTE(review): the five one-line member definitions below (original lines
 // 129–155) were stripped by the doc extractor, leaving only their template
 // headers. Judging by the class member list they are the trivial accessors
 // (getDeviceLocalDomains / getHostLocalDomains / getNeighbors /
 // getNeighborsSendRange / getNeighborsRecvRange / isParallel) — recover them
 // from the original FieldLayout.hpp before editing this region.
 133
 134 template <unsigned Dim>
 138
 139 template <unsigned Dim>
 143
 144 template <unsigned Dim>
 149
 150 template <unsigned Dim>
 155
156 template <unsigned Dim>
157 void FieldLayout<Dim>::write(std::ostream& out) const {
158 if (comm.rank() > 0) {
159 return;
160 }
161
162 out << "Domain = " << gDomain_m << "\n"
163 << "Total number of boxes = " << hLocalDomains_m.size() << "\n";
164
165 using size_type = typename host_mirror_type::size_type;
166 for (size_type i = 0; i < hLocalDomains_m.size(); ++i) {
167 out << " Box " << i << " " << hLocalDomains_m(i) << "\n";
168 }
169 }
170
 171 template <unsigned Dim>
 // Recomputes minWidth_m[d]: the smallest extent along dimension d over all
 // per-rank subdomains in the host mirror.
 // NOTE(review): the signature line (172) was stripped by the doc extractor;
 // presumably `void FieldLayout<Dim>::calcWidths() {` — confirm against the
 // original header.
 173 // initialize widths first
 174 for (unsigned int d = 0; d < Dim; ++d) {
 // Start from the global extent; every local width is <= this.
 175 minWidth_m[d] = gDomain_m[d].length();
 176 }
 177
 178 using size_type = typename host_mirror_type::size_type;
 179 for (size_type i = 0; i < hLocalDomains_m.size(); ++i) {
 180 const NDIndex_t& dom = hLocalDomains_m(i);
 181 for (unsigned int d = 0; d < Dim; ++d) {
 182 if ((unsigned int)dom[d].length() < minWidth_m[d]) {
 183 minWidth_m[d] = dom[d].length();
 184 }
 185 }
 186 }
 187 }
188
 189 template <unsigned Dim>
 // Recursively discovers neighbors across periodic boundaries. For each
 // dimension >= d0 whose face of `localDomain` sits on the global boundary,
 // the ghost-grown box is shifted by one global period; any overlap with the
 // (temporarily shifted-back) neighbor is registered via addNeighbors, and the
 // search recurses into higher codimensions to catch edge/corner wraps.
 // `offsets` accumulates the per-dimension shifts active along the recursion;
 // `grown`, `neighborDomain` and `offsets` are restored before returning.
 190 void FieldLayout<Dim>::findPeriodicNeighbors(const int nghost, const NDIndex<Dim>& localDomain,
 191 NDIndex<Dim>& grown, NDIndex<Dim>& neighborDomain,
 192 const int rank,
 193 std::map<unsigned int, int>& offsets, unsigned d0,
 194 unsigned codim) {
 195 for (unsigned int d = d0; d < Dim; ++d) {
 196 // 0 - check upper boundary
 197 // 1 - check lower boundary
 198 for (int k = 0; k < 2; ++k) {
 // Offset is +/- the global length of dimension d, or 0 when this
 // face does not lie on the global boundary (see getPeriodicOffset).
 199 auto offset = offsets[d] = getPeriodicOffset(localDomain, d, k);
 200 if (offset == 0) {
 201 continue;
 202 }
 203
 204 grown[d] += offset;
 205
 206 if (grown.touches(neighborDomain)) {
 207 auto intersect = grown.intersect(neighborDomain);
 // Shift the neighbor into the wrapped frame while recording ranges.
 // CAUTION: the structured-binding `d`/`offset` shadow the loop
 // variable `d` and the outer `offset` inside these two loops.
 208 for (auto& [d, offset] : offsets) {
 209 neighborDomain[d] -= offset;
 210 }
 211 addNeighbors(grown, localDomain, neighborDomain, intersect, nghost, rank);
 // Undo the shift so the caller sees neighborDomain unchanged.
 212 for (auto& [d, offset] : offsets) {
 213 neighborDomain[d] += offset;
 214 }
 // NOTE(review): line 215 was stripped by the doc extractor; brace
 // counting indicates it was the `}` closing the touches-branch,
 // so the recursion below runs for every boundary face, not only
 // touching ones — confirm against the original header.
 216 if (codim + 1 < Dim) {
 217 findPeriodicNeighbors(nghost, localDomain, grown, neighborDomain, rank, offsets,
 218 d + 1, codim + 1);
 219 }
 220
 221 grown[d] -= offset;
 222 offsets.erase(d);
 223 }
 224 }
 225 }
226
 227 template <unsigned Dim>
 // Rebuilds this rank's neighbor lists and halo send/recv ranges by comparing
 // its ghost-grown box against every other rank's subdomain, then searching
 // periodic images (see note at line 266).
 // NOTE(review): the defining line (228) was stripped by the doc extractor;
 // per the member list it is `void findNeighbors(int nghost = 1)`.
 229 /* We need to reset the neighbor list
 230 * and its ranges because of the repartitioner.
 231 */
 232 for (size_t i = 0; i < detail::countHypercubes(Dim) - 1; i++) {
 233 neighbors_m[i].clear();
 234 neighborsSendRange_m[i].clear();
 235 neighborsRecvRange_m[i].clear();
 236 }
 237
 238 int myRank = comm.rank();
 239
 240 // get my local box
 241 auto& nd = hLocalDomains_m[myRank];
 242
 243 // grow the box by nghost cells in each dimension
 244 auto gnd = nd.grow(nghost);
 245
 246 static IpplTimings::TimerRef findInternalNeighborsTimer =
 247 IpplTimings::getTimer("findInternal");
 248 static IpplTimings::TimerRef findPeriodicNeighborsTimer =
 249 IpplTimings::getTimer("findPeriodic");
 250 for (int rank = 0; rank < comm.size(); ++rank) {
 251 if (rank == myRank) {
 252 // do not compare with my domain
 253 continue;
 254 }
 255
 256 auto& ndNeighbor = hLocalDomains_m[rank];
 257 IpplTimings::startTimer(findInternalNeighborsTimer);
 258 // For inter-processor neighbors
 259 if (gnd.touches(ndNeighbor)) {
 260 auto intersect = gnd.intersect(ndNeighbor);
 261 addNeighbors(gnd, nd, ndNeighbor, intersect, nghost, rank);
 262 }
 263 IpplTimings::stopTimer(findInternalNeighborsTimer);
 264
 265 IpplTimings::startTimer(findPeriodicNeighborsTimer);
 // NOTE(review): line 266 was stripped by the doc extractor. The extra
 // closing brace at line 269 implies an opening guard here — most
 // likely `if (isAllPeriodic_m) {`, matching the flag set in
 // initialize() — confirm against the original header.
 267 std::map<unsigned int, int> offsets;
 268 findPeriodicNeighbors(nghost, nd, gnd, ndNeighbor, rank, offsets);
 269 }
 270 IpplTimings::stopTimer(findPeriodicNeighborsTimer);
 271 }
 272 }
273
 274 template <unsigned Dim>
 // Records `rank` as a neighbor in the hypercube component selected by the
 // shape of `intersect`, together with the halo exchange index ranges.
 // NOTE(review): the first signature line (275) was stripped by the doc
 // extractor; per the member list the parameters are
 // (const NDIndex_t& gnd, const NDIndex_t& nd, const NDIndex_t& ndNeighbor,
 //  const NDIndex_t& intersect, int nghost, int rank).
 276 const NDIndex_t& ndNeighbor, const NDIndex_t& intersect,
 277 int nghost, int rank) {
 278 bound_type rangeSend, rangeRecv;
 // Send range: our cells that fall inside the neighbor's grown box.
 279 rangeSend = getBounds(nd, ndNeighbor, nd, nghost);
 280
 // Recv range: the neighbor's cells that fall inside our grown box.
 281 rangeRecv = getBounds(ndNeighbor, nd, nd, nghost);
 283 int index = 0;
 284 for (unsigned d = 0, digit = 1; d < Dim; d++, digit *= 3) {
 285 // For each dimension, check what kind of intersection we have and construct
 286 // an index for the component based on its base-3 representation with the
 287 // following properties for each digit:
 288 // 0 - touching the lower axis value
 289 // 1 - touching the upper axis value
 290 // 2 - parallel to the axis
 291 if (intersect[d].length() == 1) {
 292 if (gnd[d].first() != intersect[d].first()) {
 293 index += digit;
 294 }
 295 } else {
 296 index += 2 * digit;
 297 }
 298 }
 299 neighbors_m[index].push_back(rank);
 300 neighborsSendRange_m[index].push_back(rangeSend);
 301 neighborsRecvRange_m[index].push_back(rangeRecv);
 302 }
303
 304 template <unsigned Dim>
 // Computes the index bounds of the overlap between `nd1` and the ghost-grown
 // box of `nd2`, expressed relative to the frame of `offset` and shifted by
 // `nghost` so the result can index a ghosted view directly.
 // NOTE(review): the first signature line (305) was stripped by the doc
 // extractor; per the member list it is
 // `bound_type getBounds(const NDIndex_t& nd1, const NDIndex_t& nd2,
 //                       const NDIndex_t& offset, int nghost)`.
 306 const NDIndex_t& nd2,
 307 const NDIndex_t& offset,
 308 int nghost) {
 309 NDIndex<Dim> gnd = nd2.grow(nghost);
 311 NDIndex<Dim> overlap = gnd.intersect(nd1);
 312
 313 bound_type intersect;
 314
 315 /* Obtain the intersection bounds with local ranges of the view.
 316 * Add "+1" to the upper bound since Kokkos loops always to "< extent".
 317 */
 318 for (size_t i = 0; i < Dim; ++i) {
 319 intersect.lo[i] = overlap[i].first() - offset[i].first() /*offset*/ + nghost;
 320 intersect.hi[i] = overlap[i].last() - offset[i].first() /*offset*/ + nghost + 1;
 321 }
 322
 323 return intersect;
 // NOTE(review): the function's closing brace (line 324) was stripped by the
 // doc extractor.
 325
 326 template <unsigned Dim>
 // Returns the shift needed to wrap `nd` across the periodic global boundary
 // in dimension d: k == 0 checks the upper face (shift by -global length),
 // k == 1 the lower face (shift by +global length); 0 when the face is not on
 // the global boundary. Throws IpplException for any other k.
 327 int FieldLayout<Dim>::getPeriodicOffset(const NDIndex_t& nd, const unsigned int d,
 328 const int k) {
 329 switch (k) {
 330 case 0:
 331 if (nd[d].max() == gDomain_m[d].max()) {
 332 return -gDomain_m[d].length();
 // NOTE(review): the `}` closing this `if` (line 333) was stripped by the
 // doc extractor.
 334 break;
 335 case 1:
 336 if (nd[d].min() == gDomain_m[d].min()) {
 337 return gDomain_m[d].length();
 338 }
 339 break;
 340 default:
 341 throw IpplException("FieldLayout:getPeriodicOffset", "k has to be either 0 or 1");
 342 }
 343 return 0;
 // NOTE(review): the function's closing brace (line 344) was stripped by the
 // doc extractor.
 345
346} // namespace ippl
constexpr unsigned Dim
ippl::detail::size_type size_type
Definition datatypes.h:23
constexpr KOKKOS_INLINE_FUNCTION auto first()
Definition AbsorbingBC.h:10
#define PAssert(c)
Definition PAssert.h:116
Definition Archive.h:20
KOKKOS_INLINE_FUNCTION Vector< T, Dim > min(const Vector< T, Dim > &a, const Vector< T, Dim > &b)
Definition Vector.hpp:217
KOKKOS_INLINE_FUNCTION constexpr unsigned int countHypercubes(unsigned int dim)
Definition FieldLayout.h:44
std::array< bool, Dim > isParallelDim_m
typename view_type::host_mirror_type host_mirror_type
void findNeighbors(int nghost=1)
const neighbor_list & getNeighbors() const
const host_mirror_type getHostLocalDomains() const
unsigned int minWidth_m[Dim]
void addNeighbors(const NDIndex_t &gnd, const NDIndex_t &nd, const NDIndex_t &ndNeighbor, const NDIndex_t &intersect, int nghost, int rank)
const view_type getDeviceLocalDomains() const
neighbor_list neighbors_m
neighbor_range_list neighborsSendRange_m
std::array< bool, Dim > isParallel() const
std::array< rank_list, detail::countHypercubes(Dim) - 1 > neighbor_list
typename detail::ViewType< NDIndex_t, 1 >::view_type view_type
void initialize(const NDIndex< Dim > &domain, std::array< bool, Dim > decomp, bool isAllPeriodic=false)
const NDIndex_t & getLocalNDIndex() const
void findPeriodicNeighbors(const int nghost, const NDIndex< Dim > &localDomain, NDIndex< Dim > &grown, NDIndex< Dim > &neighborDomain, const int rank, std::map< unsigned int, int > &offsets, unsigned d0=0, unsigned codim=0)
int getPeriodicOffset(const NDIndex_t &nd, const unsigned int d, const int k)
neighbor_range_list neighborsRecvRange_m
mpi::Communicator comm
view_type dLocalDomains_m
Local domains (device view).
FieldLayout(const mpi::Communicator &=MPI_COMM_WORLD)
host_mirror_type hLocalDomains_m
Local domains (host mirror view).
void write(std::ostream &=std::cout) const
std::array< bounds_list, detail::countHypercubes(Dim) - 1 > neighbor_range_list
const neighbor_range_list & getNeighborsSendRange() const
NDIndex< Dim > NDIndex_t
const neighbor_range_list & getNeighborsRecvRange() const
bound_type getBounds(const NDIndex_t &nd1, const NDIndex_t &nd2, const NDIndex_t &offset, int nghost)
NDIndex_t gDomain_m
Global domain.
void updateLayout(const std::vector< NDIndex_t > &domains)
static int getMatchingIndex(int index)
KOKKOS_INLINE_FUNCTION bool touches(const NDIndex< Dim > &) const
Definition NDIndex.hpp:95
KOKKOS_INLINE_FUNCTION Vector< int, Dim > last() const
Definition NDIndex.hpp:178
KOKKOS_INLINE_FUNCTION Vector< int, Dim > first() const
Definition NDIndex.hpp:170
KOKKOS_INLINE_FUNCTION NDIndex< Dim > intersect(const NDIndex< Dim > &) const
Definition NDIndex.hpp:70
KOKKOS_INLINE_FUNCTION Vector< size_t, Dim > length() const
Definition NDIndex.hpp:162
KOKKOS_INLINE_FUNCTION NDIndex< Dim > grow(int ncells) const
Definition NDIndex.hpp:79
void split(const NDIndex< Dim > &domain, view_type &view, const std::array< bool, Dim > &isParallel, int nSplits) const
Timing::TimerRef TimerRef
static TimerRef getTimer(const char *nm)
static void stopTimer(TimerRef t)
static void startTimer(TimerRef t)