IPPL (Independent Parallel Particle Layer)
IPPL
Loading...
Searching...
No Matches
FEMVector.hpp
Go to the documentation of this file.
1namespace ippl{
2
3 template <typename T>
4 FEMVector<T>::FEMVector(size_t n, std::vector<size_t> neighbors,
5 std::vector< Kokkos::View<size_t*> > sendIdxs,
6 std::vector< Kokkos::View<size_t*> > recvIdxs) :
7 data_m("FEMVector::data", n), boundaryInfo_m(new BoundaryInfo(std::move(neighbors),
8 std::move(sendIdxs), std::move(recvIdxs))) {
9
10 }
11
12
13 template <typename T>
14 FEMVector<T>::FEMVector(size_t n) : data_m("FEMVector::data", n), boundaryInfo_m(nullptr){
15
16 }
17
18
19
20 template <typename T>
21 KOKKOS_FUNCTION
26
27
28 template <typename T>
29 FEMVector<T>::BoundaryInfo::BoundaryInfo(std::vector<size_t> neighbors,
30 std::vector< Kokkos::View<size_t*> > sendIdxs,
31 std::vector< Kokkos::View<size_t*> > recvIdxs) :
32 neighbors_m(neighbors), sendIdxs_m(sendIdxs), recvIdxs_m(recvIdxs) {
33
34 }
35
36
37 template <typename T>
39 // check that we have halo information
40 if (!boundaryInfo_m) {
41 throw IpplException(
42 "FEMVector::fillHalo()",
43 "Cannot do halo operations, as no MPI communication information is provided. "
44 "Did you use the correct constructor to construct the FEMVector?");
45 }
46
47 using memory_space = typename Kokkos::View<size_t*>::memory_space;
48 // List of MPI requests which we send
49 std::vector<MPI_Request> requests(boundaryInfo_m->neighbors_m.size());
50
51 // Send loop.
52 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
53 size_t neighborRank = boundaryInfo_m->neighbors_m[i];
54 size_t nsends = boundaryInfo_m->sendIdxs_m[i].extent(0);
55 size_t tag = mpi::tag::FEMVECTOR + ippl::Comm->rank();
56
57 // pack data, i.e., copy values from data_m to commBuffer_m
58 pack(boundaryInfo_m->sendIdxs_m[i]);
59
60
61 // ippl MPI communication which sends data to neighbors
63 ippl::Comm->getBuffer<memory_space, T>(nsends);
64 ippl::Comm->isend(neighborRank, tag, boundaryInfo_m->commBuffer_m, *archive,
65 requests[i], nsends);
66 archive->resetWritePos();
67 }
68
69 // Recieve loop
70 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
71 size_t neighborRank = boundaryInfo_m->neighbors_m[i];
72 size_t nrecvs = boundaryInfo_m->recvIdxs_m[i].extent(0);
73 size_t tag = mpi::tag::FEMVECTOR + boundaryInfo_m->neighbors_m[i];
74
75 // ippl MPI communication which will recive data from neighbors,
76 // will put data into commBuffer_m
78 ippl::Comm->getBuffer<memory_space, T>(nrecvs);
79 ippl::Comm->recv(neighborRank, tag, boundaryInfo_m->commBuffer_m, *archive,
80 nrecvs * sizeof(T), nrecvs);
81 archive->resetReadPos();
82
83 // unpack recieved data, i.e., copy from commBuffer_m to data_m
84 unpack<Assign>(boundaryInfo_m->recvIdxs_m[i]);
85 }
86
87 if (requests.size() > 0) {
88 MPI_Waitall(requests.size(), requests.data(), MPI_STATUSES_IGNORE);
89 }
90 ippl::Comm->freeAllBuffers();
91 }
92
93
94 template <typename T>
96 // check that we have halo information
97 if (!boundaryInfo_m) {
98 throw IpplException(
99 "FEMVector::accumulateHalo()",
100 "Cannot do halo operations, as no MPI communication information is provided. "
101 "Did you use the correct constructor to construct the FEMVector?");
102 }
103
104 using memory_space = typename Kokkos::View<size_t*>::memory_space;
105 // List of MPI requests which we send
106 std::vector<MPI_Request> requests(boundaryInfo_m->neighbors_m.size());
107
108 // Send loop.
109 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
110 size_t neighborRank = boundaryInfo_m->neighbors_m[i];
111 size_t nsends = boundaryInfo_m->recvIdxs_m[i].extent(0);
112 size_t tag = mpi::tag::FEMVECTOR + ippl::Comm->rank();
113
114 // pack data, i.e., copy values from data_m to commBuffer_m
115 pack(boundaryInfo_m->recvIdxs_m[i]);
116
117
118 // ippl MPI communication which sends data to neighbors
120 ippl::Comm->getBuffer<memory_space, T>(nsends);
121 ippl::Comm->isend(neighborRank, tag, boundaryInfo_m->commBuffer_m, *archive,
122 requests[i], nsends);
123 archive->resetWritePos();
124 }
125
126 // Receive loop
127 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
128 size_t neighborRank = boundaryInfo_m->neighbors_m[i];
129 size_t nrecvs = boundaryInfo_m->sendIdxs_m[i].extent(0);
130 size_t tag = mpi::tag::FEMVECTOR + boundaryInfo_m->neighbors_m[i];
131
132 // ippl MPI communication which will receive data from neighbors,
133 // will put data into commBuffer_m
135 ippl::Comm->getBuffer<memory_space, T>(nrecvs);
136 ippl::Comm->recv(neighborRank, tag, boundaryInfo_m->commBuffer_m, *archive,
137 nrecvs * sizeof(T), nrecvs);
138 archive->resetReadPos();
139
140 // unpack recieved data, i.e., copy from commBuffer_m to data_m
141 unpack<AssignAdd>(boundaryInfo_m->sendIdxs_m[i]);
142 }
143
144 if (requests.size() > 0) {
145 MPI_Waitall(requests.size(), requests.data(), MPI_STATUSES_IGNORE);
146 }
147 ippl::Comm->freeAllBuffers();
148 }
149
150
151 template <typename T>
152 void FEMVector<T>::setHalo(T setValue) {
153 // check that we have halo information
154 if (!boundaryInfo_m) {
155 return;
156 throw IpplException(
157 "FEMVector::setHalo()",
158 "Cannot do halo operations, as no MPI communication information is provided. "
159 "Did you use the correct constructor to construct the FEMVector?");
160 }
161
162 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
163 auto& view = boundaryInfo_m->recvIdxs_m[i];
164 Kokkos::parallel_for("FEMVector::setHalo()",view.extent(0),
165 KOKKOS_CLASS_LAMBDA(const size_t& j){
166 data_m[view(j)] = setValue;
167 }
168 );
169 }
170 }
171
172
173 template <typename T>
175 Kokkos::parallel_for("FEMVector::operator=(T value)", data_m.extent(0),
176 KOKKOS_CLASS_LAMBDA(const size_t& i){
177 data_m[i] = value;
178 }
179 );
180 return *this;
181 }
182
183
template <typename T>
template <typename E, size_t N>
// NOTE(review): the opening signature line of this operator (Doxygen inner
// line 186) is missing from the extracted source; presumably it reads
//   FEMVector<T>& FEMVector<T>::operator=(const detail::Expression<E, N>& expr) {
// -- confirm against the declaration in FEMVector.h.
    // Reinterpret the expression as a capturable expression so it can be
    // copied into the device lambda.
    using capture_type = detail::CapturedExpression<E, N>;
    capture_type expr_ = reinterpret_cast<const capture_type&>(expr);
    // Evaluate the expression element-wise into data_m.
    Kokkos::parallel_for("FEMVector::operator=(Expression)", data_m.extent(0),
        KOKKOS_CLASS_LAMBDA(const size_t& i){
            data_m[i] = expr_(i);
        }
    );
    return *this;
}
196
197
template <typename T>
// NOTE(review): the opening signature line of this operator (Doxygen inner
// line 199) is missing from the extracted source; presumably it reads
//   FEMVector<T>& FEMVector<T>::operator=(const FEMVector<T>& v) {
// -- confirm against the declaration in FEMVector.h.
    // Element-wise copy of the other vector's data into data_m.
    // NOTE(review): no size check is performed; assumes v has at least
    // data_m.extent(0) elements -- confirm with callers.
    auto view = v.getView();
    Kokkos::parallel_for("FEMVector::operator=(FEMVector)", data_m.extent(0),
        KOKKOS_CLASS_LAMBDA(const size_t& i){
            data_m[i] = view(i);
        }
    );
    return *this;
}
208
209
210 template <typename T>
211 KOKKOS_INLINE_FUNCTION T FEMVector<T>::operator[] (size_t i) const {
212 return data_m(i);
213 }
214
215
216 template <typename T>
217 KOKKOS_INLINE_FUNCTION T FEMVector<T>::operator() (size_t i) const {
218 return this->operator[](i);
219 }
220
221
222 template <typename T>
223 const Kokkos::View<T*>& FEMVector<T>::getView() const {
224 return data_m;
225 }
226
227
228 template <typename T>
229 size_t FEMVector<T>::size() const {
230 return data_m.extent(0);
231 }
232
233 template <typename T>
235 // We have to check if we have boundary information or not
236 if (boundaryInfo_m) {
237 // The neighbor_m can be simply passed to the new vector, the sendIdxs_m
238 // and recvIdxs_m need to be explicitly copied.
239 std::vector< Kokkos::View<size_t*> > newSendIdxs;
240 std::vector< Kokkos::View<size_t*> > newRecvIdxs;
241
242 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
243 newSendIdxs.emplace_back(Kokkos::View<size_t*>(boundaryInfo_m->sendIdxs_m[i].label(),
244 boundaryInfo_m->sendIdxs_m[i].extent(0)));
245 Kokkos::deep_copy(newSendIdxs[i], boundaryInfo_m->sendIdxs_m[i]);
246
247 newRecvIdxs.emplace_back(Kokkos::View<size_t*>(boundaryInfo_m->recvIdxs_m[i].label(),
248 boundaryInfo_m->recvIdxs_m[i].extent(0)));
249
250 Kokkos::deep_copy(newRecvIdxs[i], boundaryInfo_m->recvIdxs_m[i]);
251 }
252
253 // create the new FEMVector
254 FEMVector<T> newVector(size(), boundaryInfo_m->neighbors_m, newSendIdxs, newRecvIdxs);
255 // copy over the values
256 newVector = *this;
257
258 return newVector;
259 } else {
260 FEMVector<T> newVector(size());
261 // copy over the values
262 newVector = *this;
263
264 return newVector;
265 }
266 }
267
268 template <typename T>
269 template <typename K>
271 // We have to check if we have boundary information or not
272 if (boundaryInfo_m) {
273 // The neighbor_m can be simply passed to the new vector, the sendIdxs_m
274 // and recvIdxs_m need to be explicitly copied.
275 std::vector< Kokkos::View<size_t*> > newSendIdxs;
276 std::vector< Kokkos::View<size_t*> > newRecvIdxs;
277
278 for (size_t i = 0; i < boundaryInfo_m->neighbors_m.size(); ++i) {
279 newSendIdxs.emplace_back(Kokkos::View<size_t*>(boundaryInfo_m->sendIdxs_m[i].label(),
280 boundaryInfo_m->sendIdxs_m[i].extent(0)));
281 Kokkos::deep_copy(newSendIdxs[i], boundaryInfo_m->sendIdxs_m[i]);
282
283 newRecvIdxs.emplace_back(Kokkos::View<size_t*>(boundaryInfo_m->recvIdxs_m[i].label(),
284 boundaryInfo_m->recvIdxs_m[i].extent(0)));
285
286 Kokkos::deep_copy(newRecvIdxs[i], boundaryInfo_m->recvIdxs_m[i]);
287 }
288
289 // create the new FEMVector
290 FEMVector<K> newVector(size(), boundaryInfo_m->neighbors_m, newSendIdxs, newRecvIdxs);
291
292 return newVector;
293 } else {
294 FEMVector<K> newVector(size());
295
296 return newVector;
297 }
298 }
299
300
301 template <typename T>
302 void FEMVector<T>::pack(const Kokkos::View<size_t*>& idxStore) {
303 // check that we have halo information
304 if (!boundaryInfo_m) {
305 throw IpplException(
306 "FEMVector::pack()",
307 "Cannot do halo operations, as no MPI communication information is provided. "
308 "Did you use the correct constructor to construct the FEMVector?");
309 }
310
311 size_t nIdxs = idxStore.extent(0);
312 auto& bufferData = boundaryInfo_m->commBuffer_m.buffer;
313
314 if (bufferData.size() < nIdxs) {
315 int overalloc = Comm->getDefaultOverallocation();
316 Kokkos::realloc(bufferData, nIdxs * overalloc);
317 }
318
319 Kokkos::parallel_for("FEMVector::pack()", nIdxs,
320 KOKKOS_CLASS_LAMBDA(const size_t& i) {
321 bufferData(i) = data_m(idxStore(i));
322 }
323 );
324 Kokkos::fence();
325 }
326
327
328 template <typename T>
329 template <typename Op>
330 void FEMVector<T>::unpack(const Kokkos::View<size_t*>& idxStore) {
331 // check that we have halo information
332 if (!boundaryInfo_m) {
333 throw IpplException(
334 "FEMVector::unpack()",
335 "Cannot do halo operations, as no MPI communication information is provided. "
336 "Did you use the correct constructor to construct the FEMVector?");
337 }
338
339 size_t nIdxs = idxStore.extent(0);
340 auto& bufferData = boundaryInfo_m->commBuffer_m.buffer;
341 if (bufferData.size() < nIdxs) {
342 int overalloc = Comm->getDefaultOverallocation();
343 Kokkos::realloc(bufferData, nIdxs * overalloc);
344 }
345
346 Op op;
347 Kokkos::parallel_for("FEMVector::unpack()", nIdxs,
348 KOKKOS_CLASS_LAMBDA(const size_t& i) {
349 op(data_m(idxStore(i)), bufferData(i));
350 }
351 );
352 Kokkos::fence();
353 }
354
355
356
357} // namespace ippl
STL namespace.
Definition Archive.h:20
std::unique_ptr< mpi::Communicator > Comm
Definition Ippl.h:22
std::shared_ptr< archive_type< MemorySpace > > buffer_type
void unpack(const Kokkos::View< size_t * > &idxStore)
Unpack data from BoundaryInfo::commBuffer_m into FEMVector::data_m after communication.
std::shared_ptr< BoundaryInfo > boundaryInfo_m
Struct holding all the MPI and boundary information.
Definition FEMVector.h:385
void fillHalo()
Copy values from neighboring ranks into local halo.
Definition FEMVector.hpp:38
FEMVector< K > skeletonCopy() const
Create a new FEMVector with different data type, but same size and boundary information.
FEMVector< T > & operator=(T value)
Set all the values of the vector to value.
const Kokkos::View< T * > & getView() const
Get underlying data view.
KOKKOS_INLINE_FUNCTION T operator()(size_t i) const
Subscript operator to get value at position i.
Kokkos::View< T * > data_m
Data this object is storing.
Definition FEMVector.h:373
void pack(const Kokkos::View< size_t * > &idxStore)
Pack data into BoundaryInfo::commBuffer_m for MPI communication.
void accumulateHalo()
Accumulate halo values in neighbor.
Definition FEMVector.hpp:95
size_t size() const
Get the size (number of elements) of the vector.
KOKKOS_INLINE_FUNCTION T operator[](size_t i) const
Subscript operator to get value at position i.
FEMVector(size_t n, std::vector< size_t > neighbors, std::vector< Kokkos::View< size_t * > > sendIdxs, std::vector< Kokkos::View< size_t * > > recvIdxs)
Constructor taking size, neighbors, and halo exchange indices.
Definition FEMVector.hpp:4
FEMVector< T > deepCopy() const
Create a deep copy, where all the information of this vector is copied to a new one.
void setHalo(T setValue)
Set the halo cells to setValue.
Structure holding MPI neighbor and boundary information.
Definition FEMVector.h:293
std::vector< Kokkos::View< size_t * > > recvIdxs_m
Stores indices of FEMVector::data_m which are part of the halo.
Definition FEMVector.h:352
std::vector< size_t > neighbors_m
Stores the ranks of the neighboring MPI tasks.
Definition FEMVector.h:320
BoundaryInfo(std::vector< size_t > neighbors, std::vector< Kokkos::View< size_t * > > sendIdxs, std::vector< Kokkos::View< size_t * > > recvIdxs_m)
constructor for a BoundaryInfo object.
Definition FEMVector.hpp:29
std::vector< Kokkos::View< size_t * > > sendIdxs_m
Stores indices of FEMVector::data_m which need to be send to the MPI neighbors.
Definition FEMVector.h:335