5 std::vector< Kokkos::View<size_t*> > sendIdxs,
6 std::vector< Kokkos::View<size_t*> > recvIdxs) :
8 std::move(sendIdxs),
std::move(recvIdxs))) {
30 std::vector< Kokkos::View<size_t*> > sendIdxs,
31 std::vector< Kokkos::View<size_t*> > recvIdxs) :
42 "FEMVector::fillHalo()",
43 "Cannot do halo operations, as no MPI communication information is provided. "
44 "Did you use the correct constructor to construct the FEMVector?");
47 using memory_space =
typename Kokkos::View<size_t*>::memory_space;
49 std::vector<MPI_Request> requests(
boundaryInfo_m->neighbors_m.size());
66 archive->resetWritePos();
80 nrecvs *
sizeof(
T), nrecvs);
81 archive->resetReadPos();
87 if (requests.size() > 0) {
88 MPI_Waitall(requests.size(), requests.data(), MPI_STATUSES_IGNORE);
99 "FEMVector::accumulateHalo()",
100 "Cannot do halo operations, as no MPI communication information is provided. "
101 "Did you use the correct constructor to construct the FEMVector?");
104 using memory_space =
typename Kokkos::View<size_t*>::memory_space;
106 std::vector<MPI_Request> requests(
boundaryInfo_m->neighbors_m.size());
122 requests[i], nsends);
123 archive->resetWritePos();
137 nrecvs *
sizeof(
T), nrecvs);
138 archive->resetReadPos();
144 if (requests.size() > 0) {
145 MPI_Waitall(requests.size(), requests.data(), MPI_STATUSES_IGNORE);
151 template <
typename T>
157 "FEMVector::setHalo()",
158 "Cannot do halo operations, as no MPI communication information is provided. "
159 "Did you use the correct constructor to construct the FEMVector?");
164 Kokkos::parallel_for(
"FEMVector::setHalo()",view.extent(0),
165 KOKKOS_CLASS_LAMBDA(
const size_t& j){
166 data_m[view(j)] = setValue;
173 template <
typename T>
175 Kokkos::parallel_for(
"FEMVector::operator=(T value)",
data_m.extent(0),
176 KOKKOS_CLASS_LAMBDA(
const size_t& i){
184 template <
typename T>
185 template <
typename E,
size_t N>
188 capture_type expr_ =
reinterpret_cast<const capture_type&
>(expr);
189 Kokkos::parallel_for(
"FEMVector::operator=(Expression)",
data_m.extent(0),
190 KOKKOS_CLASS_LAMBDA(
const size_t& i){
198 template <
typename T>
201 Kokkos::parallel_for(
"FEMVector::operator=(FEMVector)",
data_m.extent(0),
202 KOKKOS_CLASS_LAMBDA(
const size_t& i){
210 template <
typename T>
216 template <
typename T>
222 template <
typename T>
228 template <
typename T>
233 template <
typename T>
239 std::vector< Kokkos::View<size_t*> > newSendIdxs;
240 std::vector< Kokkos::View<size_t*> > newRecvIdxs;
243 newSendIdxs.emplace_back(Kokkos::View<size_t*>(
boundaryInfo_m->sendIdxs_m[i].label(),
247 newRecvIdxs.emplace_back(Kokkos::View<size_t*>(
boundaryInfo_m->recvIdxs_m[i].label(),
268 template <
typename T>
269 template <
typename K>
275 std::vector< Kokkos::View<size_t*> > newSendIdxs;
276 std::vector< Kokkos::View<size_t*> > newRecvIdxs;
279 newSendIdxs.emplace_back(Kokkos::View<size_t*>(
boundaryInfo_m->sendIdxs_m[i].label(),
283 newRecvIdxs.emplace_back(Kokkos::View<size_t*>(
boundaryInfo_m->recvIdxs_m[i].label(),
301 template <
typename T>
307 "Cannot do halo operations, as no MPI communication information is provided. "
308 "Did you use the correct constructor to construct the FEMVector?");
311 size_t nIdxs = idxStore.extent(0);
314 if (bufferData.size() < nIdxs) {
315 int overalloc =
Comm->getDefaultOverallocation();
316 Kokkos::realloc(bufferData, nIdxs * overalloc);
319 Kokkos::parallel_for(
"FEMVector::pack()", nIdxs,
320 KOKKOS_CLASS_LAMBDA(
const size_t& i) {
321 bufferData(i) =
data_m(idxStore(i));
328 template <
typename T>
329 template <
typename Op>
334 "FEMVector::unpack()",
335 "Cannot do halo operations, as no MPI communication information is provided. "
336 "Did you use the correct constructor to construct the FEMVector?");
339 size_t nIdxs = idxStore.extent(0);
341 if (bufferData.size() < nIdxs) {
342 int overalloc =
Comm->getDefaultOverallocation();
343 Kokkos::realloc(bufferData, nIdxs * overalloc);
347 Kokkos::parallel_for(
"FEMVector::unpack()", nIdxs,
348 KOKKOS_CLASS_LAMBDA(
const size_t& i) {
349 op(
data_m(idxStore(i)), bufferData(i));
std::unique_ptr< mpi::Communicator > Comm
std::shared_ptr< archive_type< MemorySpace > > buffer_type
void unpack(const Kokkos::View< size_t * > &idxStore)
Unpack data from BoundaryInfo::commBuffer_m into FEMVector::data_m after communication.
std::shared_ptr< BoundaryInfo > boundaryInfo_m
Struct holding all the MPI and boundary information.
void fillHalo()
Copy values from neighboring ranks into local halo.
FEMVector< K > skeletonCopy() const
Create a new FEMVector with a different data type, but the same size and boundary information.
FEMVector< T > & operator=(T value)
Set all the values of the vector to value.
const Kokkos::View< T * > & getView() const
Get underlying data view.
KOKKOS_INLINE_FUNCTION T operator()(size_t i) const
Subscript operator to get value at position i.
Kokkos::View< T * > data_m
Data this object is storing.
void pack(const Kokkos::View< size_t * > &idxStore)
Pack data into BoundaryInfo::commBuffer_m for MPI communication.
void accumulateHalo()
Accumulate halo values into the neighboring ranks.
size_t size() const
Get the size (number of elements) of the vector.
KOKKOS_INLINE_FUNCTION T operator[](size_t i) const
Subscript operator to get value at position i.
FEMVector(size_t n, std::vector< size_t > neighbors, std::vector< Kokkos::View< size_t * > > sendIdxs, std::vector< Kokkos::View< size_t * > > recvIdxs)
Constructor taking size, neighbors, and halo exchange indices.
FEMVector< T > deepCopy() const
Create a deep copy, where all the information of this vector is copied to a new one.
void setHalo(T setValue)
Set the halo cells to setValue.
Structure holding MPI neighbor and boundary information.
std::vector< Kokkos::View< size_t * > > recvIdxs_m
Stores indices of FEMVector::data_m which are part of the halo.
std::vector< size_t > neighbors_m
Stores the ranks of the neighboring MPI tasks.
BoundaryInfo(std::vector< size_t > neighbors, std::vector< Kokkos::View< size_t * > > sendIdxs, std::vector< Kokkos::View< size_t * > > recvIdxs_m)
Constructor for a BoundaryInfo object.
std::vector< Kokkos::View< size_t * > > sendIdxs_m
Stores indices of FEMVector::data_m which need to be sent to the MPI neighbors.