IPPL (Independent Parallel Particle Layer)
IPPL
Loading...
Searching...
No Matches
LandauDampingManager.h
Go to the documentation of this file.
1#ifndef IPPL_LANDAU_DAMPING_MANAGER_H
2#define IPPL_LANDAU_DAMPING_MANAGER_H
3
4#include <memory>
5
6#include "AlpineManager.h"
7#include "FieldContainer.hpp"
8#include "FieldSolver.hpp"
9#include "LoadBalancer.hpp"
10#include "Manager/BaseManager.h"
11#include "ParticleContainer.hpp"
12#include "Random/Distribution.h"
15#include "Random/Randn.h"
16
18
19// define functions used in sampling particles
21 struct CDF {
22 KOKKOS_INLINE_FUNCTION double operator()(double x, unsigned int d,
23 const double* params_p) const {
24 return x
25 + (params_p[d * 2 + 0] / params_p[d * 2 + 1])
26 * Kokkos::sin(params_p[d * 2 + 1] * x);
27 }
28 };
29
30 struct PDF {
31 KOKKOS_INLINE_FUNCTION double operator()(double x, unsigned int d,
32 double const* params_p) const {
33 return 1.0 + params_p[d * 2 + 0] * Kokkos::cos(params_p[d * 2 + 1] * x);
34 }
35 };
36
37 struct Estimate {
38 KOKKOS_INLINE_FUNCTION double operator()(double u, unsigned int d,
39 double const* params_p) const {
40 return u + params_p[d] * 0.;
41 }
42 };
43};
44
45template <typename T, unsigned Dim>
46class LandauDampingManager : public AlpineManager<T, Dim> {
47public:
52
// Construct a Landau-damping simulation manager.
// totalP_: total number of macro-particles; nt_: number of time steps;
// nr_: grid cells per dimension; lbt_: load-balance threshold (a value of
// 1.0 disables the initial repartition — see pre_run/initialization);
// solver_: field solver name (e.g. "PCG", "OPEN"); stepMethod_: time
// integrator name (only "LeapFrog" is accepted by advance()).
// All state setup is delegated to the AlpineManager base class.
 53 LandauDampingManager(size_type totalP_, int nt_, Vector_t<int, Dim>& nr_, double lbt_,
 54 std::string& solver_, std::string& stepMethod_)
 55 : AlpineManager<T, Dim>(totalP_, nt_, nr_, lbt_, solver_, stepMethod_) {}
56
// Overload additionally forwarding preconditioner parameters, used when the
// field solver is the preconditioned conjugate-gradient solver ("PCG").
// Same parameter meanings as the other constructor; delegates to AlpineManager.
 57 LandauDampingManager(size_type totalP_, int nt_, Vector_t<int, Dim>& nr_, double lbt_,
 58 std::string& solver_, std::string& stepMethod_,
 59 std::vector<std::string> preconditioner_params_)
 60 : AlpineManager<T, Dim>(totalP_, nt_, nr_, lbt_, solver_, stepMethod_,
 61 preconditioner_params_) {}
62
64
// One-time simulation setup: configures the domain, physics parameters,
// field/particle containers, solver and load balancer, then performs a
// warm-up solve plus the first real field solve and dumps diagnostics.
 65 void pre_run() override {
 66 Inform m("Pre Run");
 67
 68 const double pi = Kokkos::numbers::pi_v<T>;
 69
// This test case requires periodic boundaries; the open-boundary solver
// is explicitly rejected.
 70 if (this->solver_m == "OPEN") {
 71 throw IpplException("LandauDamping",
 72 "Open boundaries solver incompatible with this simulation!");
 73 }
 74
 75 for (unsigned i = 0; i < Dim; i++) {
 76 this->domain_m[i] = ippl::Index(this->nr_m[i]);
 77 }
 78
// Landau-damping parameters: perturbation wavenumber kw = 0.5 and
// amplitude alpha = 0.05; box spans one perturbation wavelength per
// dimension, [0, 2*pi/kw].
 79 this->decomp_m.fill(true);
 80 this->kw_m = 0.5;
 81 this->alpha_m = 0.05;
 82 this->rmin_m = 0.0;
 83 this->rmax_m = 2 * pi / this->kw_m;
 84
 85 this->hr_m = this->rmax_m / this->nr_m;
 86 // Q = -\int\int f dx dv
// Total charge = -(product of box lengths): std::reduce with seed -1.
 87 this->Q_m =
 88 std::reduce(this->rmax_m.begin(), this->rmax_m.end(), -1., std::multiplies<double>());
 89 this->origin_m = this->rmin_m;
// Time step capped at 0.05 and limited by half the smallest cell size
// (CFL-like restriction for unit-speed particles — TODO confirm intent).
 90 this->dt_m = std::min(.05, 0.5 * *std::min_element(this->hr_m.begin(), this->hr_m.end()));
 91 this->it_m = 0;
 92 this->time_m = 0.0;
 93
 94 m << "Discretization:" << endl
 95 << "nt " << this->nt_m << " Np= " << this->totalP_m << " grid = " << this->nr_m << endl;
 96
 97 this->isAllPeriodic_m = true;
 98
 99 this->setFieldContainer(std::make_shared<FieldContainer_t>(
 100 this->hr_m, this->rmin_m, this->rmax_m, this->decomp_m, this->domain_m, this->origin_m,
 101 this->isAllPeriodic_m));
 102
 103 this->setParticleContainer(std::make_shared<ParticleContainer_t>(
 104 this->fcontainer_m->getMesh(), this->fcontainer_m->getFL()));
 105
 106 this->fcontainer_m->initializeFields(this->solver_m);
 107
// PCG takes the extra preconditioner parameter list; all other solvers
// use the three-field constructor.
 108 if (this->getSolver() == "PCG") {
 109 this->setFieldSolver(std::make_shared<FieldSolver_t>(
 110 this->solver_m, &this->fcontainer_m->getRho(), &this->fcontainer_m->getE(),
 111 &this->fcontainer_m->getPhi(), this->preconditioner_params_m));
 112 } else {
 113 this->setFieldSolver(std::make_shared<FieldSolver_t>(
 114 this->solver_m, &this->fcontainer_m->getRho(), &this->fcontainer_m->getE(),
 115 &this->fcontainer_m->getPhi()));
 116 }
 117
 118 this->fsolver_m->initSolver();
 119
 120 this->setLoadBalancer(std::make_shared<LoadBalancer_t>(
 121 this->lbt_m, this->fcontainer_m, this->pcontainer_m, this->fsolver_m));
 122
// NOTE(review): original line 123 is missing from this listing — the
// particle-initialization call presumably lives here; verify against the
// upstream LandauDampingManager.h before relying on this rendering.
 124
// Warm-up solve on a zeroed rho so first-solve setup costs are not
// attributed to the timed "solve" section.
 125 static IpplTimings::TimerRef DummySolveTimer = IpplTimings::getTimer("solveWarmup");
 126 IpplTimings::startTimer(DummySolveTimer);
 127
 128 this->fcontainer_m->getRho() = 0.0;
 129
 130 this->fsolver_m->runSolver();
 131
 132 IpplTimings::stopTimer(DummySolveTimer);
 133
// Scatter charge to the grid, solve for the fields, gather E back to
// the particles, then write the t=0 diagnostics.
 134 this->par2grid();
 135
 136 static IpplTimings::TimerRef SolveTimer = IpplTimings::getTimer("solve");
 137 IpplTimings::startTimer(SolveTimer);
 138
 139 this->fsolver_m->runSolver();
 140
 141 IpplTimings::stopTimer(SolveTimer);
 142
 143 this->grid2par();
 144
 145 this->dump();
 146
 147 m << "Done";
 148 }
149
// NOTE(review): the enclosing function signature (original line 150) is
// missing from this listing — presumably the particle-initialization
// method of this manager; confirm against the upstream header.
// Body: optionally performs a PDF-weighted first domain repartition, then
// samples particle positions via inverse-transform sampling and velocities
// from a standard normal distribution, and assigns per-particle charge.
 151 Inform m("Initialize Particles");
 152
 153 auto* mesh = &this->fcontainer_m->getMesh();
 154 auto* FL = &this->fcontainer_m->getFL();
// NOTE(review): original line 156 (the aliased distribution type) is
// missing from this listing.
 155 using DistR_t =
// Pack per-dimension (alpha, k) pairs for the sampling distribution,
// matching the layout read by the CDF/PDF/Estimate functors.
 157 double parR[2 * Dim];
 158 for (unsigned int i = 0; i < Dim; i++) {
 159 parR[i * 2] = this->alpha_m;
 160 parR[i * 2 + 1] = this->kw_m[i];
 161 }
 162 DistR_t distR(parR);
 163
 164 Vector_t<double, Dim> kw = this->kw_m;
 165 Vector_t<double, Dim> hr = this->hr_m;
 166 Vector_t<double, Dim> origin = this->origin_m;
 167 static IpplTimings::TimerRef domainDecomposition = IpplTimings::getTimer("loadBalance");
// First repartition only when load balancing is enabled (lbt != 1.0) and
// more than one rank is present: rho is filled from the analytic PDF so
// the ORB can partition by expected particle density before any
// particles exist.
 168 if ((this->lbt_m != 1.0) && (ippl::Comm->size() > 1)) {
 169 m << "Starting first repartition" << endl;
 170 IpplTimings::startTimer(domainDecomposition);
 171 this->isFirstRepartition_m = true;
 172 const ippl::NDIndex<Dim>& lDom = FL->getLocalNDIndex();
 173 const int nghost = this->fcontainer_m->getRho().getNghost();
 174 auto rhoview = this->fcontainer_m->getRho().getView();
 175
 176 using index_array_type = typename ippl::RangePolicy<Dim>::index_array_type;
// NOTE(review): original line 177 (the parallel-for invocation opening
// this kernel) is missing from this listing.
 178 "Assign initial rho based on PDF",
 179 this->fcontainer_m->getRho().getFieldRangePolicy(),
 180 KOKKOS_LAMBDA(const index_array_type& args) {
 181 // local to global index conversion
 182 Vector_t<double, Dim> xvec = (args + lDom.first() - nghost + 0.5) * hr + origin;
 183
 184 // ippl::apply accesses the view at the given indices and obtains a
 185 // reference; see src/Expression/IpplOperations.h
 186 ippl::apply(rhoview, args) = distR.getFullPdf(xvec);
 187 });
 188
 189 Kokkos::fence();
 190
 191 this->loadbalancer_m->initializeORB(FL, mesh);
 192 this->loadbalancer_m->repartition(FL, mesh, this->isFirstRepartition_m);
 193 IpplTimings::stopTimer(domainDecomposition);
 194 }
 195
 196 static IpplTimings::TimerRef particleCreation = IpplTimings::getTimer("particlesCreation");
 197 IpplTimings::startTimer(particleCreation);
 198
 199 // Sample particle positions:
// NOTE(review): original lines 200-201 are missing from this listing —
// the 'rlayout' used below is presumably constructed there.
 202
 203 // unsigned int
 204 size_type totalP = this->totalP_m;
// Fixed seed offset by rank gives a reproducible but rank-distinct
// random stream per MPI process.
 205 int seed = 42;
 206 Kokkos::Random_XorShift64_Pool<> rand_pool64((size_type)(seed + 100 * ippl::Comm->rank()));
 207
 208 using samplingR_t =
 209 ippl::random::InverseTransformSampling<double, Dim, Kokkos::DefaultExecutionSpace,
 210 DistR_t>;
 211 Vector_t<double, Dim> rmin = this->rmin_m;
 212 Vector_t<double, Dim> rmax = this->rmax_m;
 213 samplingR_t samplingR(distR, rmax, rmin, rlayout, totalP);
 214 size_type nlocal = samplingR.getLocalSamplesNum();
 215
 216 this->pcontainer_m->create(nlocal);
 217
// Positions via inverse-transform sampling of the Landau PDF.
 218 view_type* R = &(this->pcontainer_m->R.getView());
 219 samplingR.generate(*R, rand_pool64);
 220
 221 view_type* P = &(this->pcontainer_m->P.getView());
 222
// Velocities: standard normal per component (mu = 0, sigma = 1).
 223 double mu[Dim];
 224 double sd[Dim];
 225 for (unsigned int i = 0; i < Dim; i++) {
 226 mu[i] = 0.0;
 227 sd[i] = 1.0;
 228 }
 229 Kokkos::parallel_for(nlocal, ippl::random::randn<double, Dim>(*P, rand_pool64, mu, sd));
 230 Kokkos::fence();
 231 ippl::Comm->barrier();
 232
 233 IpplTimings::stopTimer(particleCreation);
 234
// Each macro-particle carries an equal share of the total charge Q.
 235 this->pcontainer_m->q = this->Q_m / totalP;
 236 m << "particles created and initial conditions assigned " << endl;
 237 }
238
239 void advance() override {
240 if (this->stepMethod_m == "LeapFrog") {
241 LeapFrogStep();
242 } else {
243 throw IpplException(TestName, "Step method is not set/recognized!");
244 }
245 }
246
// NOTE(review): the enclosing function signature (original line 247,
// presumably the leapfrog step method) is missing from this listing, as
// are several timer start/stop lines (262, 264, 267, 269, 299, 301);
// confirm against the upstream header.
 248 // LeapFrog time stepping https://en.wikipedia.org/wiki/Leapfrog_integration
 249 // Here, we assume a constant charge-to-mass ratio of -1 for
 250 // all the particles hence eliminating the need to store mass as
 251 // an attribute
 252 static IpplTimings::TimerRef PTimer = IpplTimings::getTimer("pushVelocity");
 253 static IpplTimings::TimerRef RTimer = IpplTimings::getTimer("pushPosition");
 254 static IpplTimings::TimerRef updateTimer = IpplTimings::getTimer("update");
 255 static IpplTimings::TimerRef domainDecomposition = IpplTimings::getTimer("loadBalance");
 256 static IpplTimings::TimerRef SolveTimer = IpplTimings::getTimer("solve");
 257
 258 double dt = this->dt_m;
 259 std::shared_ptr<ParticleContainer_t> pc = this->pcontainer_m;
 260 std::shared_ptr<FieldContainer_t> fc = this->fcontainer_m;
 261
// First half-kick: velocity update by -0.5*dt*E (charge-to-mass = -1).
 263 pc->P = pc->P - 0.5 * dt * pc->E;
 265
 266 // drift
// Full-step position update.
 268 pc->R = pc->R + dt * pc->P;
 270
 271 // Since the particles have moved spatially update them to correct processors
 272 IpplTimings::startTimer(updateTimer);
 273 pc->update();
 274 IpplTimings::stopTimer(updateTimer);
 275
// Optionally repartition the domain if the load balancer deems the
// current particle distribution imbalanced for the next iteration.
 276 size_type totalP = this->totalP_m;
 277 int it = this->it_m;
 278 bool isFirstRepartition = false;
 279 if (this->loadbalancer_m->balance(totalP, it + 1)) {
 280 IpplTimings::startTimer(domainDecomposition);
 281 auto* mesh = &fc->getRho().get_mesh();
 282 auto* FL = &fc->getFL();
 283 this->loadbalancer_m->repartition(FL, mesh, isFirstRepartition);
 284 IpplTimings::stopTimer(domainDecomposition);
 285 }
 286
 287 // scatter the charge onto the underlying grid
 288 this->par2grid();
 289
 290 // Field solve
 291 IpplTimings::startTimer(SolveTimer);
 292 this->fsolver_m->runSolver();
 293 IpplTimings::stopTimer(SolveTimer);
 294
 295 // gather E field
 296 this->grid2par();
 297
 298 // kick
// Second half-kick completes the leapfrog step.
 300 pc->P = pc->P - 0.5 * dt * pc->E;
 302 }
303
304 void dump() override {
305 static IpplTimings::TimerRef dumpDataTimer = IpplTimings::getTimer("dumpData");
306 IpplTimings::startTimer(dumpDataTimer);
307 dumpLandau(this->fcontainer_m->getE().getView());
308 IpplTimings::stopTimer(dumpDataTimer);
309 }
310
// Compute and append E-field diagnostics for the Landau damping test:
// the volume-weighted sum of Ex^2 ("field energy" proxy) and the max
// |Ex| over the grid, reduced across ranks and written by rank 0 to
// data/FieldLandau_<nranks>_manager.csv.
 311 template <typename View>
 312 void dumpLandau(const View& Eview) {
 313 const int nghostE = this->fcontainer_m->getE().getNghost();
 314
 315 using index_array_type = typename ippl::RangePolicy<Dim>::index_array_type;
 316 double localEx2 = 0, localExNorm = 0;
// NOTE(review): original line 317 (the parallel-reduce invocation opening
// this kernel) is missing from this listing.
 318 "Ex stats", ippl::getRangePolicy(Eview, nghostE),
 319 KOKKOS_LAMBDA(const index_array_type& args, double& E2, double& ENorm) {
 320 // ippl::apply<unsigned> accesses the view at the given indices and obtains a
 321 // reference; see src/Expression/IpplOperations.h
 322 double val = ippl::apply(Eview, args)[0];
 323 double e2 = Kokkos::pow(val, 2);
 324 E2 += e2;
 325
 326 double norm = Kokkos::fabs(ippl::apply(Eview, args)[0]);
 327 if (norm > ENorm) {
 328 ENorm = norm;
 329 }
 330 },
 331 Kokkos::Sum<double>(localEx2), Kokkos::Max<double>(localExNorm));
 332
// Cross-rank sum of the local Ex^2 contributions.
 333 double globaltemp = 0.0;
 334 ippl::Comm->reduce(localEx2, globaltemp, 1, std::plus<double>());
 335
// Multiply by the cell volume (product of mesh spacings) to turn the
// grid sum into an integral approximation.
 336 double fieldEnergy =
 337 std::reduce(this->fcontainer_m->getHr().begin(), this->fcontainer_m->getHr().end(),
 338 globaltemp, std::multiplies<double>());
 339
// Cross-rank maximum of |Ex|.
 340 double ExAmp = 0.0;
 341 ippl::Comm->reduce(localExNorm, ExAmp, 1, std::greater<double>());
 342
// Rank 0 appends one CSV row per step; the header row is written only at
// t = 0 (tolerance 1e-14 guards against floating-point time).
 343 if (ippl::Comm->rank() == 0) {
 344 std::stringstream fname;
 345 fname << "data/FieldLandau_";
 346 fname << ippl::Comm->size();
 347 fname << "_manager";
 348 fname << ".csv";
 349 Inform csvout(NULL, fname.str().c_str(), Inform::APPEND);
 350 csvout.precision(16);
 351 csvout.setf(std::ios::scientific, std::ios::floatfield);
 352 if (std::fabs(this->time_m) < 1e-14) {
 353 csvout << "time, Ex_field_energy, Ex_max_norm" << endl;
 354 }
 355 csvout << this->time_m << " " << fieldEnergy << " " << ExAmp << endl;
 356 }
 357 ippl::Comm->barrier();
 358 }
359};
360#endif
typename ippl::detail::ViewType< ippl::Vector< double, Dim >, 1 >::view_type view_type
KOKKOS_FUNCTION double PDF(const Vector_t< double, Dim > &xvec, const double &alpha, const Vector_t< double, Dim > &kw, const unsigned Dim)
double CDF(const double &x, const double &alpha, const double &k)
const double pi
typename ippl::detail::ViewType< ippl::Vector< double, Dim >, 1 >::view_type view_type
constexpr unsigned Dim
ippl::detail::size_type size_type
Definition datatypes.h:23
const char * TestName
ippl::Vector< T, Dim > Vector_t
Definition datatypes.h:38
Inform & endl(Inform &inf)
Definition Inform.cpp:42
KOKKOS_INLINE_FUNCTION constexpr decltype(auto) apply(const View &view, const Coords &coords)
RangePolicy< View::rank, typenameView::execution_space, PolicyArgs... >::policy_type getRangePolicy(const View &view, int shift=0)
void parallel_for(const std::string &name, const ExecPolicy &policy, const FunctorType &functor)
void parallel_reduce(const std::string &name, const ExecPolicy &policy, const FunctorType &functor, ReducerArgument &&... reducer)
std::unique_ptr< mpi::Communicator > Comm
Definition Ippl.h:22
KOKKOS_INLINE_FUNCTION Vector< int, Dim > first() const
Definition NDIndex.hpp:170
void setParticleContainer(std::shared_ptr< ParticleContainer< T, Dim > > pcontainer)
Definition PicManager.h:55
The class that represents a distribution.
A class for inverse transform sampling.
Functor to generate random numbers from a normal distribution.
Definition Randn.h:26
@ APPEND
Definition Inform.h:45
FmtFlags_t setf(FmtFlags_t setbits, FmtFlags_t field)
Definition Inform.h:101
int precision() const
Definition Inform.h:111
Timing::TimerRef TimerRef
static TimerRef getTimer(const char *nm)
static void stopTimer(TimerRef t)
static void startTimer(TimerRef t)
::ippl::Vector< index_type, Dim > index_array_type
Vector_t< double, Dim > kw_m
void par2grid() override
Particle-to-grid operation.
Vector_t< double, Dim > origin_m
ippl::NDIndex< Dim > domain_m
Vector_t< int, Dim > nr_m
Vector_t< double, Dim > hr_m
Vector_t< double, Dim > rmin_m
const std::string & getSolver() const
bool isFirstRepartition_m
std::string stepMethod_m
std::array< bool, Dim > decomp_m
std::string solver_m
size_type totalP_m
Vector_t< double, Dim > rmax_m
AlpineManager(size_type totalP_, int nt_, Vector_t< int, Dim > &nr_, double lbt_, std::string &solver_, std::string &stepMethod_, std::vector< std::string > preconditioner_params={})
void grid2par() override
Grid-to-particle operation.
KOKKOS_INLINE_FUNCTION double operator()(double x, unsigned int d, const double *params_p) const
KOKKOS_INLINE_FUNCTION double operator()(double x, unsigned int d, double const *params_p) const
KOKKOS_INLINE_FUNCTION double operator()(double u, unsigned int d, double const *params_p) const
ParticleContainer< T, Dim > ParticleContainer_t
void advance() override
A method that should be used to execute/advance a step of simulation.
LoadBalancer< T, Dim > LoadBalancer_t
FieldSolver< T, Dim > FieldSolver_t
LandauDampingManager(size_type totalP_, int nt_, Vector_t< int, Dim > &nr_, double lbt_, std::string &solver_, std::string &stepMethod_, std::vector< std::string > preconditioner_params_)
void dumpLandau(const View &Eview)
void pre_run() override
A method that should be used for setting up the simulation.
LandauDampingManager(size_type totalP_, int nt_, Vector_t< int, Dim > &nr_, double lbt_, std::string &solver_, std::string &stepMethod_)
FieldContainer< T, Dim > FieldContainer_t