PetIBM 0.5.4
Toolbox and applications of the immersed-boundary method for distributed-memory architectures
cartesianmesh.cpp
// here go the C++ STL headers
#include <algorithm>
#include <iostream>
#include <numeric>
#include <sstream>

// here go the PETSc headers
#include <petscvec.h>

// here go the headers from our PetIBM
#include <petibm/cartesianmesh.h>
#include <petibm/io.h>
#include <petibm/misc.h>
#include <petibm/parser.h>

namespace petibm
{
namespace mesh
{
using namespace type;

// implementation of CartesianMesh::CartesianMesh
CartesianMesh::CartesianMesh(const MPI_Comm &world, const YAML::Node &node)
{
    init(world, node);
} // CartesianMesh
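
// A minimal usage sketch (illustrative only; the YAML file name and the
// surrounding driver code are hypothetical, not part of this file):
//
//     YAML::Node config = YAML::LoadFile("config.yaml");
//     petibm::mesh::CartesianMesh mesh(PETSC_COMM_WORLD, config);
//     // mesh.coord[3][0][i]: x-coordinate of the i-th pressure point
//     // mesh.dL[0][0][i]:    x-spacing at the i-th u-velocity point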

// implementation of CartesianMesh::~CartesianMesh
CartesianMesh::~CartesianMesh()
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;
    PetscBool finalized;

    ierr = PetscFinalized(&finalized); CHKERRV(ierr);
    if (finalized) return;

    std::vector<AO>().swap(ao); // DO NOT DESTROY UNDERLYING AOs!!
} // ~CartesianMesh

// implementation of CartesianMesh::destroy
PetscErrorCode CartesianMesh::destroy()
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    type::RealVec3D().swap(dLTrue);
    type::RealVec3D().swap(coordTrue);
    std::vector<AO>().swap(ao); // DO NOT DESTROY UNDERLYING AOs!!

    ierr = MeshBase::destroy(); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // destroy

// implementation of CartesianMesh::init
PetscErrorCode CartesianMesh::init(const MPI_Comm &world,
                                   const YAML::Node &node)
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    // store the address of the communicator
    // note: this is a bad practice; shared_ptr is not for stack variables!!
    comm = world;

    // set rank and size
    ierr = MPI_Comm_size(comm, &mpiSize); CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm, &mpiRank); CHKERRQ(ierr);

    // set the default sizes and values for members
    // The default values are chosen for 2D cases. We want to eliminate the use
    // of templates and also `if` conditions that determine the dimension. So
    // with carefully chosen default values, we can handle the 2D case with 3D
    // code in many places.
    // Index convention: 0, 1, 2 are the u-, v-, and w-velocity grids, 3 is the
    // pressure grid, and 4 is the vertex grid.
    min = RealVec1D(3, 0.0);
    max = RealVec1D(3, 1.0);
    n = IntVec2D(5, IntVec1D(3, 1));
    coordTrue = RealVec3D(5, RealVec2D(3, RealVec1D(1, 0.0)));
    coord = GhostedVec3D(5, GhostedVec2D(3, nullptr));
    dLTrue = RealVec3D(5, RealVec2D(3, RealVec1D(1, 1.0)));
    dL = GhostedVec3D(5, GhostedVec2D(3, nullptr));
    da = std::vector<DM>(5, PETSC_NULL);
    nProc = IntVec1D(3, PETSC_DECIDE);
    bg = IntVec2D(5, IntVec1D(3, 0));
    ed = IntVec2D(5, IntVec1D(3, 1));
    m = IntVec2D(5, IntVec1D(3, 0));
    ao = std::vector<AO>(4);
    UNLocalAllProcs = IntVec2D(3, IntVec1D(mpiSize, 0));
    UPackNLocalAllProcs = IntVec1D(mpiSize, 0);
    offsetsAllProcs = IntVec2D(3, IntVec1D(mpiSize, 0));
    offsetsPackAllProcs = IntVec1D(mpiSize, 0);

    // index 3 represents the pressure mesh; min & max always refer to the
    // pressure mesh
    ierr = parser::parseMesh(node["mesh"], dim, min, max, n[3], dLTrue[3]);
    CHKERRQ(ierr);

    // check periodic BCs
    IntVec2D bcTypes;
    RealVec2D bcValues;
    ierr = parser::parseBCs(node, bcTypes, bcValues); CHKERRQ(ierr);
    ierr = misc::checkPeriodicBC(bcTypes, periodic); CHKERRQ(ierr);

    // create raw grid information
    ierr = createPressureMesh(); CHKERRQ(ierr);
    ierr = createVertexMesh(); CHKERRQ(ierr);
    ierr = createVelocityMesh(); CHKERRQ(ierr);
    ierr = MPI_Barrier(comm); CHKERRQ(ierr);

    // create PETSc DMs
    ierr = initDMDA(); CHKERRQ(ierr);

    // create a std::string that can be used in `printInfo` and output streams
    ierr = createInfoString(); CHKERRQ(ierr);

    // all processes should be synchronized
    ierr = MPI_Barrier(comm); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // init

// implementation of CartesianMesh::createPressureMesh
PetscErrorCode CartesianMesh::createPressureMesh()
{
    PetscFunctionBeginUser;

    // loop through all axes to get the coordinates of pressure points
    for (int i = 0; i < dim; ++i)
    {
        coordTrue[3][i].resize(n[3][i]);

        // the following three statements calculate the coordinates of the
        // pressure points
        std::partial_sum(dLTrue[3][i].begin(), dLTrue[3][i].end(),
                         coordTrue[3][i].begin());

        auto f = [i, this](double &a, double &b) -> double {
            return a + this->min[i] - 0.5 * b;
        };

        std::transform(coordTrue[3][i].begin(), coordTrue[3][i].end(),
                       dLTrue[3][i].begin(), coordTrue[3][i].begin(), f);

        // no ghost points in the pressure grid, so dL[3] points directly to
        // dLTrue[3]
        dL[3][i] = &dLTrue[3][i][0];

        // same for coordinates
        coord[3][i] = &coordTrue[3][i][0];
    }

    // total number of pressure points
    pN = n[3][0] * n[3][1] * n[3][2]; // for 2D, n[3][2] should be 1

    // in 2D simulations, we still use the variable dL in the z-direction
    if (dim == 2)
    {
        dL[3][2] = &dLTrue[3][2][0];
        coord[3][2] = &coordTrue[3][2][0];
    }

    PetscFunctionReturn(0);
} // createPressureMesh
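
// Worked example for one axis (hypothetical values): with min[i] = 0.0 and
// pressure cell widths dLTrue[3][i] = {0.5, 0.5, 1.0}, partial_sum produces
// the running upper faces {0.5, 1.0, 2.0}, and the transform a + min - 0.5 * b
// pulls each value back by half a cell width, giving the cell centers
// coordTrue[3][i] = {0.25, 0.75, 1.5}.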

// implementation of CartesianMesh::createVertexMesh
PetscErrorCode CartesianMesh::createVertexMesh()
{
    PetscFunctionBeginUser;

    // loop through all axes to get the coordinates of mesh vertices
    // note: index 3 means the pressure mesh; 4 means the vertices
    for (int i = 0; i < dim; ++i)
    {
        // the number of vertices is one more than the number of pressure cells
        n[4][i] = n[3][i] + 1;
        coordTrue[4][i].resize(n[4][i]);

        // create coordinates of vertices
        std::partial_sum(
            dLTrue[3][i].begin(), dLTrue[3][i].end(),
            coordTrue[4][i].begin() + 1); // first assume coord[4][i][0] = 0.0

        // shift according to the real coord[4][i][0], which is min
        std::for_each(coordTrue[4][i].begin(), coordTrue[4][i].end(),
                      [i, this](double &a) -> void { a += this->min[i]; });

        // no ghost points in the vertex mesh, so coord[4][i] points directly
        // to coordTrue[4][i]
        coord[4][i] = &coordTrue[4][i][0];
    }

    // in 2D simulations, we still use the variable coord in the z-direction
    if (dim == 2) coord[4][2] = &coordTrue[4][2][0];

    PetscFunctionReturn(0);
} // createVertexMesh
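
// Worked example for one axis (same hypothetical spacings as above): with
// min[i] = 0.0 and dLTrue[3][i] = {0.5, 0.5, 1.0}, the partial sum written
// starting at index 1 yields the n[3][i] + 1 = 4 vertex coordinates
// coordTrue[4][i] = {0.0, 0.5, 1.0, 2.0}, running from min[i] to max[i].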

// implementation of CartesianMesh::createVelocityMesh
PetscErrorCode CartesianMesh::createVelocityMesh()
{
    PetscFunctionBeginUser;

    // initialize UN
    UN = 0;

    // loop through all velocity components
    // note: indices 0, 1, 2 represent u, v, w respectively
    for (int comp = 0; comp < dim; comp++)
    {
        // loop through each direction, i.e., the x, y, z directions
        for (int dir = 0; dir < dim; ++dir)
        {
            // when the direction corresponds to the velocity component
            if (dir == comp)
            {
                // there will be no velocity point on the boundary (it's a
                // ghost point)
                n[comp][dir] = n[3][dir] - 1;

                // we store the dL and coord of ghost points, so the size is n+2
                dLTrue[comp][dir].resize(n[comp][dir] + 2);
                coordTrue[comp][dir].resize(n[comp][dir] + 2);

                // coordinates will match those of the vertex grid
                coordTrue[comp][dir] = coordTrue[4][dir];

                // the cell size will be half of the sum of the adjacent
                // pressure cell sizes
                auto f = [](const PetscReal &x,
                            const PetscReal &y) -> PetscReal {
                    return 0.5 * (x + y);
                };

                // note: adjacent_difference cannot automatically skip the
                // first element
                // note: pressure doesn't have ghost points, while velocity
                // does.
                std::adjacent_difference(dLTrue[3][dir].begin(),
                                         dLTrue[3][dir].end(),
                                         dLTrue[comp][dir].begin(), f);

                // if the BC in this direction is periodic, we append one more
                // velocity point at the end
                if (periodic[comp][dir])
                {
                    // add 1 to the number of valid grid points
                    n[comp][dir] += 1;

                    // the space for the right ghost point in dLTrue is now
                    // used to store the grid point on the periodic BC
                    dLTrue[comp][dir].back() =
                        f(dLTrue[3][dir][0], dLTrue[3][dir].back());

                    // the smaller ghost point will be the same as the last
                    // valid point
                    dLTrue[comp][dir][0] = dLTrue[comp][dir].back();

                    // we have to create one more space for the larger ghost
                    // point, and its dL is the same as the 1st valid point
                    dLTrue[comp][dir].push_back(dLTrue[comp][dir][1]);

                    // add the coordinate of the extra point, which is the
                    // first interior point plus a shift of the domain size
                    coordTrue[comp][dir].push_back(max[dir] +
                                                   dLTrue[3][dir].front());
                }
                // for other cases, we simply get the dL for the larger ghost
                // note: the smaller ghost point has already got its value from
                // adjacent_difference, which is the dL of the 1st pressure cell
                else
                {
                    dLTrue[comp][dir].back() = dLTrue[3][dir].back();
                }
            }
            // other directions
            else
            {
                n[comp][dir] = n[3][dir];

                // we store the dL of ghost points, so the size is n+2
                dLTrue[comp][dir].resize(n[comp][dir] + 2);
                coordTrue[comp][dir].resize(n[comp][dir] + 2);

                // coordinates and cell sizes will match those of the pressure
                // points, except for the two ghost points
                std::copy(coordTrue[3][dir].begin(), coordTrue[3][dir].end(),
                          coordTrue[comp][dir].begin() + 1);
                std::copy(dLTrue[3][dir].begin(), dLTrue[3][dir].end(),
                          dLTrue[comp][dir].begin() + 1);

                // get the dL for the two ghost points
                // for periodic BCs, it's the same as the 1st cell on the other
                // side
                if (periodic[comp][dir])
                {
                    // set the coordinates of the two ghost points
                    coordTrue[comp][dir].front() =
                        min[dir] - dLTrue[3][dir].back() / 2.0;
                    coordTrue[comp][dir].back() =
                        max[dir] + dLTrue[3][dir].front() / 2.0;

                    // dL
                    dLTrue[comp][dir].front() = dLTrue[3][dir].back();
                    dLTrue[comp][dir].back() = dLTrue[3][dir].front();
                }
                else // for other BCs, it's just a mirror of the first/last
                     // cells
                {
                    // set the coordinates of the two ghost points
                    coordTrue[comp][dir].front() =
                        min[dir] - dLTrue[3][dir].front() / 2.0;
                    coordTrue[comp][dir].back() =
                        max[dir] + dLTrue[3][dir].back() / 2.0;

                    // dL
                    dLTrue[comp][dir].front() = dLTrue[3][dir].front();
                    dLTrue[comp][dir].back() = dLTrue[3][dir].back();
                }
            }

            // dL and coord will point to the 1st valid grid point, so we can
            // access the 1st ghost point with the index -1, e.g., dL[0][0][-1]
            dL[comp][dir] = &dLTrue[comp][dir][1];
            coord[comp][dir] = &coordTrue[comp][dir][1];
        }

        // add to UN; for 2D, n in the z-direction should be 1
        UN += (n[comp][0] * n[comp][1] * n[comp][2]);
    }

    // for 2D simulations, we still use dL in the z-direction in our code
    if (dim == 2)
    {
        dL[0][2] = &dLTrue[0][2][0];
        dL[1][2] = &dLTrue[1][2][0];
        dL[2][0] = &dLTrue[2][0][0];
        dL[2][1] = &dLTrue[2][1][0];
        dL[2][2] = &dLTrue[2][2][0];

        coord[0][2] = &coordTrue[0][2][0];
        coord[1][2] = &coordTrue[1][2][0];
        coord[2][0] = &coordTrue[2][0][0];
        coord[2][1] = &coordTrue[2][1][0];
        coord[2][2] = &coordTrue[2][2][0];
    }

    PetscFunctionReturn(0);
} // createVelocityMesh
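
// Worked example for the u-grid in x (dir == comp == 0), non-periodic, with
// hypothetical pressure widths dLTrue[3][0] = {0.5, 0.5, 1.0}: n[0][0] becomes
// 2, adjacent_difference with the averaging lambda writes {0.5, 0.5, 0.75}
// into dLTrue[0][0], and the non-periodic branch sets the last (ghost) entry
// to 1.0; dL[0][0] then exposes the valid spacings {0.5, 0.75}, with ghost
// values 0.5 at index -1 and 1.0 at index n[0][0].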

// implementation of CartesianMesh::createInfoString
PetscErrorCode CartesianMesh::createInfoString()
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    std::stringstream ss;

    if (mpiRank == 0)
    {
        ss << std::string(80, '=') << std::endl;
        ss << "Cartesian Staggered Grid:" << std::endl;
        ss << std::string(80, '=') << std::endl;

        ss << "\tDimension: " << dim << std::endl;
        ss << std::endl;

        ss << "\tDomain Range: "
           << "[" << min[0] << ", " << max[0] << "]; "
           << "[" << min[1] << ", " << max[1] << "]";
        if (dim == 3) ss << "; [" << min[2] << ", " << max[2] << "]";
        ss << std::endl;
        ss << std::endl;

        ss << "\tNumber of Cells (Nx x Ny" << ((dim == 2) ? "" : " x Nz")
           << "):\n";
        ss << "\t\tPressure Grid: ";
        ss << n[3][0];
        for (int dir = 1; dir < dim; ++dir) ss << " x " << n[3][dir];
        ss << std::endl;

        for (int comp = 0; comp < dim; ++comp)
        {
            ss << "\t\t" << fd2str[Field(comp)] << "-Velocity Grid: ";
            ss << n[comp][0];
            for (int dir = 1; dir < dim; ++dir) ss << " x " << n[comp][dir];
            ss << std::endl;
        }
        ss << std::endl;

        ss << "\tGrid Decomposition:" << std::endl;
        ss << "\t\tMPI Processes: " << nProc[0] << " x " << nProc[1] << " x "
           << nProc[2] << std::endl;
    }

    ierr = addLocalInfoString(ss); CHKERRQ(ierr);
    ss << std::endl;

    info = ss.str();

    PetscFunctionReturn(0);
} // createInfoString

// implementation of CartesianMesh::addLocalInfoString
PetscErrorCode CartesianMesh::addLocalInfoString(std::stringstream &ss)
{
    PetscFunctionBeginUser;

    std::string pre("\t\t");

    ss << pre << "Rank " << mpiRank << ": " << std::endl;
    ss << pre << "\tPressure Grid: ";
    ss << "[" << bg[3][0] << ", " << ed[3][0] << "), ";
    ss << "[" << bg[3][1] << ", " << ed[3][1] << "), ";
    ss << "[" << bg[3][2] << ", " << ed[3][2] << ") ";
    ss << std::endl;

    for (int comp = 0; comp < dim; ++comp)
    {
        ss << pre << "\t" << fd2str[Field(comp)] << "-Velocity Grid: ";
        ss << "[" << bg[comp][0] << ", " << ed[comp][0] << "), ";
        ss << "[" << bg[comp][1] << ", " << ed[comp][1] << "), ";
        ss << "[" << bg[comp][2] << ", " << ed[comp][2] << ") ";
        ss << std::endl;
    }

    PetscFunctionReturn(0);
} // addLocalInfoString

// implementation of CartesianMesh::initDMDA
PetscErrorCode CartesianMesh::initDMDA()
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = createPressureDMDA(); CHKERRQ(ierr);
    ierr = createVelocityPack(); CHKERRQ(ierr);

    // gather the numbers of local points of each velocity component
    for (PetscInt f = 0; f < dim; ++f)
    {
        // temporarily use UNLocal
        UNLocal = m[f][0] * m[f][1] * m[f][2];

        // get the number of local points of this velocity component from all
        // processes
        ierr = MPI_Barrier(comm); CHKERRQ(ierr);
        ierr = MPI_Allgather(&UNLocal, 1, MPIU_INT, UNLocalAllProcs[f].data(),
                             1, MPIU_INT, comm); CHKERRQ(ierr);
    }

    // total number of local velocity points
    UNLocal = UNLocalAllProcs[0][mpiRank] + UNLocalAllProcs[1][mpiRank] +
              UNLocalAllProcs[2][mpiRank];

    // offset on each process of each velocity component
    for (PetscInt f = 0; f < dim; ++f)
    {
        for (PetscMPIInt r = mpiSize - 1; r > 0; --r)
            offsetsAllProcs[f][r] = UNLocalAllProcs[f][r - 1];

        for (PetscMPIInt r = 1; r < mpiSize; ++r)
            offsetsAllProcs[f][r] += offsetsAllProcs[f][r - 1];
    }

    // calculate the total number of local velocity points on each process
    for (PetscMPIInt r = 0; r < mpiSize; ++r)
        UPackNLocalAllProcs[r] = UNLocalAllProcs[0][r] + UNLocalAllProcs[1][r] +
                                 UNLocalAllProcs[2][r];

    // offsets of all velocity points on each process in packed form
    for (PetscMPIInt r = mpiSize - 1; r > 0; --r)
        offsetsPackAllProcs[r] = UPackNLocalAllProcs[r - 1];

    for (PetscMPIInt r = 1; r < mpiSize; ++r)
        offsetsPackAllProcs[r] += offsetsPackAllProcs[r - 1];

    // total number of local pressure points
    pNLocal = m[3][0] * m[3][1] * m[3][2];

    PetscFunctionReturn(0);
} // initDMDA
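
// Worked example of the offset computation (hypothetical numbers, 3 processes):
// if UNLocalAllProcs[f] = {6, 4, 5} for some field f, the two loops above turn
// offsetsAllProcs[f] into the exclusive prefix sums {0, 6, 10}; likewise, with
// UPackNLocalAllProcs = {12, 9, 11}, offsetsPackAllProcs becomes {0, 12, 21},
// i.e., the first packed index owned by each process.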

// implementation of CartesianMesh::createSingleDMDA
PetscErrorCode CartesianMesh::createSingleDMDA(const PetscInt &i)
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    DMDALocalInfo lclInfo;

    switch (dim)
    {
        case 2:
            ierr = DMDACreate2d(
                comm,
                (periodic[0][0]) ? DM_BOUNDARY_PERIODIC : DM_BOUNDARY_GHOSTED,
                (periodic[1][1]) ? DM_BOUNDARY_PERIODIC : DM_BOUNDARY_GHOSTED,
                DMDA_STENCIL_BOX, n[i][0], n[i][1], nProc[0], nProc[1], 1, 1,
                nullptr, nullptr, &da[i]); CHKERRQ(ierr);
            break;
        case 3:
            ierr = DMDACreate3d(
                comm,
                (periodic[0][0]) ? DM_BOUNDARY_PERIODIC : DM_BOUNDARY_GHOSTED,
                (periodic[1][1]) ? DM_BOUNDARY_PERIODIC : DM_BOUNDARY_GHOSTED,
                (periodic[2][2]) ? DM_BOUNDARY_PERIODIC : DM_BOUNDARY_GHOSTED,
                DMDA_STENCIL_BOX, n[i][0], n[i][1], n[i][2], nProc[0], nProc[1],
                nProc[2], 1, 1, nullptr, nullptr, nullptr, &da[i]);
            CHKERRQ(ierr);
            break;
    }

    ierr = DMSetUp(da[i]); CHKERRQ(ierr);

    ierr = DMDAGetLocalInfo(da[i], &lclInfo); CHKERRQ(ierr);

    bg[i][0] = lclInfo.xs;
    bg[i][1] = lclInfo.ys;
    bg[i][2] = lclInfo.zs;

    m[i][0] = lclInfo.xm;
    m[i][1] = lclInfo.ym;
    m[i][2] = lclInfo.zm;

    for (unsigned int comp = 0; comp < 3; ++comp)
        ed[i][comp] = bg[i][comp] + m[i][comp];

    PetscFunctionReturn(0);
} // createSingleDMDA
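
// After this call, bg[i], m[i], and ed[i] hold, for each direction, the first
// owned index, the number of owned points, and the past-the-end index of this
// process's local (non-ghosted) DMDA sub-domain.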

// implementation of CartesianMesh::createPressureDMDA
PetscErrorCode CartesianMesh::createPressureDMDA()
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = createSingleDMDA(3); CHKERRQ(ierr);

    ierr = DMDAGetAO(da[3], &ao[3]); CHKERRQ(ierr);

    ierr = DMDAGetInfo(da[3], nullptr, nullptr, nullptr, nullptr, &nProc[0],
                       &nProc[1], &nProc[2], nullptr, nullptr, nullptr, nullptr,
                       nullptr, nullptr); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // createPressureDMDA
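
// Note: nProc is filled here from the pressure DMDA's actual process layout,
// and createSingleDMDA then passes the same nProc when building the velocity
// DMDAs, so all fields end up with an identical domain decomposition.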

// implementation of CartesianMesh::createVelocityPack
PetscErrorCode CartesianMesh::createVelocityPack()
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = DMCompositeCreate(comm, &UPack); CHKERRQ(ierr);

    for (int i = 0; i < dim; ++i)
    {
        ierr = createSingleDMDA(i); CHKERRQ(ierr);
        ierr = DMDAGetAO(da[i], &ao[i]); CHKERRQ(ierr);
        ierr = DMCompositeAddDM(UPack, da[i]); CHKERRQ(ierr);
    }

    PetscFunctionReturn(0);
} // createVelocityPack
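
// A packed global velocity vector can later be created from this composite
// with the usual PETSc call, e.g., DMCreateGlobalVector(UPack, &vel); that
// example call is illustrative only and does not appear in this file.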

// implementation of CartesianMesh::getNaturalIndex
PetscErrorCode CartesianMesh::getNaturalIndex(const PetscInt &f,
                                              const MatStencil &s,
                                              PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = getNaturalIndex(f, s.i, s.j, s.k, idx); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // getNaturalIndex

// implementation of CartesianMesh::getNaturalIndex
PetscErrorCode CartesianMesh::getNaturalIndex(const PetscInt &f,
                                              const PetscInt &i,
                                              const PetscInt &j,
                                              const PetscInt &k,
                                              PetscInt &idx) const
{
    PetscFunctionBeginUser;

#ifndef NDEBUG
    if ((i < -1) || (i > n[f][0]) || (j < -1) || (j > n[f][1]) || (k < -1) ||
        (k > n[f][2]))
        SETERRQ4(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG,
                 "Stencil (%d, %d, %d) of field %d is out of domain.\n", i, j,
                 k, f);
#endif

    if (i == -1)
    {
        if (periodic[0][0])
            idx = (n[f][0] - 1) + j * n[f][0] +
                  ((dim == 3) ? (k * n[f][1] * n[f][0]) : 0);
        else
            idx = -1;

        PetscFunctionReturn(0);
    }

    if (i == n[f][0])
    {
        if (periodic[0][0])
            idx = j * n[f][0] + ((dim == 3) ? (k * n[f][1] * n[f][0]) : 0);
        else
            idx = -1;

        PetscFunctionReturn(0);
    }

    if (j == -1)
    {
        if (periodic[0][1])
            idx = i + (n[f][1] - 1) * n[f][0] +
                  ((dim == 3) ? (k * n[f][1] * n[f][0]) : 0);
        else
            idx = -1;

        PetscFunctionReturn(0);
    }

    if (j == n[f][1])
    {
        if (periodic[0][1])
            idx = i + ((dim == 3) ? (k * n[f][1] * n[f][0]) : 0);
        else
            idx = -1;

        PetscFunctionReturn(0);
    }

    if (k == -1)
    {
        if (periodic[0][2])
            idx = i + j * n[f][0] +
                  ((dim == 3) ? ((n[f][2] - 1) * n[f][1] * n[f][0]) : 0);
        else
            idx = -1;

        PetscFunctionReturn(0);
    }

    if (k == n[f][2])
    {
        if (periodic[0][2])
            idx = i + j * n[f][0];
        else
            idx = -1;

        PetscFunctionReturn(0);
    }

    // some overhead due to the implicit if-conditions, but this is also what
    // PETSc does in its source code for similar functions
    idx = i + j * n[f][0] + ((dim == 3) ? (k * n[f][1] * n[f][0]) : 0);

    PetscFunctionReturn(0);
} // getNaturalIndex
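
// Worked example (hypothetical sizes): for a 2D field f with n[f] = {4, 3, 1},
// the interior point (i, j) = (2, 1) maps to idx = 2 + 1 * 4 = 6; if the
// x-direction is periodic, the ghost request i = -1, j = 1 wraps around to
// idx = (4 - 1) + 1 * 4 = 7, whereas without periodicity it yields idx = -1.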

// implementation of CartesianMesh::getLocalIndex
PetscErrorCode CartesianMesh::getLocalIndex(const PetscInt &f,
                                            const MatStencil &s,
                                            PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = DMDAConvertToCell(da[f], s, &idx); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // getLocalIndex

// implementation of CartesianMesh::getLocalIndex
PetscErrorCode CartesianMesh::getLocalIndex(const PetscInt &f,
                                            const PetscInt &i,
                                            const PetscInt &j,
                                            const PetscInt &k,
                                            PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = getLocalIndex(f, {k, j, i, 0}, idx); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // getLocalIndex

// implementation of CartesianMesh::getGlobalIndex
PetscErrorCode CartesianMesh::getGlobalIndex(const PetscInt &f,
                                             const MatStencil &s,
                                             PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = getNaturalIndex(f, s, idx); CHKERRQ(ierr);
    ierr = AOApplicationToPetsc(ao[f], 1, &idx); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // getGlobalIndex

// implementation of CartesianMesh::getGlobalIndex
PetscErrorCode CartesianMesh::getGlobalIndex(const PetscInt &f,
                                             const PetscInt &i,
                                             const PetscInt &j,
                                             const PetscInt &k,
                                             PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = getGlobalIndex(f, {k, j, i, 0}, idx); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // getGlobalIndex

// implementation of CartesianMesh::getPackedGlobalIndex
PetscErrorCode CartesianMesh::getPackedGlobalIndex(const PetscInt &f,
                                                   const MatStencil &s,
                                                   PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    PetscMPIInt p;

    PetscInt unPackedIdx;

    // get the global index of the target in sub-DM (un-packed DM)
    ierr = getGlobalIndex(f, s, unPackedIdx); CHKERRQ(ierr);

    // 1. pressure isn't in packed DM, so packed-ID = global-ID.
    // 2. if unPackedIdx is -1, then packed ID is -1, too.
    if ((f == 3) || (unPackedIdx == -1))
    {
        idx = unPackedIdx;
        PetscFunctionReturn(0);
    }

    // find which process owns the target
    p = std::upper_bound(offsetsAllProcs[f].begin(), offsetsAllProcs[f].end(),
                         unPackedIdx) -
        offsetsAllProcs[f].begin() - 1;

    // the beginning index of process p in packed DM
    idx = offsetsPackAllProcs[p];

    // offset due to previous DMs
    for (PetscInt i = 0; i < f; ++i)
        idx += UNLocalAllProcs[i][p]; // offset from previous DMs

    // the packed index will be this
    idx += (unPackedIdx - offsetsAllProcs[f][p]);

    PetscFunctionReturn(0);
} // getPackedGlobalIndex
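
// Worked example (hypothetical 2D run on 2 processes): suppose
// UNLocalAllProcs = {{6, 6}, {4, 4}} for u and v, so offsetsAllProcs =
// {{0, 6}, {0, 4}} and offsetsPackAllProcs = {0, 10}. For f = 1 (v) and
// unPackedIdx = 5, upper_bound locates owner p = 1, and the packed index is
// 10 (start of rank 1) + 6 (rank 1's u-points) + (5 - 4) = 17.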

// implementation of CartesianMesh::getPackedGlobalIndex
PetscErrorCode CartesianMesh::getPackedGlobalIndex(const PetscInt &f,
                                                   const PetscInt &i,
                                                   const PetscInt &j,
                                                   const PetscInt &k,
                                                   PetscInt &idx) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    ierr = getPackedGlobalIndex(f, {k, j, i, 0}, idx); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // getPackedGlobalIndex

// implementation of CartesianMesh::write
PetscErrorCode CartesianMesh::write(const std::string &filePath) const
{
    PetscFunctionBeginUser;

    PetscErrorCode ierr;

    if (mpiRank == 0)
    {
        PetscFileMode mode = FILE_MODE_WRITE;
        std::vector<std::string> names{"x", "y", "z"};

        for (unsigned int f = 0; f < 5; ++f)
        {
            std::string group = type::fd2str[type::Field(f)];

            ierr = io::writeHDF5Vecs(PETSC_COMM_SELF, filePath, group, names,
                                     n[f], coord[f], mode); CHKERRQ(ierr);

            mode = FILE_MODE_APPEND;
        }
    }

    ierr = MPI_Barrier(comm); CHKERRQ(ierr);

    PetscFunctionReturn(0);
} // write
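
// The resulting file thus contains one HDF5 group per field (named through
// type::fd2str), each holding the 1-D coordinate datasets "x", "y", and "z";
// only rank 0 writes, and the final barrier keeps the other ranks in sync.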

// implementation of CartesianMesh::isPointOnLocalProc
PetscBool CartesianMesh::isPointOnLocalProc(const type::RealVec1D &point,
                                            const type::Field &field)
{
    PetscFunctionBeginUser;

    for (PetscInt d = 0; d < dim; ++d)
    {
        PetscInt start = bg[field][d], end = ed[field][d];
        if (point[d] < coord[field][d][start] ||
            point[d] >= coord[field][d][end])
        {
            return PETSC_FALSE;
        }
    }

    return PETSC_TRUE;
} // isPointOnLocalProc
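
// Note: the test uses a half-open interval in each direction, so a point that
// coincides with coord[field][d][ed[field][d]] is reported as not local to
// this process.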

} // end of namespace mesh
} // end of namespace petibm
Definition of class mesh::CartesianMesh.
type::IntVec1D UPackNLocalAllProcs
Number of local packed velocity points (without ghost points) for all processes.
virtual PetscErrorCode getNaturalIndex(const PetscInt &f, const MatStencil &s, PetscInt &idx) const
Get the natural index of a point by providing MatStencil.
type::RealVec3D dLTrue
Underlying data for mesh point spacing.
Definition: cartesianmesh.h:95
PetscErrorCode createSingleDMDA(const PetscInt &i)
Function for creating a single DMDA.
virtual PetscErrorCode getGlobalIndex(const PetscInt &f, const MatStencil &s, PetscInt &idx) const
Get the global index of a point in unpacked DM by providing MatStencil.
virtual PetscErrorCode write(const std::string &filePath) const
Write the mesh object into an HDF5 file.
virtual PetscErrorCode getPackedGlobalIndex(const PetscInt &f, const MatStencil &s, PetscInt &idx) const
Get the global index of a point in packed DM by providing MatStencil.
PetscErrorCode initDMDA()
Initialize DMDAs.
type::RealVec3D coordTrue
Underlying data for mesh point coordinates.
Definition: cartesianmesh.h:98
type::IntVec1D offsetsPackAllProcs
Offsets of packed velocity points in packed DM.
PetscErrorCode addLocalInfoString(std::stringstream &ss)
Gather info of parallel distribution and add to info string.
virtual ~CartesianMesh()
Default destructor.
PetscErrorCode createVelocityMesh()
Create velocity mesh information.
std::vector< AO > ao
References to underlying AO objects of velocity DMs.
PetscErrorCode createPressureDMDA()
Create DMDA for pressure.
virtual PetscErrorCode getLocalIndex(const PetscInt &f, const MatStencil &s, PetscInt &idx) const
Get the local index of a point by providing MatStencil.
type::IntVec2D UNLocalAllProcs
Number of local velocity points (without ghost points) for all processes and all velocity fields.
virtual PetscErrorCode destroy()
Manually destroy data.
PetscErrorCode init(const MPI_Comm &world, const YAML::Node &node)
Initialization.
PetscErrorCode createPressureMesh()
Create pressure mesh information.
PetscErrorCode createVertexMesh()
Create vertex information.
PetscErrorCode createInfoString()
Create a string of information.
virtual PetscBool isPointOnLocalProc(const type::RealVec1D &point, const type::Field &field)
CartesianMesh(const MPI_Comm &world, const YAML::Node &node)
Constructor.
type::IntVec2D offsetsAllProcs
Offsets of velocity points in unpacked DMs for all processes and all velocity fields.
PetscErrorCode createVelocityPack()
Create DMDAs for velocity fields and make a DMComposite.
type::RealVec1D max
Maximum coordinates of boundaries in all directions.
Definition: mesh.h:74
type::IntVec2D n
Total number of points of all fields and in all directions.
Definition: mesh.h:77
virtual PetscErrorCode destroy()
Manually destroy data.
Definition: mesh.cpp:38
DM UPack
DMComposite of velocity DMs.
Definition: mesh.h:124
MPI_Comm comm
Communicator.
Definition: mesh.h:128
type::IntVec2D ed
The ending index of all fields in all directions of this process.
Definition: mesh.h:111
std::string info
A string for printing information.
Definition: mesh.h:96
PetscMPIInt mpiRank
Rank of this process.
Definition: mesh.h:134
type::IntVec2D bg
The beginning index of all fields in all directions of this process.
Definition: mesh.h:107
PetscInt UNLocal
Total number of velocity points local to this process.
Definition: mesh.h:118
type::IntVec1D nProc
Number of processes in all directions.
Definition: mesh.h:103
type::GhostedVec3D coord
Coordinates of mesh points of all fields and in all directions.
Definition: mesh.h:84
PetscInt UN
Total number of velocity points.
Definition: mesh.h:90
type::GhostedVec3D dL
Spacings of mesh points of all fields and in all directions.
Definition: mesh.h:87
PetscMPIInt mpiSize
Total number of processes.
Definition: mesh.h:131
PetscInt pNLocal
Total number of pressure points local to this process.
Definition: mesh.h:121
PetscInt dim
Dimension.
Definition: mesh.h:68
std::vector< DM > da
A vector of DMs of all fields.
Definition: mesh.h:100
type::RealVec1D min
Minimum coordinates of boundaries in all directions.
Definition: mesh.h:71
type::BoolVec2D periodic
Bools indicating if any direction is periodic.
Definition: mesh.h:80
PetscInt pN
Total number of pressure points.
Definition: mesh.h:93
type::IntVec2D m
The number of points of all fields in all directions of this process.
Definition: mesh.h:115
PetscErrorCode parseMesh(const YAML::Node &meshNode, PetscInt &dim, type::RealVec1D &bg, type::RealVec1D &ed, type::IntVec1D &nTotal, type::RealVec2D &dL)
Parse a YAML node of cartesianMesh.
Definition: parser.cpp:239
PetscErrorCode writeHDF5Vecs(const MPI_Comm comm, const std::string &filePath, const std::string &loc, const std::vector< std::string > &names, const std::vector< Vec > &vecs, const PetscFileMode mode=FILE_MODE_WRITE)
Write a vector of Vec objects to an HDF5 file.
Definition: io.cpp:137
PetscErrorCode parseBCs(const YAML::Node &node, type::IntVec2D &bcTypes, type::RealVec2D &bcValues)
Parse boundary conditions from a YAML node.
Definition: parser.cpp:359
PetscErrorCode checkPeriodicBC(const type::IntVec2D &bcTypes, type::BoolVec2D &periodic)
Check if there is any periodic boundary condition and if these periodic BCs make sense.
Definition: misc.cpp:19
std::vector< IntVec1D > IntVec2D
2D std::vector holding PetscInt.
Definition: type.h:135
Field
Legal fields.
Definition: type.h:80
std::vector< PetscReal * > GhostedVec2D
A vector of pointers to mimic ghosted 1D vectors.
Definition: type.h:154
std::map< Field, std::string > fd2str
Mapping between Field and std::string.
Definition: type.cpp:24
std::vector< RealVec1D > RealVec2D
2D std::vector holding PetscReal.
Definition: type.h:142
std::vector< PetscInt > IntVec1D
1D std::vector holding PetscInt.
Definition: type.h:133
std::vector< PetscReal > RealVec1D
1D std::vector holding PetscReal.
Definition: type.h:140
std::vector< RealVec2D > RealVec3D
3D std::vector holding PetscReal.
Definition: type.h:144
Prototypes of I/O functions.
Prototypes of some miscellaneous functions.
std::vector< GhostedVec2D > GhostedVec3D
A vector of vector pointers to mimic ghosted 2D vectors.
Definition: type.h:157
A toolbox for building flow solvers.
Definition: bodypack.h:52
Prototypes of parser functions.