590 {
591 using namespace maia;
592 using namespace std;
593
594
595
596
597
598 std::vector<std::tuple<std::array<MFloat, nDim>, MInt, MInt>> localTmpPoints;
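// Each entry of localTmpPoints bundles the point coordinates, the id of the containing element on this
// domain, and the point's original index in the input sequence (accessed below via get<0>, get<1>, get<2>).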
599
601
604
605 std::vector<MFloat> tmpCoordinates{};
606
607
610 }
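// Note: lines 606-609 are not shown in this listing; presumably they fill tmpCoordinates with the sampling
// point coordinates read from timeSeries.m_inputFileName on the root rank (an assumption based on the
// broadcast of coordinatesScratch below).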
611
613
615 TERMM(1, "ERROR: no sampling points found for input file '" + timeSeries.m_inputFileName + "'");
616 m_log << "ERROR: no sampling points found for input file '" << timeSeries.m_inputFileName << "'" << std::endl;
617 return;
618 }
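// Note: TERMM(1, ...) aborts the run, so the m_log output and the return on lines 616/617 are effectively
// unreachable.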
619
622 copy(tmpCoordinates.begin(), tmpCoordinates.end(), coordinatesScratch.begin());
623 }
625 "coordinatesScratch[0]");
626
628
630 std::array<MFloat, nDim> curCoordinates{};
631 MInt curElementId = -1;
633 for(MInt j = 0; j < nDim; j++) {
634 curCoordinates[j] = tmpCoordinatesTensor(i, j);
635 }
636
637
638 curElementId = solver().getIdAtPoint(&curCoordinates[0], true);
639
640 if(curElementId != -1) {
641 localTmpPoints[timeSeries.m_noLocalPoints] = make_tuple(curCoordinates, curElementId, i);
643
644
645 solver().initInterpolationForCell(curElementId);
646 }
647 }
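// Each broadcast point is tested with solver().getIdAtPoint(); points located on this domain are stored
// together with their element id and original index i, and interpolation is initialized for the containing
// cell. The omitted line 642 presumably increments timeSeries.m_noLocalPoints, which serves as the
// insertion index on line 641.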
648
649
651 loadedPointsVector.fill(0);
653 loadedPointsVector[get<2>(localTmpPoints[i])]++;
654 }
655
658 0, mpiComm(), AT_, "MPI_IN_PLACE", "loadedPointsVector[0]");
659 for(MUint i = 0; i < loadedPointsVector.size(); i++) {
660 if(loadedPointsVector[i] != 1) {
661 std::ostringstream err;
662 err << "Point at line " << i + 1 << " of file "
663 << timeSeries.m_inputFileName << " was loaded " << loadedPointsVector[i] << " times. Aborting."
664 << std::endl;
665 TERMM(1, err.str());
666 }
667 }
668 } else {
670 mpiComm(), AT_, "loadedPointsVector[0]", "nullptr");
671 }
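// In the else-branch the non-root ranks contribute their local counters to the same reduction; the
// "nullptr" variable name on line 670 indicates that no receive buffer is used there.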
674
675
678
679 std::array<MFloat, nDim> coordinates{};
681
682 for(MInt i = 0; i < noPoints; i++) {
683
685
686
687 const MInt curElementId = solver().getIdAtPoint(&coordinates[0], true);
688 if(curElementId != -1) {
689 localTmpPoints.push_back(make_tuple(coordinates, curElementId, i));
691 checkSum += (i + 1);
692
693
694 solver().initInterpolationForCell(curElementId);
695 }
696 }
697
700 "MPI_IN_PLACE", "globalNoPoints");
701 if(globalNoPoints != noPoints) {
702 TERMM(1, "Global number of points does not match " + std::to_string(globalNoPoints)
703 + " != " + std::to_string(noPoints));
704 }
705
706
707
708
710 "checkSum");
711 const MLong noPointsTmp = noPoints;
712 if(2 * checkSum != (noPointsTmp * noPointsTmp + noPointsTmp)) {
713 TERMM(1, "Incorrect check sum for " + std::to_string(noPoints) + " points: " + std::to_string(checkSum));
714 }
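// Each found point i contributes i + 1 to checkSum (line 691), so the globally reduced sum must equal
// 1 + 2 + ... + noPoints = noPoints * (noPoints + 1) / 2, i.e. 2 * checkSum == noPointsTmp * noPointsTmp + noPointsTmp.
// Example: for 4 points the expected checkSum is 10, and 2 * 10 == 4 * 4 + 4.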
715 } else {
716 m_log << "Warning: no sampling data input file specified and generation of points disabled." << std::endl;
717 return;
718 }
719
720
723
724
727 MPI_Comm_rank(mpiComm(), &rank);
728 }
729
730
732 mpiComm(), AT_, "rank", "pointDataScratch[0]");
733
734
735 const MInt noPdDomains =
736 count_if(pointDataScratch.begin(), pointDataScratch.end(), [](const MInt a) { return a != -1; });
739 for(MUint i = 0; i < pointDataScratch.size(); i++) {
740 if(pointDataScratch[i] != -1) {
741 pdDomains[position] = pointDataScratch[i];
742 position++;
743 }
744 }
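// After this loop, pdDomains[0 .. noPdDomains - 1] is the compacted list of ranks owning at least one
// sampling point.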
745
746
747 MPI_Group globalGroup, localGroup;
749 MPI_Group_incl(globalGroup, noPdDomains, &pdDomains[0], &localGroup, AT_);
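// Sketch (an assumption; the following lines are omitted from the listing): a sub-communicator for the
// point-holding ranks is presumably created from localGroup via the MPI_Comm_create wrapper, e.g.
//   MPI_Comm_create(mpiComm(), localGroup, &timeSeries.m_mpiComm, AT_, "timeSeries.m_mpiComm");
// The target communicator name is inferred from the use of timeSeries.m_mpiComm on line 759.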
750
751
755
756
759 MPI_Comm_rank(timeSeries.m_mpiComm, &subRank);
760 if(subRank == 0) {
762 }
763 }
764
765
766
767 stable_sort(localTmpPoints.begin(),
769
770 [this](const std::tuple<std::array<MFloat, nDim>, MInt, MInt>& a,
771 const std::tuple<std::array<MFloat, nDim>, MInt, MInt>& b) {
772 return solver().grid().tree().globalId(solver().getCellIdByIndex(get<1>(a)))
773 < solver().grid().tree().globalId(solver().getCellIdByIndex(get<1>(b)));
774 });
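// The locally found points are sorted by the global id of their containing cell, giving a deterministic,
// cell-ordered sequence; timeSeries.m_sortIndex (line 787) records each point's original index in the
// unsorted input.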
775
776
783 for(MInt j = 0; j < nDim; j++) {
784 coordinatesTensor(i, j) = get<0>(localTmpPoints[i])[j];
785 }
787 timeSeries.m_sortIndex[i] = get<2>(localTmpPoints[i]);
788 }
789 }
790
791
794 }
795
796
799
801 }
802
803
807 } else {
810 }
811
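// The statement fragments on lines 815, 818, and 822 are the tails of log/console output whose opening
// lines are omitted from this listing; they apparently report successful initialization of the sampling
// feature.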
815 << getBaseName() << " sampling feature successfully." << std::endl;
818 << " sampling feature successfully." << std::endl;
819 }
822 << std::endl;
823}