Use CpGrid::loadBalance to distribute the field properties.
The created data handle for the communication could in theory be used with other DUNE grids, too. In practice we will need to merge it with the handle that ALUGrid already uses to communicate the Cartesian indices. This PR gets rid of the get_global_(double|int) methods in ParallelEclipseState and reduces the amount of boilerplate code there.
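For context, a data handle in DUNE is a small class implementing Dune::CommDataHandleIF whose gather/scatter callbacks the grid invokes for every cell it moves during redistribution. The following is a minimal sketch, not the actual handle from this change, of how a single double-valued field property could be shipped to its new owner during CpGrid::loadBalance. The class name FieldPropsDataHandle and the index bookkeeping are illustrative assumptions, and details such as the fixedSize/fixedsize spelling depend on the DUNE version.

// Minimal sketch (assumed names, not the handle from this PR) of a DUNE
// communication data handle that ships one double per cell when the grid
// is redistributed: the grid calls gather() on the sending side and
// scatter() on the receiving side for every transferred cell.
#include <dune/grid/common/datahandleif.hh>

#include <cstddef>
#include <vector>

template<class GridView>
class FieldPropsDataHandle // hypothetical name
    : public Dune::CommDataHandleIF<FieldPropsDataHandle<GridView>, double>
{
public:
    // 'values' must be sized to the number of local cells on each rank.
    FieldPropsDataHandle(const GridView& gridView, std::vector<double>& values)
        : gridView_(gridView), values_(values)
    {}

    bool contains(int /*dim*/, int codim) const
    { return codim == 0; } // communicate cell (codim-0) data only

    bool fixedSize(int /*dim*/, int /*codim*/) const
    { return true; } // exactly one double per cell ('fixedsize' in older DUNE)

    template<class Entity>
    std::size_t size(const Entity&) const
    { return 1; }

    template<class Buffer, class Entity>
    void gather(Buffer& buffer, const Entity& entity) const
    { buffer.write(values_[gridView_.indexSet().index(entity)]); }

    template<class Buffer, class Entity>
    void scatter(Buffer& buffer, const Entity& entity, std::size_t /*n*/)
    {
        double value;
        buffer.read(value);
        values_[gridView_.indexSet().index(entity)] = value;
    }

private:
    const GridView& gridView_;
    std::vector<double>& values_;
};

With a handle along these lines, distributing a property reduces to handing it to the load balancer (CpGrid provides loadBalance overloads that accept a data handle), instead of the hand-rolled MPI_Send/MPI_Recv protocol removed in the diff below.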
@@ -232,125 +232,4 @@ void ParallelEclipseState::switchToDistributedProps()
    m_parProps = true;
}

#if HAVE_MPI
namespace {

template<class T>
struct GetField {
    GetField(const FieldPropsManager& propMan) : props(propMan) {}
    std::vector<T> getField(const std::string& key) const;
    const FieldPropsManager& props;
};

template<>
std::vector<int> GetField<int>::getField(const std::string& key) const {
    return props.get_global_int(key);
}

template<>
std::vector<double> GetField<double>::getField(const std::string& key) const {
    return props.get_global_double(key);
}

template<class T>
void extractRootProps(const std::vector<int>& localToGlobal,
                      const std::vector<std::string>& keys,
                      const GetField<T>& getter,
                      std::map<std::string,std::vector<T>>& localMap)
{
    for (const std::string& key : keys) {
        auto prop = getter.getField(key);
        std::vector<T>& local = localMap[key];
        local.reserve(localToGlobal.size());
        for (int cell : localToGlobal) {
            local.push_back(prop[cell]);
        }
    }
}

template<class T>
void packProps(const std::vector<int>& l2gCell,
               const std::vector<std::string>& keys,
               const GetField<T>& getter,
               std::vector<char>& buffer, int& position)
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    std::vector<T> sendData(l2gCell.size());
    for (const std::string& key : keys) {
        auto prop = getter.getField(key);
        size_t idx = 0;
        for (int cell : l2gCell)
            sendData[idx++] = prop[cell];
        Mpi::pack(sendData, buffer, position, comm);
    }
}

}

void ParallelEclipseState::setupLocalProps(const std::vector<int>& localToGlobal)
{
    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    if (comm.rank() == 0) {
        extractRootProps(localToGlobal, this->globalFieldProps().keys<int>(),
                         GetField<int>(this->globalFieldProps()),
                         m_fieldProps.m_intProps);
        extractRootProps(localToGlobal, this->globalFieldProps().keys<double>(),
                         GetField<double>(this->globalFieldProps()),
                         m_fieldProps.m_doubleProps);
        for (int i = 1; i < comm.size(); ++i) {
            std::vector<int> l2gCell;
            size_t size;
            MPI_Recv(&size, 1, Dune::MPITraits<size_t>::getType(), i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            l2gCell.resize(size);
            MPI_Recv(l2gCell.data(), size, MPI_INT, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
            size_t cells = l2gCell.size();
            const auto& intKeys = this->globalFieldProps().keys<int>();
            const auto& dblKeys = this->globalFieldProps().keys<double>();
            size = Mpi::packSize(intKeys, comm) +
                   Mpi::packSize(dblKeys, comm) +
                   intKeys.size() * Mpi::packSize(std::vector<int>(cells), comm) +
                   dblKeys.size() * Mpi::packSize(std::vector<double>(cells), comm);

            std::vector<char> buffer(size);
            int position = 0;
            Mpi::pack(intKeys, buffer, position, comm);
            Mpi::pack(dblKeys, buffer, position, comm);
            packProps(l2gCell, intKeys, GetField<int>(this->globalFieldProps()),
                      buffer, position);
            packProps(l2gCell, dblKeys, GetField<double>(this->globalFieldProps()),
                      buffer, position);
            MPI_Send(&position, 1, MPI_INT, i, 0, MPI_COMM_WORLD);
            MPI_Send(buffer.data(), position, MPI_CHAR, i, 0, MPI_COMM_WORLD);
        }
    } else {
        size_t l2gSize = localToGlobal.size();
        MPI_Send(&l2gSize, 1, Dune::MPITraits<size_t>::getType(), 0, 0, MPI_COMM_WORLD);
        MPI_Send(localToGlobal.data(), localToGlobal.size(), MPI_INT, 0, 0, MPI_COMM_WORLD);
        int size;
        MPI_Recv(&size, 1, MPI_INT, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        std::vector<char> buffer(size);
        MPI_Recv(buffer.data(), size, MPI_CHAR, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        std::vector<std::string> intKeys, dblKeys;
        int position = 0;
        Mpi::unpack(intKeys, buffer, position, comm);
        Mpi::unpack(dblKeys, buffer, position, comm);
        for (const std::string& key : intKeys) {
            Mpi::unpack(m_fieldProps.m_intProps[key], buffer, position, comm);
        }
        for (const std::string& key : dblKeys) {
            Mpi::unpack(m_fieldProps.m_doubleProps[key], buffer, position, comm);
        }
    }
}
#endif

} // end namespace Opm