/*
  Copyright 2013, 2014, 2015 SINTEF ICT, Applied Mathematics.
  Copyright 2014 Dr. Blatt - HPC-Simulation-Software & Services
  Copyright 2015 IRIS AS
  Copyright 2014 STATOIL ASA.

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM.  If not, see <http://www.gnu.org/licenses/>.
*/

#include "config.h"

#if HAVE_MPI
#include "mpi.h"
#endif

#include <opm/simulators/utils/readDeck.hpp>

#include <opm/common/ErrorMacros.hpp>
#include <opm/common/OpmLog/EclipsePRTLog.hpp>
#include <opm/common/OpmLog/OpmLog.hpp>
#include <opm/common/utility/OpmInputError.hpp>
#include <opm/common/utility/String.hpp>

#include <opm/io/eclipse/EclIOdata.hpp>
#include <opm/io/eclipse/ERst.hpp>
#include <opm/io/eclipse/RestartFileView.hpp>
#include <opm/io/eclipse/rst/aquifer.hpp>
#include <opm/io/eclipse/rst/state.hpp>

#include <opm/input/eclipse/Deck/Deck.hpp>

#include <opm/input/eclipse/EclipseState/checkDeck.hpp>
#include <opm/input/eclipse/Schedule/Action/State.hpp>
#include <opm/input/eclipse/Schedule/ArrayDimChecker.hpp>
#include <opm/input/eclipse/Schedule/Schedule.hpp>
#include <opm/input/eclipse/Schedule/SummaryState.hpp>
#include <opm/input/eclipse/Schedule/UDQ/UDQState.hpp>
#include <opm/input/eclipse/Schedule/Well/WellTestState.hpp>
#include <opm/input/eclipse/EclipseState/SummaryConfig/SummaryConfig.hpp>

#include <opm/input/eclipse/Parser/ErrorGuard.hpp>
#include <opm/input/eclipse/Parser/Parser.hpp>

#include <opm/input/eclipse/Units/UnitSystem.hpp>

#include <opm/simulators/flow/KeywordValidation.hpp>
#include <opm/simulators/flow/ValidationFunctions.hpp>
#include <opm/simulators/utils/ParallelEclipseState.hpp>
#include <opm/simulators/utils/ParallelSerialization.hpp>
#include <opm/simulators/utils/PartiallySupportedFlowKeywords.hpp>
#include <opm/simulators/utils/UnsupportedFlowKeywords.hpp>

#include <fmt/format.h>

#include <cstdlib>
#include <cstdint>
#include <filesystem>
#include <functional>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <unordered_map>
#include <utility>

namespace {

    // Install per-category print limits from the deck on the terminal
    // logger backend, replacing the provisional limiter installed by
    // setupLogging().
    void setupMessageLimiter(const Opm::MessageLimits& msgLimits,
                             const std::string& stdout_log_id)
    {
        const auto limits = std::map<std::int64_t, int> {
            {Opm::Log::MessageType::Note,    msgLimits.getCommentPrintLimit()},
            {Opm::Log::MessageType::Info,    msgLimits.getMessagePrintLimit()},
            {Opm::Log::MessageType::Warning, msgLimits.getWarningPrintLimit()},
            {Opm::Log::MessageType::Error,   msgLimits.getErrorPrintLimit()},
            {Opm::Log::MessageType::Problem, msgLimits.getProblemPrintLimit()},
            {Opm::Log::MessageType::Bug,     msgLimits.getBugPrintLimit()},
        };

        Opm::OpmLog::getBackend<Opm::StreamLog>(stdout_log_id)
            ->setMessageLimiter(std::make_shared<Opm::MessageLimiter>(10, limits));
    }

    void loadObjectsFromRestart(const Opm::Deck& deck,
                                const Opm::Parser& parser,
                                const Opm::ParseContext& parseContext,
                                const bool initFromRestart,
                                const std::optional<int>& outputInterval,
                                Opm::EclipseState& eclipseState,
                                std::shared_ptr<Opm::Python> python,
                                std::shared_ptr<Opm::Schedule>& schedule,
                                std::unique_ptr<Opm::UDQState>& udqState,
                                std::unique_ptr<Opm::Action::State>& actionState,
                                std::unique_ptr<Opm::WellTestState>& wtestState,
                                Opm::ErrorGuard& errorGuard)
    {
        // Analytic aquifers must always be loaded from the restart file in
        // restarted runs, and the corresponding keywords (e.g., AQUANCON
        // and AQUCT) do not exist in the input deck in this case.  In
        // other words, there's no way to check if there really are
        // analytic aquifers in the run until we attempt to read the
        // specifications from the restart file.  If the loader determines
        // that there are no analytic aquifers, then
        // 'EclipseState::loadRestartAquifers()' does nothing.
        const auto& init_config = eclipseState.getInitConfig();
        const int report_step = init_config.getRestartStep();
        const auto rst_filename = eclipseState.getIOConfig()
            .getRestartFileName(init_config.getRestartRootName(), report_step, false);

        auto rst_file = std::make_shared<Opm::EclIO::ERst>(rst_filename);
        auto rst_view = std::make_shared<Opm::EclIO::RestartFileView>
            (std::move(rst_file), report_step);

        // Note: RstState::load() will just *read* from the grid structure,
        // and only do so if the case actually includes analytic aquifers.
        // The pointer to the input grid is just to allow 'nullptr' to
        // signify "don't load aquifers" in certain unit tests.  Passing an
        // optional<EclipseGrid> would be too expensive, however, since
        // doing so would create a copy of the grid inside the optional<>.
        const auto rst_state = Opm::RestartIO::RstState::
            load(std::move(rst_view),
                 eclipseState.runspec(), parser,
                 &eclipseState.getInputGrid());

        eclipseState.loadRestartAquifers(rst_state.aquifers);

        // For the time being, initializing wells and groups from the
        // restart file is not possible.  Work is underway, and the ability
        // is included here contingent on the user-level switch
        // 'initFromRestart' (i.e., passing "--sched-restart=false" on the
        // command line).
        const auto* init_state = initFromRestart ? &rst_state : nullptr;
        if (schedule == nullptr) {
            schedule = std::make_shared<Opm::Schedule>
                (deck, eclipseState, parseContext, errorGuard,
                 std::move(python), outputInterval, init_state);
        }

        udqState = std::make_unique<Opm::UDQState>
            ((*schedule)[0].udq().params().undefinedValue());
        udqState->load_rst(rst_state);

        actionState = std::make_unique<Opm::Action::State>();
        actionState->load_rst((*schedule)[report_step].actions(), rst_state);

        wtestState = std::make_unique<Opm::WellTestState>(schedule->runspec().start_time(), rst_state);
    }

    void createNonRestartDynamicObjects(const Opm::Deck& deck,
                                        const Opm::EclipseState& eclipseState,
                                        const Opm::ParseContext& parseContext,
                                        std::shared_ptr<Opm::Python> python,
                                        std::shared_ptr<Opm::Schedule>& schedule,
                                        std::unique_ptr<Opm::UDQState>& udqState,
                                        std::unique_ptr<Opm::Action::State>& actionState,
                                        std::unique_ptr<Opm::WellTestState>& wtestState,
                                        Opm::ErrorGuard& errorGuard)
    {
        if (schedule == nullptr) {
            schedule = std::make_shared<Opm::Schedule>
                (deck, eclipseState, parseContext,
                 errorGuard, std::move(python));
        }

        udqState = std::make_unique<Opm::UDQState>
            ((*schedule)[0].udq().params().undefinedValue());

        actionState = std::make_unique<Opm::Action::State>();
        wtestState = std::make_unique<Opm::WellTestState>();
    }

    std::shared_ptr<Opm::Deck>
    readDeckFile(const std::string& deckFilename,
                 const bool checkDeck,
                 const Opm::Parser& parser,
                 const Opm::ParseContext& parseContext,
                 Opm::ErrorGuard& errorGuard)
    {
        auto deck = std::make_shared<Opm::Deck>
            (parser.parseFile(deckFilename, parseContext, errorGuard));

        // Flag keywords that Flow does not support, or supports only
        // partially, before any further processing of the deck.
        auto keyword_validator = Opm::KeywordValidation::KeywordValidator {
            Opm::FlowKeywordValidation::unsupportedKeywords(),
            Opm::FlowKeywordValidation::partiallySupported<std::string>(),
            Opm::FlowKeywordValidation::partiallySupported<int>(),
            Opm::FlowKeywordValidation::partiallySupported<double>(),
            Opm::KeywordValidation::specialValidation()
        };

        keyword_validator.validateDeck(*deck, parseContext, errorGuard);

        if (checkDeck) {
            Opm::checkDeck(*deck, parser, parseContext, errorGuard);
        }

        return deck;
    }

    std::shared_ptr<Opm::EclipseState>
    createEclipseState([[maybe_unused]] Opm::Parallel::Communication comm,
                       const Opm::Deck& deck)
    {
#if HAVE_MPI
        return std::make_shared<Opm::ParallelEclipseState>(deck, comm);
#else
        return std::make_shared<Opm::EclipseState>(deck);
#endif
    }

    void readOnIORank(Opm::Parallel::Communication comm,
                      const std::string& deckFilename,
                      const Opm::ParseContext* parseContext,
                      std::shared_ptr<Opm::Deck>& deck,
                      std::shared_ptr<Opm::EclipseState>& eclipseState,
                      std::shared_ptr<Opm::Schedule>& schedule,
                      std::unique_ptr<Opm::UDQState>& udqState,
                      std::unique_ptr<Opm::Action::State>& actionState,
                      std::unique_ptr<Opm::WellTestState>& wtestState,
                      std::shared_ptr<Opm::SummaryConfig>& summaryConfig,
                      std::shared_ptr<Opm::Python> python,
                      const bool initFromRestart,
                      const bool checkDeck,
                      const std::optional<int>& outputInterval,
                      Opm::ErrorGuard& errorGuard)
    {
        if (((deck == nullptr) || (schedule == nullptr) || (summaryConfig == nullptr)) &&
            (parseContext == nullptr))
        {
            OPM_THROW(std::logic_error,
                      "We need a parse context if deck, schedule, "
                      "or summaryConfig are not initialized");
        }

        auto parser = Opm::Parser{};
        if (deck == nullptr) {
            deck = readDeckFile(deckFilename, checkDeck, parser,
                                *parseContext, errorGuard);
        }

        if (eclipseState == nullptr) {
            eclipseState = createEclipseState(comm, *deck);
        }

        if (eclipseState->getInitConfig().restartRequested()) {
            loadObjectsFromRestart(*deck, parser, *parseContext,
                                   initFromRestart, outputInterval,
                                   *eclipseState, std::move(python),
                                   schedule, udqState, actionState, wtestState,
                                   errorGuard);
        }
        else {
            createNonRestartDynamicObjects(*deck, *eclipseState,
                                           *parseContext, std::move(python),
                                           schedule, udqState, actionState, wtestState,
                                           errorGuard);
        }

        // The logger backends might not be set up at this point (e.g., in
        // certain unit tests), so only install the deck's message limits
        // if the terminal backend actually exists.
        if (Opm::OpmLog::hasBackend("STDOUT_LOGGER")) {
            setupMessageLimiter((*schedule)[0].message_limits(), "STDOUT_LOGGER");
        }

        if (summaryConfig == nullptr) {
            summaryConfig = std::make_shared<Opm::SummaryConfig>
                (*deck, *schedule, eclipseState->fieldProps(),
                 eclipseState->aquifer(), *parseContext, errorGuard);
        }

        Opm::checkConsistentArrayDimensions(*eclipseState, *schedule,
                                            *parseContext, errorGuard);
    }

#if HAVE_MPI
    void defineStateObjectsOnNonIORank(Opm::Parallel::Communication comm,
                                       std::shared_ptr<Opm::Python> python,
                                       std::shared_ptr<Opm::EclipseState>& eclipseState,
                                       std::shared_ptr<Opm::Schedule>& schedule,
                                       std::unique_ptr<Opm::UDQState>& udqState,
                                       std::unique_ptr<Opm::Action::State>& actionState,
                                       std::unique_ptr<Opm::WellTestState>& wtestState,
                                       std::shared_ptr<Opm::SummaryConfig>& summaryConfig)
    {
        if (eclipseState == nullptr) {
            eclipseState = std::make_shared<Opm::ParallelEclipseState>(comm);
        }

        if (schedule == nullptr) {
            schedule = std::make_shared<Opm::Schedule>(std::move(python));
        }

        if (udqState == nullptr) {
            udqState = std::make_unique<Opm::UDQState>(0);
        }

        if (actionState == nullptr) {
            actionState = std::make_unique<Opm::Action::State>();
        }

        if (wtestState == nullptr) {
            wtestState = std::make_unique<Opm::WellTestState>();
        }

        if (summaryConfig == nullptr) {
            summaryConfig = std::make_shared<Opm::SummaryConfig>();
        }
    }
#endif

    // Returns whether at least one active cell in the input grid has
    // valid/finite geometry.
    bool gridHasValidCellGeometry(const Opm::EclipseGrid& inputGrid,
                                  const Opm::UnitSystem& usys)
    {
        const auto numActive = inputGrid.getNumActive();

        // Note: '0*numActive' gives a loop counter of the same type as
        // 'numActive'.
        for (auto activeCell = 0*numActive; activeCell < numActive; ++activeCell) {
            if (inputGrid.isValidCellGeomtry(inputGrid.getGlobalIndex(activeCell), usys)) {
                return true;
            }
        }

        return false;
    }

    bool gridHasValidCellGeometry(Opm::Parallel::Communication comm,
                                  const Opm::EclipseState& eclipseState)
    {
        bool hasValidCells = false;

        if (comm.rank() == 0) {
            hasValidCells =
                gridHasValidCellGeometry(eclipseState.getInputGrid(),
                                         eclipseState.getDeckUnitSystem());
        }

#if HAVE_MPI
        const auto status = comm.broadcast(&hasValidCells, 1, 0);

        if (status != MPI_SUCCESS) {
            throw std::invalid_argument {
                "Unable to establish cell geometry validity across MPI ranks"
            };
        }
#endif // HAVE_MPI

        return hasValidCells;
    }
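
    // Design note for the parallel overload above: only rank 0 inspects
    // the input grid, and the result is then broadcast so that all ranks
    // reach the same conclusion.  Every rank in 'comm' must therefore make
    // this call, i.e., it is a collective operation.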

} // Anonymous namespace

// ---------------------------------------------------------------------------

void Opm::ensureOutputDirExists(const std::string& cmdline_output_dir)
{
    namespace fs = std::filesystem;

    if (! fs::is_directory(cmdline_output_dir)) {
        try {
            fs::create_directories(cmdline_output_dir);
        }
        catch (...) {
            throw std::runtime_error {
                fmt::format("Creation of output directory '{}' failed",
                            cmdline_output_dir)
            };
        }
    }
}

// Setup the OpmLog backends
Opm::FileOutputMode
Opm::setupLogging(const int mpi_rank_,
                  const std::string& deck_filename,
                  const std::string& cmdline_output_dir,
                  const std::string& cmdline_output,
                  const bool output_cout_,
                  const std::string& stdout_log_id,
                  const bool allRanksDbgLog)
{
    if (!cmdline_output_dir.empty()) {
        ensureOutputDirExists(cmdline_output_dir);
    }

    // Create the log file names
    using std::filesystem::path;
    path fpath(deck_filename);
    std::string baseName;
    std::ostringstream debugFileStream;
    std::ostringstream logFileStream;

    // Strip extension "." or ".DATA"
    std::string extension = uppercase(fpath.extension().string());
    if (extension == ".DATA" || extension == ".") {
        baseName = uppercase(fpath.stem().string());
    }
    else {
        baseName = uppercase(fpath.filename().string());
    }

    std::string output_dir = cmdline_output_dir;
    if (output_dir.empty()) {
        output_dir = fpath.has_parent_path()
            ? absolute(fpath.parent_path()).generic_string()
            : std::filesystem::current_path().generic_string();
    }

    logFileStream << output_dir << "/" << baseName;
    debugFileStream << output_dir << "/" << baseName;

    if (mpi_rank_ != 0) {
        // Append the rank to the debug file names of non-zero ranks so
        // that messages from those ranks are not lost.
        debugFileStream << "." << mpi_rank_;
        // A .PRT file with a rank suffix should never appear; if it
        // does, that indicates a bug.
        logFileStream << "." << mpi_rank_;
    }
    logFileStream << ".PRT";
    debugFileStream << ".DBG";

    FileOutputMode output;
    {
        static std::map<std::string, FileOutputMode> stringToOutputMode =
            { {"none",  FileOutputMode::OUTPUT_NONE },
              {"false", FileOutputMode::OUTPUT_LOG_ONLY },
              {"log",   FileOutputMode::OUTPUT_LOG_ONLY },
              {"all",   FileOutputMode::OUTPUT_ALL },
              {"true",  FileOutputMode::OUTPUT_ALL }};
        auto outputModeIt = stringToOutputMode.find(cmdline_output);
        if (outputModeIt != stringToOutputMode.end()) {
            output = outputModeIt->second;
        }
        else {
            output = FileOutputMode::OUTPUT_ALL;
            std::cerr << "Value " << cmdline_output
                      << " is not a recognized output mode. Using \"all\" instead.\n";
        }
        if (!allRanksDbgLog && mpi_rank_ != 0) {
            output = FileOutputMode::OUTPUT_NONE;
        }
    }

    if (output > FileOutputMode::OUTPUT_NONE) {
        std::shared_ptr<Opm::EclipsePRTLog> prtLog = std::make_shared<Opm::EclipsePRTLog>
            (logFileStream.str(), Opm::Log::NoDebugMessageTypes, false, output_cout_);
        Opm::OpmLog::addBackend("ECLIPSEPRTLOG", prtLog);
        prtLog->setMessageLimiter(std::make_shared<Opm::MessageLimiter>());
        prtLog->setMessageFormatter(std::make_shared<Opm::SimpleMessageFormatter>(false));
    }

    if (output >= FileOutputMode::OUTPUT_LOG_ONLY) {
        std::shared_ptr<Opm::StreamLog> debugLog = std::make_shared<Opm::EclipsePRTLog>
            (debugFileStream.str(), Opm::Log::DefaultMessageTypes, false, output_cout_);
        Opm::OpmLog::addBackend("DEBUGLOG", debugLog);
    }

    if (mpi_rank_ == 0) {
        std::shared_ptr<Opm::StreamLog> streamLog = std::make_shared<Opm::StreamLog>
            (std::cout, Opm::Log::StdoutMessageTypes);
        Opm::OpmLog::addBackend(stdout_log_id, streamLog);
        // Set a tag limit of 10 (no category limit).  Will later in the
        // run be replaced by calling setupMessageLimiter(), after the
        // deck is read and the (possibly user-set) category limits are
        // known.
        streamLog->setMessageLimiter(std::make_shared<Opm::MessageLimiter>(10));
        bool use_color_coding = OpmLog::stdoutIsTerminal();
        streamLog->setMessageFormatter(std::make_shared<Opm::SimpleMessageFormatter>(use_color_coding));
    }

    return output;
}
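
// Illustrative usage sketch only (hypothetical driver code, not part of
// this file):
//
//   const auto mode = Opm::setupLogging(comm.rank(), "CASE.DATA",
//                                       /* cmdline_output_dir = */ "",
//                                       /* cmdline_output     = */ "all",
//                                       /* output_cout_       = */ comm.rank() == 0,
//                                       "STDOUT_LOGGER",
//                                       /* allRanksDbgLog     = */ false);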

void Opm::readDeck(Opm::Parallel::Communication comm,
                   const std::string& deckFilename,
                   std::shared_ptr<Deck>& deck,
                   std::shared_ptr<EclipseState>& eclipseState,
                   std::shared_ptr<Schedule>& schedule,
                   std::unique_ptr<UDQState>& udqState,
                   std::unique_ptr<Action::State>& actionState,
                   std::unique_ptr<WellTestState>& wtestState,
                   std::shared_ptr<SummaryConfig>& summaryConfig,
                   std::unique_ptr<ErrorGuard> errorGuard,
                   std::shared_ptr<Python> python,
                   std::unique_ptr<ParseContext> parseContext,
                   const bool initFromRestart,
                   const bool checkDeck,
                   const std::optional<int>& outputInterval)
{
    if (errorGuard == nullptr) {
        errorGuard = std::make_unique<ErrorGuard>();
    }

    int parseSuccess = 1; // > 0 is success
    std::string failureMessage;

    if (comm.rank() == 0) { // Always true when !HAVE_MPI
        try {
            readOnIORank(comm, deckFilename, parseContext.get(), deck,
                         eclipseState, schedule, udqState, actionState, wtestState,
                         summaryConfig, std::move(python), initFromRestart,
                         checkDeck, outputInterval, *errorGuard);
        }
        catch (const OpmInputError& input_error) {
            failureMessage = input_error.what();
            parseSuccess = 0;
        }
        catch (const std::exception& std_error) {
            failureMessage = std_error.what();
            parseSuccess = 0;
        }
    }

#if HAVE_MPI
    else {
        defineStateObjectsOnNonIORank(comm, std::move(python), eclipseState,
                                      schedule, udqState, actionState, wtestState,
                                      summaryConfig);
    }

    // In case of parse errors, eclipseState/schedule might be null and
    // trigger segmentation faults in parallel during the broadcast (e.g.,
    // when serializing the non-existent TableManager).
    parseSuccess = comm.min(parseSuccess);
    try {
        if (parseSuccess) {
            eclStateBroadcast(comm, *eclipseState, *schedule,
                              *summaryConfig, *udqState, *actionState, *wtestState);
        }
    }
    catch (const std::exception& broadcast_error) {
        failureMessage = broadcast_error.what();
        OpmLog::error(fmt::format("Distributing properties to all processes failed\n"
                                  "Internal error message: {}", broadcast_error.what()));
        parseSuccess = 0;
    }
#endif

    if (*errorGuard) { // Errors encountered during parsing
        parseSuccess = 0;
        errorGuard->dump();
        errorGuard->clear();
    }

    parseSuccess = comm.min(parseSuccess);

    if (! parseSuccess) {
        if (comm.rank() == 0) {
            OpmLog::error(fmt::format("Unrecoverable errors while loading input: {}",
                                      failureMessage));
        }

#if HAVE_MPI
        MPI_Finalize();
#endif

        std::exit(EXIT_FAILURE);
    }
}
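
// Illustrative only: typical use from a simulator driver, with all state
// objects initially empty so that everything is constructed from the deck
// file (hypothetical sketch, not part of this file):
//
//   std::shared_ptr<Opm::Deck> deck;
//   std::shared_ptr<Opm::EclipseState> eclState;
//   std::shared_ptr<Opm::Schedule> schedule;
//   std::unique_ptr<Opm::UDQState> udqState;
//   std::unique_ptr<Opm::Action::State> actionState;
//   std::unique_ptr<Opm::WellTestState> wtestState;
//   std::shared_ptr<Opm::SummaryConfig> summaryConfig;
//
//   Opm::readDeck(comm, "CASE.DATA", deck, eclState, schedule,
//                 udqState, actionState, wtestState, summaryConfig,
//                 std::make_unique<Opm::ErrorGuard>(),
//                 std::make_shared<Opm::Python>(),
//                 std::make_unique<Opm::ParseContext>(),
//                 /* initFromRestart = */ false,
//                 /* checkDeck       = */ true,
//                 /* outputInterval  = */ std::nullopt);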

void Opm::verifyValidCellGeometry(Parallel::Communication comm,
                                  const EclipseState& eclipseState)
{
    if (gridHasValidCellGeometry(comm, eclipseState)) {
        return;
    }

    throw std::invalid_argument {
        R"(No active cell in input grid has valid/finite cell geometry
Please check geometry keywords, especially if grid is imported through GDFILE)"
    };
}
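
// Illustrative only: verifyValidCellGeometry() is intended to be called
// collectively on all ranks right after readDeck(), e.g. (hypothetical):
//
//   Opm::verifyValidCellGeometry(comm, *eclState);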