#804 Updated opm-common based on 1216bc052542f24ec6fcfbe1947d52e6300ff754
Magne Sjaastad 2016-10-13 13:33:54 +02:00
parent 3b4f1620a5
commit 0306168bb4
27 changed files with 397 additions and 180 deletions

View File

@ -8,7 +8,6 @@
# HAVE_SHARED_PTR True if std::shared_ptr is available
# HAVE_UNIQUE_PTR True if std::unique_ptr is available
# HAVE_NULLPTR True if nullptr is available
# HAVE_REGEX True if std::regex available and sufficiently usable
# HAVE_ARRAY True if header <array> and fill() are available
# HAVE_ATTRIBUTE_ALWAYS_INLINE True if attribute always inline is supported
# HAS_ATTRIBUTE_UNUSED True if attribute unused is supported
@ -147,33 +146,6 @@ CHECK_CXX_SOURCE_COMPILES("
" HAVE_NULLPTR
)
# <regex>
CHECK_CXX_SOURCE_RUNS("
#include <regex>
int main(void)
{
std::regex r(\"AB.*|BC+|DE.+\", std::regex::extended);
if (!std::regex_match(\"AB\", r))
return 1;
if (!std::regex_match(\"ABC\", r))
return 2;
if (!std::regex_match(\"ABC!#\", r))
return 3;
if (std::regex_match(\"B\", r))
return 4;
if (!std::regex_match(\"BC\", r))
return 5;
if (std::regex_match(\"BCE\", r))
return 6;
if (std::regex_match(\"DE\", r))
return 7;
if (!std::regex_match(\"DEF\", r))
return 8;
return 0;
}
" HAVE_REGEX
)
# constexpr
CHECK_CXX_SOURCE_COMPILES("
template <class T>

View File

@ -0,0 +1,49 @@
# Find the Python wrappers for module cwrap from ert
#
# Set the cache variable CWRAP_PYTHON_PATH to the install location of the root
# ert package.
find_package(PythonInterp)
if(PYTHONINTERP_FOUND)
# We try to find the cwrap Python distribution. This is done by running Python
# code which tries to 'import cwrap' and prints out the path to the module if
# the import succeeds.
#
# The normal Python import machinery is employed, so if you have installed the
# cwrap Python package in a default location, or alternatively set the PYTHONPATH
# variable, the cwrap distribution will be found there, independently of the
# alternative locations probed via the ${PATH_LIST} variable.
if (EXISTS "/etc/debian_version")
set( PYTHON_PACKAGE_PATH "dist-packages")
else()
set( PYTHON_PACKAGE_PATH "site-packages")
endif()
set(PYTHON_INSTALL_PREFIX "lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/${PYTHON_PACKAGE_PATH}" CACHE STRING "Subdirectory to install Python modules in")
set(PATH_LIST)
if (ERT_ROOT)
list(APPEND PATH_LIST ${ERT_ROOT})
endif()
list(APPEND PATH_LIST ${CMAKE_PREFIX_PATH})
# Add various popular sibling alternatives.
list(APPEND PATH_LIST "${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_BINARY_DIR}/../ert-build")
foreach( PATH ${PATH_LIST})
set( python_code "import sys; sys.path.insert(0 , '${PATH}/${PYTHON_INSTALL_PREFIX}'); import os.path; import inspect; import cwrap; print os.path.dirname(os.path.dirname(inspect.getfile(cwrap)))")
execute_process( COMMAND ${PYTHON_EXECUTABLE} -c "${python_code}"
RESULT_VARIABLE import_result
OUTPUT_VARIABLE stdout_output
ERROR_VARIABLE stderr_output
OUTPUT_STRIP_TRAILING_WHITESPACE )
if (${import_result} EQUAL 0)
set( CWRAP_PYTHON_PATH ${stdout_output} CACHE PATH "Python path for cwrap" )
break()
endif()
endforeach()
endif()
find_package_handle_standard_args("Cwrap" DEFAULT_MSG CWRAP_PYTHON_PATH)
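As a hedged illustration only (not part of this commit): a consuming CMakeLists.txt could use the CWRAP_PYTHON_PATH cache variable set above roughly as follows; the test name my_python_test is a placeholder.

find_package(Cwrap)
if (CWRAP_PYTHON_PATH)
  # Placeholder test name; make the discovered cwrap package importable in Python tests.
  set_property(TEST my_python_test APPEND PROPERTY
               ENVIRONMENT "PYTHONPATH=${CWRAP_PYTHON_PATH}")
endif()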

View File

@ -34,7 +34,7 @@ find_path (ERT_ECL_INCLUDE_DIR
NAMES "ert/ecl/ecl_util.h"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_SOURCE_DIR}/../ert"
PATH_SUFFIXES "devel/libecl/include/" "include"
PATH_SUFFIXES "libecl/include/" "include"
DOC "Path to ERT Eclipse library header files"
${_no_default_path}
)
@ -42,7 +42,7 @@ find_path (ERT_ECL_WELL_INCLUDE_DIR
NAMES "ert/ecl_well/well_const.h"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_SOURCE_DIR}/../ert"
PATH_SUFFIXES "devel/libecl_well/include/" "include"
PATH_SUFFIXES "libecl_well/include/" "include"
DOC "Path to ERT Eclipse library header files"
${_no_default_path}
)
@ -50,7 +50,7 @@ find_path (ERT_ECLXX_INCLUDE_DIR
NAMES "ert/ecl/EclKW.hpp"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_SOURCE_DIR}/../ert"
PATH_SUFFIXES "devel/libeclxx/include/" "include"
PATH_SUFFIXES "libeclxx/include/" "include"
DOC "Path to ERT Eclipse C++ library header files"
${_no_default_path}
)
@ -58,7 +58,7 @@ find_path (ERT_UTIL_INCLUDE_DIR
NAMES "ert/util/stringlist.h"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_SOURCE_DIR}/../ert"
PATH_SUFFIXES "devel/libert_util/include/" "include"
PATH_SUFFIXES "libert_util/include/" "include"
DOC "Path to ERT Eclipse library header files"
${_no_default_path}
)
@ -66,7 +66,7 @@ find_path (ERT_UTILXX_INCLUDE_DIR
NAMES "ert/util/ert_unique_ptr.hpp"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_SOURCE_DIR}/../ert"
PATH_SUFFIXES "devel/libert_utilxx/include/" "include"
PATH_SUFFIXES "libert_utilxx/include/" "include"
DOC "Path to ERT Eclipse C++ library header files"
${_no_default_path}
)
@ -74,8 +74,8 @@ find_path (ERT_GEN_INCLUDE_DIR
NAMES "ert/util/int_vector.h"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_SOURCE_DIR}/../ert"
PATH_SUFFIXES "devel/libert_util/include"
"include" "build/libert_util/include" "devel/build/libert_util/include"
PATH_SUFFIXES "libert_util/include"
"include" "build/libert_util/include" "build/libert_util/include"
DOC "Path to ERT generated library header files"
${_no_default_path}
)
@ -90,9 +90,7 @@ find_library (ERT_LIBRARY_ECL
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_BINARY_DIR}/../ert"
"${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_SOURCE_DIR}/../ert/devel/build"
"${PROJECT_BINARY_DIR}/../ert-build"
"${PROJECT_BINARY_DIR}/../ert/devel"
PATH_SUFFIXES "lib" "lib/Release" "lib/Debug" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
DOC "Path to ERT Eclipse library archive/shared object files"
${_no_default_path}
@ -102,9 +100,7 @@ find_library (ERT_LIBRARY_ECLXX
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_BINARY_DIR}/../ert"
"${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_SOURCE_DIR}/../ert/devel/build"
"${PROJECT_BINARY_DIR}/../ert-build"
"${PROJECT_BINARY_DIR}/../ert/devel"
PATH_SUFFIXES "lib" "lib/Release" "lib/Debug" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
DOC "Path to ERT Eclipse C++ library archive/shared object files"
${_no_default_path}
@ -114,9 +110,7 @@ find_library (ERT_LIBRARY_ECL_WELL
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_BINARY_DIR}/../ert"
"${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_SOURCE_DIR}/../ert/devel/build"
"${PROJECT_BINARY_DIR}/../ert-build"
"${PROJECT_BINARY_DIR}/../ert/devel"
PATH_SUFFIXES "lib" "lib/Release" "lib/Debug" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
DOC "Path to ERT Eclipse library archive/shared object files"
${_no_default_path}
@ -126,9 +120,7 @@ find_library (ERT_LIBRARY_GEOMETRY
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_BINARY_DIR}/../ert"
"${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_SOURCE_DIR}/../ert/devel/build"
"${PROJECT_BINARY_DIR}/../ert-build"
"${PROJECT_BINARY_DIR}/../ert/devel"
PATH_SUFFIXES "lib" "lib/Release" "lib/Debug" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
DOC "Path to ERT Geometry library archive/shared object files"
${_no_default_path}
@ -138,9 +130,17 @@ find_library (ERT_LIBRARY_UTIL
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_BINARY_DIR}/../ert"
"${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_SOURCE_DIR}/../ert/devel/build"
"${PROJECT_BINARY_DIR}/../ert-build"
"${PROJECT_BINARY_DIR}/../ert/devel"
PATH_SUFFIXES "lib" "lib/Release" "lib/Debug" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
DOC "Path to ERT Utilities library archive/shared object files"
${_no_default_path}
)
find_library (ERT_LIBRARY_UTILXX
NAMES "ert_utilxx"
HINTS "${ERT_ROOT}"
PATHS "${PROJECT_BINARY_DIR}/../ert"
"${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_BINARY_DIR}/../ert-build"
PATH_SUFFIXES "lib" "lib/Release" "lib/Debug" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
DOC "Path to ERT Utilities library archive/shared object files"
${_no_default_path}
@ -160,6 +160,7 @@ list (APPEND ERT_LIBRARY
${ERT_LIBRARY_ECL_WELL}
${ERT_LIBRARY_GEOMETRY}
${ERT_LIBRARY_UTIL}
${ERT_LIBRARY_UTILXX}
)
list (APPEND ERT_LIBRARIES ${ERT_LIBRARY})
list (APPEND ERT_INCLUDE_DIRS ${ERT_INCLUDE_DIR})
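For orientation, a minimal consumer sketch (the target name my_app is hypothetical): once this module has run, the aggregated ERT_INCLUDE_DIRS and ERT_LIBRARIES, which now also carry ert_utilxx, are used in the usual way.

find_package(ERT REQUIRED)
include_directories(${ERT_INCLUDE_DIRS})
# ERT_LIBRARIES includes ERT_LIBRARY_UTILXX when the C++ utility library is found.
target_link_libraries(my_app ${ERT_LIBRARIES})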

View File

@ -23,20 +23,18 @@ if(PYTHONINTERP_FOUND)
endif()
set(PYTHON_INSTALL_PREFIX "lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/${PYTHON_PACKAGE_PATH}" CACHE STRING "Subdirectory to install Python modules in")
set(PATH_LIST)
if (ERT_ROOT)
set( start_path "${ERT_ROOT}/${PYTHON_INSTALL_PREFIX}" )
else()
set( start_path "DEFAULT_PATH")
list(APPEND PATH_LIST ${ERT_ROOT})
endif()
list(APPEND PATH_LIST ${CMAKE_PREFIX_PATH})
set( PATH_LIST "${start_path}"
"${PROJECT_SOURCE_DIR}/../ert/build/${PYTHON_INSTALL_PREFIX}"
"${PROJECT_SOURCE_DIR}/../ert/devel/build/${PYTHON_INSTALL_PREFIX}"
"${PROJECT_BINARY_DIR}/../ert-build/${PYTHON_INSTALL_PREFIX}"
"${PROJECT_BINARY_DIR}/../ert/devel/${PYTHON_INSTALL_PREFIX}")
# Add various popular sibling alternatives.
list(APPEND PATH_LIST "${PROJECT_SOURCE_DIR}/../ert/build"
"${PROJECT_BINARY_DIR}/../ert-build")
foreach( PATH ${PATH_LIST})
set( python_code "import sys; sys.path.insert(0 , '${PATH}'); import os.path; import inspect; import ert; print os.path.dirname(os.path.dirname(inspect.getfile(ert)))")
set( python_code "import sys; sys.path.insert(0 , '${PATH}/${PYTHON_INSTALL_PREFIX}'); import os.path; import inspect; import ert; print os.path.dirname(os.path.dirname(inspect.getfile(ert))); from ert.ecl import EclSum")
execute_process( COMMAND ${PYTHON_EXECUTABLE} -c "${python_code}"
RESULT_VARIABLE import_result
OUTPUT_VARIABLE stdout_output

View File

@ -107,22 +107,35 @@ if (Petsc_ROOT)
set (PETSC_ROOT "${Petsc_ROOT}")
endif (Petsc_ROOT)
find_path (PETSC_NORMAL_INCLUDE_DIR
NAMES "petsc.h"
PATHS ${PETSC_ROOT}
PATH_SUFFIXES "include" "petsc"
${_no_default_path}
)
find_package(PkgConfig)
if(PKG_CONFIG_FOUND)
set(OLD_PKG $ENV{PKG_CONFIG_PATH})
set(ENV{PKG_CONFIG_PATH} $ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig)
pkg_check_modules(PETSC PETSc>=3.4.0)
set(ENV{PKG_CONFIG_PATH} ${OLD_PKG})
set(PETSC_LIBRARIES ${PETSC_STATIC_LDFLAGS})
set(PETSC_LIBRARY ${PETSC_LIBRARIES})
set(PETSC_INCLUDE_DIR ${PETSC_INCLUDE_DIRS})
endif()
list(APPEND PETSC_INCLUDE_DIR ${PETSC_NORMAL_INCLUDE_DIR})
if(NOT PETSC_FOUND)
find_path (PETSC_NORMAL_INCLUDE_DIR
NAMES "petsc.h"
PATHS ${PETSC_ROOT}
PATH_SUFFIXES "include" "petsc"
${_no_default_path}
)
# look for actual Petsc library
find_library(PETSC_LIBRARY
NAMES "petsc-3.4.3" "petsc-3.4.4" "petsc"
PATHS ${PETSC_ROOT}
PATH_SUFFIXES "lib" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
${_no_default_path}
)
list(APPEND PETSC_INCLUDE_DIR ${PETSC_NORMAL_INCLUDE_DIR})
# look for actual Petsc library
find_library(PETSC_LIBRARY
NAMES "petsc-3.4.3" "petsc-3.4.4" "petsc"
PATHS ${PETSC_ROOT}
PATH_SUFFIXES "lib" "lib${_BITS}" "lib/${CMAKE_LIBRARY_ARCHITECTURE}"
${_no_default_path}
)
endif()
if(NOT PETSC_LIBRARY)
message(STATUS "Could not find the PETSc library")
@ -136,7 +149,7 @@ mark_as_advanced(PETSC_INCLUDE_DIR PETSC_LIBRARY)
# if both headers and library are found, store results
if(PETSC_FOUND)
set(PETSC_INCLUDE_DIRS ${PETSC_INCLUDE_DIR})
set(PETSC_INCLUDE_DIRS ${PETSC_INCLUDE_DIR})
list(APPEND PETSC_INCLUDE_DIRS ${PETSC_MPI_INCLUDE_DIRS})
set(PETSC_LIBRARIES ${PETSC_LIBRARY})
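A sketch, not part of the commit, of how the pkg-config branch above is driven and consumed; the simulator target name is a placeholder and the PETSc paths are examples.

# Point pkg-config at the PETSc build via the standard environment, e.g.
#   PETSC_DIR=/opt/petsc PETSC_ARCH=arch-linux-opt cmake <source-dir>
if (PETSC_FOUND)
  include_directories(${PETSC_INCLUDE_DIRS})
  target_link_libraries(simulator ${PETSC_LIBRARIES})
endif()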

View File

@ -42,7 +42,7 @@ if (USE_QUADMATH AND NOT QUADMATH_FOUND)
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(QuadMath
find_package_handle_standard_args(Quadmath
DEFAULT_MSG
QUADMATH_LIBRARIES
HAVE_QUAD

View File

@ -57,6 +57,7 @@ int main (void) {
HAVE_NULLPTR;
HAVE_STATIC_ASSERT;
HAVE_SHARED_PTR;
MPI_2;
SHARED_PTR_HEADER;
SHARED_PTR_NAMESPACE;
HAVE_TYPE_TRAITS;
@ -66,6 +67,17 @@ int main (void) {
")
#debug_find_vars ("dune-common")
if(MPI_C_FOUND)
# check for MPI version 2
include(CMakePushCheckState)
include(CheckFunctionExists)
cmake_push_check_state()
set(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES};${MPI_C_LIBRARIES})
set(CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES};${MPI_C_INCLUDES})
check_function_exists(MPI_Finalized MPI_2)
cmake_pop_check_state()
endif(MPI_C_FOUND)
# make version number available in config.h
include (UseDuneVer)
find_dune_version ("dune" "common")

View File

@ -23,7 +23,7 @@ find_opm_package (
"${opm-flowdiagnostics_DEPS}"
# header to search for
"opm/flowdiagnostics/reorder/tarjan.h"
"opm/flowdiagnostics/Toolbox.hpp"
# library to search for
"opmflowdiagnostics"
@ -32,16 +32,15 @@ find_opm_package (
""
# test program
"#include <opm/flowdiagnostics/reorder/tarjan.h>
int main() {
const int ia[] = { 0, 0, 1, 2, 4 };
const int ja[] = { 0, 0, 1, 2 };
int vert[4] = { 0 };
int comp[4 + 1] = { 0 };
int ncomp = 0 ;
int work[3 * 4] = { 0 };
"#include <opm/flowdiagnostics/Toolbox.hpp>
tarjan(4, ia, ja, vert, comp, &ncomp, work);
#include <vector>
int main()
{
using FDT = Opm::FlowDiagnostics::Toolbox;
const auto pv = std::vector<double>(10, 0.3);
}
"
# config variables

View File

@ -30,9 +30,9 @@ find_opm_package (
""
# test program
"#include <opm/output/OutputWriter.hpp>
"#include <opm/output/eclipse/Summary.hpp>
int main (void) {
return 0;
return 0;
}
"
# config variables
@ -40,3 +40,16 @@ int main (void) {
)
include (UseDynamicBoost)
#debug_find_vars ("opm-output")
if(OPM_OUTPUT_FOUND)
get_filename_component(opm-output_PREFIX_DIR ${opm-output_LIBRARY} PATH)
find_program(COMPARE_SUMMARY_COMMAND compareSummary
PATHS ${opm-output_PREFIX_DIR}/../bin
${opm-output_PREFIX_DIR}/../../bin)
find_program(COMPARE_ECL_COMMAND compareECL
PATHS ${opm-output_PREFIX_DIR}/../bin
${opm-output_PREFIX_DIR}/../../bin)
endif()

View File

@ -0,0 +1,7 @@
function (opm_add_python_test TEST_NAME TEST_SCRIPT)
add_test(NAME ${TEST_NAME}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMAND ${TEST_SCRIPT} ${ARGN})
set_property(TEST ${TEST_NAME} PROPERTY ENVIRONMENT "PYTHONPATH=${ERT_PYTHON_PATH}:${CWRAP_PYTHON_PATH}:${PYTHONPATH}")
endfunction(opm_add_python_test)
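A hypothetical usage of the new helper (test name, script, and data file are placeholders); any arguments after the script are forwarded to it through ${ARGN}.

opm_add_python_test(pytest_summary
                    ${PROJECT_SOURCE_DIR}/tests/test_summary.py
                    ${PROJECT_SOURCE_DIR}/tests/data/CASE.DATA)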

View File

@ -1,4 +1,8 @@
# - Build satellites that are dependent of main library
option(ADD_DISABLED_CTESTS "Add the tests which are disabled due to failed preconditions to the ctest output (this makes ctest return an error if such a test is present)" ON)
mark_as_advanced(ADD_DISABLED_CTESTS)
#
# Enumerate all source code in a "satellite" directory such as tests/,
# compile each of them and optionally set them as a test for CTest to
@ -337,7 +341,7 @@ macro(opm_add_test TestName)
# the following causes the test to appear as 'skipped' in the
# CDash dashboard. If this is removed, the test is just silently
# ignored.
if (NOT CURTEST_ONLY_COMPILE)
if (NOT CURTEST_ONLY_COMPILE AND ADD_DISABLED_CTESTS)
add_test(${TestName} skip_test_dummy)
endif()
endif()

View File

@ -25,12 +25,12 @@ set (ewoms_DEPS
"CXX11Features REQUIRED"
# DUNE prerequisites
"dune-common REQUIRED"
"dune-localfunctions REQUIRED"
"dune-geometry REQUIRED"
"dune-grid REQUIRED"
"dune-istl REQUIRED"
"opm-common REQUIRED"
"opm-material REQUIRED"
"dune-localfunctions"
"dune-alugrid"
"dune-fem"
"opm-parser"

View File

@ -0,0 +1,21 @@
# -*- mode: cmake; tab-width: 2; indent-tabs-mode: t; truncate-lines: t; compile-command: "cmake -Wdev" -*-
# vim: set filetype=cmake autoindent tabstop=2 shiftwidth=2 noexpandtab softtabstop=2 nowrap:
# defines that must be present in config.h for our headers
set (opm-flowdiagnostics-applications_CONFIG_VAR
)
# dependencies
set (opm-flowdiagnostics-applications_DEPS
# compile with C99 support if available
"C99"
# compile with C++0x/11 support if available
"CXX11Features REQUIRED"
"Boost 1.44.0
COMPONENTS filesystem system unit_test_framework REQUIRED"
"ERT REQUIRED"
# prerequisite OPM modules
"opm-common REQUIRED;
opm-flowdiagnostics REQUIRED;
opm-core REQUIRED"
)

View File

@ -14,7 +14,7 @@ set (opm-output_DEPS
"CXX11Features REQUIRED"
# various runtime library enhancements
"Boost 1.44.0
COMPONENTS filesystem system unit_test_framework REQUIRED"
COMPONENTS unit_test_framework REQUIRED"
# Ensembles-based Reservoir Tools (ERT)
"ERT REQUIRED"
# Look for MPI support

View File

@ -19,7 +19,8 @@ set (opm-simulators_DEPS
# DUNE prerequisites
"dune-common REQUIRED;
dune-istl REQUIRED"
# OPM dependency
"ERTPython"
# OPM dependency
"opm-common REQUIRED;
opm-parser REQUIRED;
opm-core REQUIRED;

View File

@ -1,17 +1,100 @@
#!/bin/bash
declare -A configurations
# Parse revisions from trigger comment and setup arrays
# Depends on: 'upstreams', 'upstreamRev',
# 'downstreams', 'downstreamRev',
# 'ghprbCommentBody',
# 'CONFIGURATIONS', 'TOOLCHAINS'
function parseRevisions {
for upstream in ${upstreams[*]}
do
if grep -qi "$upstream=" <<< $ghprbCommentBody
then
upstreamRev[$upstream]=pull/`echo $ghprbCommentBody | sed -r "s/.*${upstream,,}=([0-9]+).*/\1/g"`/merge
fi
done
if grep -q "with downstreams" <<< $ghprbCommentBody
then
for downstream in ${downstreams[*]}
do
if grep -qi "$downstream=" <<< $ghprbCommentBody
then
downstreamRev[$downstream]=pull/`echo $ghprbCommentBody | sed -r "s/.*${downstream,,}=([0-9]+).*/\1/g"`/merge
fi
done
fi
# Default to a serial build if no types are given
if test -z "$BTYPES"
then
BTYPES="serial"
fi
# Convert to arrays for easy looping
declare -a BTYPES_ARRAY
for btype in $BTYPES
do
BTYPES_ARRAY=($BTYPES_ARRAY $btype)
done
TOOLCHAIN_ARRAY=($CMAKE_TOOLCHAIN_FILES)
for index in ${!BTYPES_ARRAY[*]}
do
key=${BTYPES_ARRAY[$index]}
data=${TOOLCHAIN_ARRAY[$index]}
configurations[$key]=$data
done
}
# Print revisions and configurations
# $1 = Name of main module
# Depends on: 'upstreams', 'upstreamRev',
# 'downstreams', 'downstreamRev',
# 'ghprbCommentBody',
# 'configurations', 'sha1'
function printHeader {
echo -e "Repository revisions:"
for upstream in ${upstreams[*]}
do
echo -e "\t [upstream] $upstream=${upstreamRev[$upstream]}"
done
echo -e "\t[main module] $1=$sha1"
if grep -q "with downstreams" <<< $ghprbCommentBody
then
for downstream in ${downstreams[*]}
do
echo -e "\t [downstream] $downstream=${downstreamRev[$downstream]}"
done
fi
echo "Configurations to process:"
for conf in ${!configurations[@]}
do
echo -e "\t$conf=${configurations[$conf]}"
done
}
# $1 = Additional cmake parameters
# $2 = 0 to build and install module, 1 to build and test module
# $3 = Source root of module to build
function build_module {
cmake $3 -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=$2 $1
cmake $3 -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=$2 -DCMAKE_TOOLCHAIN_FILE=${configurations[$configuration]} $1
test $? -eq 0 || exit 1
if test $2 -eq 1
then
cmake --build .
test $? -eq 0 || exit 2
ctest -T Test --no-compress-output
# Convert to junit format
$WORKSPACE/deps/opm-common/jenkins/convert.py -x $WORKSPACE/deps/opm-common/jenkins/conv.xsl -t . > testoutput.xml
if ! grep -q "with downstreams" <<< $ghprbCommentBody
then
# Add configuration name
sed -e "s/classname=\"TestSuite\"/classname=\"${configuration}\"/g" testoutput.xml > $WORKSPACE/$configuration/testoutput.xml
fi
else
cmake --build . --target install
fi
@ -20,11 +103,18 @@ function build_module {
# $1 = Name of module
# $2 = git-rev to use for module
function clone_module {
# Already cloned by an earlier configuration
test -d $WORKSPACE/deps/$1 && return 0
pushd .
mkdir -p $WORKSPACE/deps/$1
cd $WORKSPACE/deps/$1
git init .
git remote add origin https://github.com/OPM/$1
if [ "$1" == "ert" ]
then
git remote add origin https://github.com/Ensembles/$1
else
git remote add origin https://github.com/OPM/$1
fi
git fetch --depth 1 origin $2:branch_to_build
git checkout branch_to_build
test $? -eq 0 || exit 1
@ -38,7 +128,7 @@ function clone_module {
function clone_and_build_module {
clone_module $1 $3
pushd .
mkdir $4/build-$1
mkdir -p $4/build-$1
cd $4/build-$1
test_build=0
if test -n "$5"
@ -55,10 +145,9 @@ function clone_and_build_module {
function build_upstreams {
for upstream in ${upstreams[*]}
do
echo "Building upstream $upstream=${upstreamRev[$upstream]}"
echo "Building upstream $upstream=${upstreamRev[$upstream]} configuration=$configuration"
# Build upstream and execute installation
clone_and_build_module $upstream "-DCMAKE_PREFIX_PATH=$WORKSPACE/serial/install -DCMAKE_INSTALL_PREFIX=$WORKSPACE/serial/install" ${upstreamRev[$upstream]} $WORKSPACE/serial
clone_and_build_module $upstream "-DCMAKE_PREFIX_PATH=$WORKSPACE/$configuration/install -DCMAKE_INSTALL_PREFIX=$WORKSPACE/$configuration/install" ${upstreamRev[$upstream]} $WORKSPACE/$configuration
test $? -eq 0 || exit 1
done
test $? -eq 0 || exit 1
@ -69,38 +158,55 @@ function build_upstreams {
# which holds the default revisions to use for downstreams
function build_downstreams {
pushd .
cd $WORKSPACE/serial/build-$1
cd $WORKSPACE/$configuration/build-$1
cmake --build . --target install
popd
egrep_cmd="xml_grep --wrap testsuites --cond testsuite $WORKSPACE/serial/build-$1/testoutput.xml"
egrep_cmd="xml_grep --wrap testsuites --cond testsuite $WORKSPACE/$configuration/build-$1/testoutput.xml"
for downstream in ${downstreams[*]}
do
if grep -q "$downstream=" <<< $ghprbCommentBody
then
downstreamRev[$downstream]=pull/`echo $ghprbCommentBody | sed -r "s/.*$downstream=([0-9]+).*/\1/g"`/merge
fi
echo "Building downstream $downstream=${downstreamRev[$downstream]}"
echo "Building downstream $downstream=${downstreamRev[$downstream]} configuration=$configuration"
# Build downstream and execute installation
# Additional cmake parameters:
# OPM_DATA_ROOT - passed for modules having opm-data based integration tests
# USE_QUADMATH - used by ewoms to disable quadmath support (makes tests usable)
clone_and_build_module $downstream "-DCMAKE_PREFIX_PATH=$WORKSPACE/serial/install -DCMAKE_INSTALL_PREFIX=$WORKSPACE/serial/install -DOPM_DATA_ROOT=$OPM_DATA_ROOT -DUSE_QUADMATH=0" ${downstreamRev[$downstream]} $WORKSPACE/serial 1
code=$?
# ewoms skips tests in nasty ways. ignore return code
if [ "$downstream" != "ewoms" ]
then
test $code -eq 0 || exit 1
fi
clone_and_build_module $downstream "-DCMAKE_PREFIX_PATH=$WORKSPACE/$configuration/install -DCMAKE_INSTALL_PREFIX=$WORKSPACE/$configuration/install -DOPM_DATA_ROOT=$OPM_DATA_ROOT" ${downstreamRev[$downstream]} $WORKSPACE/$configuration 1
test $? -eq 0 || exit 1
# Installation for downstream
pushd .
cd $WORKSPACE/serial/build-$downstream
cd $WORKSPACE/$configuration/build-$downstream
cmake --build . --target install
popd
egrep_cmd="$egrep_cmd $WORKSPACE/serial/build-$downstream/testoutput.xml"
egrep_cmd="$egrep_cmd $WORKSPACE/$configuration/build-$downstream/testoutput.xml"
done
$egrep_cmd > testoutput.xml
$egrep_cmd > $WORKSPACE/$configuration/testoutput.xml
# Add testsuite name
sed -e "s/classname=\"TestSuite\"/classname=\"${configuration}\"/g" -i $WORKSPACE/$configuration/testoutput.xml
test $? -eq 0 || exit 1
}
# $1 = Name of main module
function build_module_full {
for configuration in ${!configurations[@]}
do
# Build upstream modules
build_upstreams
# Build main module
pushd .
mkdir -p $configuration/build-$1
cd $configuration/build-$1
echo "Building main module $1=$sha1 configuration=$configuration"
build_module "-DCMAKE_INSTALL_PREFIX=$WORKSPACE/$configuration/install -DOPM_DATA_ROOT=$OPM_DATA_ROOT" 1 $WORKSPACE
test $? -eq 0 || exit 1
popd
# Build downstream modules if requested
if grep -q "with downstreams" <<< $ghprbCommentBody
then
build_downstreams $1
test $? -eq 0 || exit 1
fi
done
}

View File

@ -6,32 +6,10 @@ source `dirname $0`/build-opm-module.sh
mkdir deps
ln -sf $WORKSPACE deps/opm-common
pushd .
mkdir -p serial/build-opm-common
cd serial/build-opm-common
build_module "-DCMAKE_INSTALL_PREFIX=$WORKSPACE/serial/install" 1 $WORKSPACE
test $? -eq 0 || exit 1
popd
# If no downstream builds we are done
if ! grep -q "with downstreams" <<< $ghprbCommentBody
then
cp serial/build-opm-common/testoutput.xml .
exit 0
fi
ERT_REVISION=master
if grep -q "ert=" <<< $ghprbCommentBody
then
ERT_REVISION=pull/`echo $ghprbCommentBody | sed -r 's/.*ert=([0-9]+).*/\1/g'`/merge
fi
source $WORKSPACE/deps/opm-common/jenkins/setup-opm-data.sh
# Downstream revisions
# Downstreams and revisions
declare -a downstreams
downstreams=(opm-parser
downstreams=(ert
opm-parser
opm-output
opm-material
opm-core
@ -41,6 +19,7 @@ downstreams=(opm-parser
ewoms)
declare -A downstreamRev
downstreamRev[ert]=master
downstreamRev[opm-parser]=master
downstreamRev[opm-material]=master
downstreamRev[opm-core]=master
@ -50,25 +29,13 @@ downstreamRev[opm-simulators]=master
downstreamRev[opm-upscaling]=master
downstreamRev[ewoms]=master
# Build ERT
echo "Building downstream ert=$ERT_REVISION"
pushd .
mkdir -p $WORKSPACE/deps/ert
cd $WORKSPACE/deps/ert
git init .
git remote add origin https://github.com/Ensembles/ert
git fetch --depth 1 origin $ERT:branch_to_build
test $? -eq 0 || exit 1
git checkout branch_to_build
popd
parseRevisions
printHeader opm-common
pushd .
mkdir -p serial/build-ert
cd serial/build-ert
cmake $WORKSPACE/deps/ert/devel -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$WORKSPACE/serial/install
cmake --build . --target install
popd
# Setup opm-data if necessary
if grep -q "with downstreams" <<< $ghprbCommentBody
then
source $WORKSPACE/deps/opm-common/jenkins/setup-opm-data.sh
fi
build_downstreams opm-common
test $? -eq 0 || exit 1
build_module_full opm-common

View File

@ -45,6 +45,11 @@ namespace Opm {
EclipsePRTLog::~EclipsePRTLog()
{
if( ! print_summary_ )
{
return;
}
//output summary.
const std::string summary_msg = "\n\nError summary:" +
std::string("\nWarnings " + std::to_string(numMessages(Log::MessageType::Warning))) +
@ -56,4 +61,17 @@ namespace Opm {
StreamLog::addTaggedMessage(Log::MessageType::Info, "", summary_msg);
}
EclipsePRTLog::EclipsePRTLog(const std::string& logFile,
int64_t messageMask,
bool append,
bool print_summary)
: StreamLog(logFile, messageMask, append),
print_summary_(print_summary)
{}
EclipsePRTLog::EclipsePRTLog(std::ostream& os,
int64_t messageMask,
bool print_summary)
: StreamLog(os, messageMask), print_summary_(print_summary)
{}
}

View File

@ -37,8 +37,27 @@ public:
~EclipsePRTLog();
/// \brief Construct a logger to the <MODEL>.PRT file
/// \param logFile The name of the logfile to use.
/// \param messageMask ????
/// \param append If true then we append messages to the file.
/// Otherwise a new file is created.
/// \param print_summary If true print a summary to the PRT file.
EclipsePRTLog(const std::string& logFile , int64_t messageMask,
bool append, bool print_summary);
/// \brief Construct a logger writing to an already opened output stream.
/// \param os The output stream to write messages to.
/// \param messageMask ????
/// \param print_summary If true print a summary to the stream.
EclipsePRTLog(std::ostream& os , int64_t messageMask,
bool print_summary);
private:
std::map<int64_t, size_t> m_count;
/// \brief Whether to print a summary to the log file.
bool print_summary_ = true;
};
}
#endif // ECLIPSEPRTLOG_H

View File

@ -35,7 +35,7 @@ namespace Log {
std::string fileMessage(const std::string& filename , int line , const std::string& message) {
std::ostringstream oss;
oss << filename << ":" << line << ": " << message;
oss << message << "\n" << "In file " << filename << ", line " << line << "\n";
return oss.str();
}

View File

@ -210,11 +210,11 @@ namespace Opm {
}
const std::unordered_map<std::string, std::vector<double>>& SimulationDataContainer::cellData() const {
const std::map<std::string, std::vector<double>>& SimulationDataContainer::cellData() const {
return m_cell_data;
}
std::unordered_map<std::string, std::vector<double>>& SimulationDataContainer::cellData() {
std::map<std::string, std::vector<double>>& SimulationDataContainer::cellData() {
return m_cell_data;
}

View File

@ -22,7 +22,7 @@
#include <cstddef>
#include <string>
#include <unordered_map>
#include <map>
#include <vector>
namespace Opm {
@ -118,8 +118,8 @@ namespace Opm {
const std::vector<double>& facepressure() const;
const std::vector<double>& faceflux () const;
const std::unordered_map<std::string, std::vector<double>>& cellData() const;
std::unordered_map<std::string, std::vector<double>>& cellData();
const std::map<std::string, std::vector<double>>& cellData() const;
std::map<std::string, std::vector<double>>& cellData();
private:
void addDefaultFields();
@ -129,8 +129,8 @@ namespace Opm {
size_t m_num_faces;
size_t m_num_phases;
std::unordered_map< std::string , std::vector<double> > m_cell_data;
std::unordered_map< std::string , std::vector<double> > m_face_data;
std::map< std::string , std::vector<double> > m_cell_data;
std::map< std::string , std::vector<double> > m_face_data;
std::vector<double>* pressure_ref_;
std::vector<double>* temperature_ref_;

View File

@ -13,7 +13,7 @@ Group: Development/Libraries/C and C++
Url: http://www.opm-project.org/
Source0: https://github.com/OPM/%{name}/archive/release/%{version}/%{tag}.tar.gz#/%{name}-%{version}.tar.gz
BuildRequires: git doxygen bc
%{?el6:BuildRequires: devtoolset-2 cmake28 boost148-devel}
%{?el6:BuildRequires: devtoolset-3-toolchain cmake28 boost148-devel}
%{!?el6:BuildRequires: gcc gcc-c++ cmake boost-devel}
BuildRoot: %{_tmppath}/%{name}-%{version}-build
@ -48,8 +48,8 @@ This package contains the documentation files for opm-common
# consider using -DUSE_VERSIONED_DIR=ON if backporting
%build
%{?el6:scl enable devtoolset-2 bash}
%{?el6:cmake28} %{?!el6:cmake} -DBUILD_SHARED_LIBS=1 -DCMAKE_BUILD_TYPE=RelWithDebInfo -DSTRIP_DEBUGGING_SYMBOLS=ON -DCMAKE_INSTALL_PREFIX=%{_prefix} -DCMAKE_INSTALL_DOCDIR=share/doc/%{name}-%{version} -DUSE_RUNPATH=OFF %{?el6:-DCMAKE_CXX_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/g++ -DCMAKE_C_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/gcc -DCMAKE_Fortran_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/gfortran -DBOOST_LIBRARYDIR=%{_libdir}/boost148 -DBOOST_INCLUDEDIR=%{_includedir}/boost148}
%{?el6:scl enable devtoolset-3 bash}
%{?el6:cmake28} %{?!el6:cmake} -DBUILD_SHARED_LIBS=1 -DCMAKE_BUILD_TYPE=RelWithDebInfo -DSTRIP_DEBUGGING_SYMBOLS=ON -DCMAKE_INSTALL_PREFIX=%{_prefix} -DCMAKE_INSTALL_DOCDIR=share/doc/%{name}-%{version} -DUSE_RUNPATH=OFF %{?el6:-DCMAKE_CXX_COMPILER=/opt/rh/devtoolset-3/root/usr/bin/g++ -DCMAKE_C_COMPILER=/opt/rh/devtoolset-3/root/usr/bin/gcc -DCMAKE_Fortran_COMPILER=/opt/rh/devtoolset-3/root/usr/bin/gfortran -DBOOST_LIBRARYDIR=%{_libdir}/boost148 -DBOOST_INCLUDEDIR=%{_includedir}/boost148}
make
%install

View File

@ -46,7 +46,7 @@ BOOST_AUTO_TEST_CASE(DoLogging) {
BOOST_AUTO_TEST_CASE(Test_Format) {
BOOST_CHECK_EQUAL( "/path/to/file:100: There is a mild fuckup here?" , Log::fileMessage("/path/to/file" , 100 , "There is a mild fuckup here?"));
BOOST_CHECK_EQUAL( "There is a mild fuckup here?\nIn file /path/to/file, line 100\n" , Log::fileMessage("/path/to/file" , 100 , "There is a mild fuckup here?"));
BOOST_CHECK_EQUAL( "Error: This is the error" , Log::prefixMessage(Log::MessageType::Error , "This is the error"));
BOOST_CHECK_EQUAL( "Warning: This is the warning" , Log::prefixMessage(Log::MessageType::Warning , "This is the warning"));
@ -248,8 +248,8 @@ BOOST_AUTO_TEST_CASE(TestHelperFunctions)
BOOST_CHECK(isPower2(1ul << 62));
// fileMessage
BOOST_CHECK_EQUAL(fileMessage("foo/bar", 1, "message"), "foo/bar:1: message");
BOOST_CHECK_EQUAL(fileMessage(MessageType::Error, "foo/bar", 1, "message"), "foo/bar:1: Error: message");
BOOST_CHECK_EQUAL(fileMessage("foo/bar", 1, "message"), "message\nIn file foo/bar, line 1\n");
BOOST_CHECK_EQUAL(fileMessage(MessageType::Error, "foo/bar", 1, "message"), "Error: message\nIn file foo/bar, line 1\n");
// prefixMessage
BOOST_CHECK_EQUAL(prefixMessage(MessageType::Error, "message"), "Error: message");

View File

@ -1,7 +1,7 @@
#!/bin/bash
set -e
build_order=(opm-common opm-parser opm-material opm-output opm-core opm-grid opm-simulators opm-upscaling)
build_order=(opm-common opm-parser opm-material opm-output opm-core opm-grid ewoms opm-simulators opm-upscaling)
# This shell script should be started with the name of a module as
# the only command line argument. It will start by building all
@ -25,13 +25,12 @@ build_order=(opm-common opm-parser opm-material opm-output opm-core opm-grid opm
# This can typically be achieved by using the 'clone-opm.sh' script.
function upstream_build {
project=${1}
echo "Building: ${project}"
mkdir -p ${project}/build
pushd ${project}/build > /dev/null
cmake ../ -DENABLE_PYTHON=ON -DBUILD_TESTING=OFF -DSILENCE_EXTERNAL_WARNINGS=True
cmake ../ -DENABLE_PYTHON=ON -DBUILD_TESTING=OFF -DSILENCE_EXTERNAL_WARNINGS=True -DUSE_QUADMATH=OFF -DADD_DISABLED_CTESTS=OFF
make
popd > /dev/null
}
@ -45,12 +44,17 @@ function downstream_build_and_test {
# The build commands cmake, make and ctest must be given as
# separate commands and not chained with &&. If chaining with &&
# is used the 'set -e' does not exit on first error.
cmake ../ -DENABLE_PYTHON=ON -DBUILD_TESTING=ON -DSILENCE_EXTERNAL_WARNINGS=True
cmake ../ -DENABLE_PYTHON=ON -DBUILD_TESTING=ON -DSILENCE_EXTERNAL_WARNINGS=True -DUSE_QUADMATH=OFF -DADD_DISABLED_CTESTS=OFF
make
ctest --output-on-failure
popd > /dev/null
}
#-----------------------------------------------------------------
export CONDA_HOME="$HOME/miniconda"
export PATH="$CONDA_HOME/bin:$PATH"
for i in "${!build_order[@]}"; do
if [[ "${build_order[$i]}" = "$1" ]]; then

View File

@ -47,12 +47,27 @@ function build_superlu {
}
function install_python_deps {
export TRAVIS_PYTHON_VERSION="2.7"
wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
bash miniconda.sh -b -p $HOME/miniconda
export CONDA_HOME="$HOME/miniconda"
export PATH="$CONDA_HOME/bin:$PATH"
hash -r
conda config --set always_yes yes --set changeps1 no
conda update -q conda
conda install numpy
}
function build_ert {
install_python_deps
git clone https://github.com/Ensembles/ert.git
mkdir -p ert/build
pushd ert/build > /dev/null
cmake ../devel && make
cmake .. && make
popd > /dev/null
}
@ -66,6 +81,4 @@ build_dune dune-common
build_dune dune-istl
build_dune dune-geometry
build_dune dune-grid
build_dune dune-localfunctions

View File

@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -e
project_list=(opm-data opm-parser opm-material opm-core opm-output opm-grid opm-simulators opm-upscaling ewoms)
project_list=(opm-data opm-parser opm-material ewoms opm-core opm-output opm-grid opm-simulators opm-upscaling)
# Will clone all the projects *except* the one project given as
# commandline argument; that has typically been checked out by travis