# opm-simulators/compareECLFiles.cmake
# This script manages the addition of tests.
# The tests are orchestrated by a shell script,
# configured using opm_set_test_driver(),
# and then the appropriate helper macro is called to
# register the ctest entry through the opm_add_test macro.
# Information such as the binary to call and the test tolerances
# is passed from the build system to the driver script through
# command line parameters. See the opm_add_test() documentation for
# details on the parameters passed to the macro, and the sketch after
# add_test_compareECLFiles() below for the resulting driver invocation.

# Define some paths
set(BASE_RESULT_PATH ${PROJECT_BINARY_DIR}/tests/results)

###########################################################################
# TEST: compareECLFiles
###########################################################################
# Input:
# - casename: basename (no extension)
#
# Details:
# - This test class compares output from a simulation to reference files.
function(add_test_compareECLFiles)
  set(oneValueArgs CASENAME FILENAME SIMULATOR ABS_TOL REL_TOL DIR DIR_PREFIX PREFIX)
  set(multiValueArgs TEST_ARGS)
  cmake_parse_arguments(PARAM "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
  if(NOT PARAM_DIR)
    set(PARAM_DIR ${PARAM_CASENAME})
  endif()
  if(NOT PARAM_PREFIX)
    set(PARAM_PREFIX compareECLFiles)
  endif()
  set(RESULT_PATH ${BASE_RESULT_PATH}${PARAM_DIR_PREFIX}/${PARAM_SIMULATOR}+${PARAM_CASENAME})
  set(TEST_ARGS ${OPM_TESTS_ROOT}/${PARAM_DIR}/${PARAM_FILENAME} ${PARAM_TEST_ARGS})
  opm_add_test(${PARAM_PREFIX}_${PARAM_SIMULATOR}+${PARAM_FILENAME} NO_COMPILE
               EXE_NAME ${PARAM_SIMULATOR}
               DRIVER_ARGS ${OPM_TESTS_ROOT}/${PARAM_DIR} ${RESULT_PATH}
                           ${PROJECT_BINARY_DIR}/bin
                           ${PARAM_FILENAME}
                           ${PARAM_ABS_TOL} ${PARAM_REL_TOL}
                           ${COMPARE_SUMMARY_COMMAND}
                           ${COMPARE_ECL_COMMAND}
               TEST_ARGS ${TEST_ARGS})
endfunction()
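
# A rough sketch of the driver invocation that a registration such as the spe1
# case further down produces, assuming opm_add_test() forwards DRIVER_ARGS to
# the driver configured with opm_set_test_driver() in the order listed above
# (paths are illustrative, not literal):
#
#   run-regressionTest.sh <opm-tests>/spe1 \
#                         <build>/tests/results/flow+spe1 \
#                         <build>/bin \
#                         SPE1CASE2 \
#                         2e-2 1e-2 \
#                         <compare-summary-binary> <compare-ecl-binary>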

###########################################################################
# TEST: add_test_compare_restarted_simulation
###########################################################################
# Input:
# - casename: basename (no extension)
#
# Details:
# - This test class compares the output from a restarted simulation
#   to that of a non-restarted simulation.
function(add_test_compare_restarted_simulation)
  set(oneValueArgs CASENAME FILENAME SIMULATOR ABS_TOL REL_TOL)
  set(multiValueArgs TEST_ARGS)
  cmake_parse_arguments(PARAM "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
  set(RESULT_PATH ${BASE_RESULT_PATH}/restart/${PARAM_SIMULATOR}+${PARAM_CASENAME})
  set(TEST_ARGS ${OPM_TESTS_ROOT}/${PARAM_CASENAME}/${PARAM_FILENAME} ${PARAM_TEST_ARGS})
  opm_add_test(compareRestartedSim_${PARAM_SIMULATOR}+${PARAM_FILENAME} NO_COMPILE
               EXE_NAME ${PARAM_SIMULATOR}
               DRIVER_ARGS ${OPM_TESTS_ROOT}/${PARAM_CASENAME} ${RESULT_PATH}
                           ${PROJECT_BINARY_DIR}/bin
                           ${PARAM_FILENAME}
                           ${PARAM_ABS_TOL} ${PARAM_REL_TOL}
                           ${COMPARE_SUMMARY_COMMAND}
                           ${COMPARE_ECL_COMMAND}
                           0
               TEST_ARGS ${TEST_ARGS})
endfunction()
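
# Note: relative to add_test_compareECLFiles(), the DRIVER_ARGS list above ends
# with an extra literal 0. Presumably this is a flag consumed by
# run-restart-regressionTest.sh; the parallel restart helper further down passes
# 1 in the same position (an assumption, not verified against the driver script).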

###########################################################################
# TEST: add_test_compare_parallel_simulation
###########################################################################
# Input:
# - casename: basename (no extension)
#
# Details:
# - This test class compares the output from a parallel simulation
#   to the output from the serial instance of the same model.
function(add_test_compare_parallel_simulation)
  set(oneValueArgs CASENAME FILENAME SIMULATOR ABS_TOL REL_TOL)
  set(multiValueArgs TEST_ARGS)
  cmake_parse_arguments(PARAM "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
  set(RESULT_PATH ${BASE_RESULT_PATH}/parallel/${PARAM_SIMULATOR}+${PARAM_CASENAME})
  set(TEST_ARGS ${OPM_TESTS_ROOT}/${PARAM_CASENAME}/${PARAM_FILENAME} ${PARAM_TEST_ARGS})
  # Add test that runs the parallel simulator and outputs the results to file
  opm_add_test(compareParallelSim_${PARAM_SIMULATOR}+${PARAM_FILENAME} NO_COMPILE
               EXE_NAME ${PARAM_SIMULATOR}
               DRIVER_ARGS ${OPM_TESTS_ROOT}/${PARAM_CASENAME} ${RESULT_PATH}
                           ${PROJECT_BINARY_DIR}/bin
                           ${PARAM_FILENAME}
                           ${PARAM_ABS_TOL} ${PARAM_REL_TOL}
                           ${COMPARE_SUMMARY_COMMAND}
                           ${COMPARE_ECL_COMMAND}
               TEST_ARGS ${TEST_ARGS})
endfunction()

###########################################################################
# TEST: add_test_compare_parallel_restarted_simulation
###########################################################################
# Input:
# - casename: basename (no extension)
#
# Details:
# - This test class compares the output from a restarted parallel simulation
#   to that of a non-restarted parallel simulation.
function(add_test_compare_parallel_restarted_simulation)
  set(oneValueArgs CASENAME FILENAME SIMULATOR ABS_TOL REL_TOL)
  set(multiValueArgs TEST_ARGS)
  cmake_parse_arguments(PARAM "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
  set(RESULT_PATH ${BASE_RESULT_PATH}/parallelRestart/${PARAM_SIMULATOR}+${PARAM_CASENAME})
  set(TEST_ARGS ${OPM_TESTS_ROOT}/${PARAM_CASENAME}/${PARAM_FILENAME} ${PARAM_TEST_ARGS})
  opm_add_test(compareParallelRestartedSim_${PARAM_SIMULATOR}+${PARAM_FILENAME} NO_COMPILE
               EXE_NAME ${PARAM_SIMULATOR}
               DRIVER_ARGS ${OPM_TESTS_ROOT}/${PARAM_CASENAME} ${RESULT_PATH}
                           ${PROJECT_BINARY_DIR}/bin
                           ${PARAM_FILENAME}
                           ${PARAM_ABS_TOL} ${PARAM_REL_TOL}
                           ${COMPARE_SUMMARY_COMMAND}
                           ${COMPARE_ECL_COMMAND}
                           1
               TEST_ARGS ${TEST_ARGS})
endfunction()
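
# The helper above mirrors add_test_compare_restarted_simulation(), but stores
# its results under parallelRestart/ and passes the literal 1 where the serial
# variant passes 0; presumably this tells the restart driver to run the
# restarted case in parallel (again an assumption about the driver's argument
# handling, not confirmed here).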

if(NOT TARGET test-suite)
  add_custom_target(test-suite)
endif()

# Regression tests
opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-regressionTest.sh "")

# Set the tolerances passed to the macros in the following tests
set(abs_tol 2e-2)
set(rel_tol 1e-5)
set(coarse_rel_tol 1e-2)

foreach(SIM flow flow_legacy)
  add_test_compareECLFiles(CASENAME spe1
                           FILENAME SPE1CASE2
                           SIMULATOR ${SIM}
                           ABS_TOL ${abs_tol}
                           REL_TOL ${coarse_rel_tol})
endforeach()

add_test_compareECLFiles(CASENAME spe1_2p
                         FILENAME SPE1CASE2_2P
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol}
                         DIR spe1)

add_test_compareECLFiles(CASENAME spe1_2p
                         FILENAME SPE1CASE2_2P
                         SIMULATOR flow_legacy
                         ABS_TOL ${abs_tol}
                         REL_TOL ${coarse_rel_tol}
                         DIR spe1)

add_test_compareECLFiles(CASENAME spe1_oilgas
                         FILENAME SPE1CASE2_OILGAS
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${coarse_rel_tol}
                         DIR spe1)

add_test_compareECLFiles(CASENAME spe1
                         FILENAME SPE1CASE1
                         SIMULATOR flow_sequential
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol})

add_test_compareECLFiles(CASENAME spe1_nowells
                         FILENAME SPE1CASE2_NOWELLS
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol}
                         DIR spe1)

add_test_compareECLFiles(CASENAME spe1_thermal
                         FILENAME SPE1CASE2_THERMAL
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol}
                         DIR spe1)

add_test_compareECLFiles(CASENAME ctaquifer_2d_oilwater
                         FILENAME 2D_OW_CTAQUIFER
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol}
                         DIR aquifer-oilwater)

foreach(SIM flow flow_legacy)
  add_test_compareECLFiles(CASENAME spe3
                           FILENAME SPE3CASE1
                           SIMULATOR ${SIM}
                           ABS_TOL ${abs_tol}
                           REL_TOL ${coarse_rel_tol}
                           TEST_ARGS tolerance_wells=1e-6 max_iter=20)
endforeach()

foreach(SIM flow flow_legacy)
  add_test_compareECLFiles(CASENAME spe9
                           FILENAME SPE9_CP_SHORT
                           SIMULATOR ${SIM}
                           ABS_TOL ${abs_tol}
                           REL_TOL ${rel_tol})
endforeach()

add_test_compareECLFiles(CASENAME spe9group
                         FILENAME SPE9_CP_GROUP
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol})

add_test_compareECLFiles(CASENAME msw_2d_h
                         FILENAME 2D_H__
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${coarse_rel_tol}
                         TEST_ARGS use_multisegment_well=true)

add_test_compareECLFiles(CASENAME msw_3d_hfa
                         FILENAME 3D_MSW
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol}
                         TEST_ARGS use_multisegment_well=true)

add_test_compareECLFiles(CASENAME polymer_oilwater
                         FILENAME 2D_OILWATER_POLYMER
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${rel_tol})

add_test_compareECLFiles(CASENAME polymer_simple2D
                         FILENAME 2D_THREEPHASE_POLY_HETER
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${coarse_rel_tol}
                         TEST_ARGS max_iter=20)

add_test_compareECLFiles(CASENAME spe5
                         FILENAME SPE5CASE1
                         SIMULATOR flow
                         ABS_TOL ${abs_tol}
                         REL_TOL ${coarse_rel_tol}
                         TEST_ARGS max_iter=20)

# Restart tests
opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-restart-regressionTest.sh "")

# Cruder tolerances for the restarted tests
set(abs_tol_restart 2e-1)
set(rel_tol_restart 4e-5)

foreach(sim flow flow_legacy)
  add_test_compare_restarted_simulation(CASENAME spe1
                                        FILENAME SPE1CASE2_ACTNUM
                                        SIMULATOR ${sim}
                                        ABS_TOL ${abs_tol_restart}
                                        REL_TOL ${rel_tol_restart})
  add_test_compare_restarted_simulation(CASENAME spe9
                                        FILENAME SPE9_CP_SHORT
                                        SIMULATOR ${sim}
                                        ABS_TOL ${abs_tol_restart}
                                        REL_TOL ${rel_tol_restart})
endforeach()

# Init tests
opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-init-regressionTest.sh "")

foreach(sim flow flow_legacy)
  add_test_compareECLFiles(CASENAME norne
                           FILENAME NORNE_ATW2013
                           SIMULATOR ${sim}
                           ABS_TOL ${abs_tol}
                           REL_TOL ${rel_tol}
                           PREFIX compareECLInitFiles
                           DIR_PREFIX /init)
endforeach()

# Parallel tests
if(MPI_FOUND)
  opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-restart-regressionTest.sh "")

  add_test_compare_parallel_restarted_simulation(CASENAME spe1
                                                 FILENAME SPE1CASE2_ACTNUM
                                                 SIMULATOR flow
                                                 ABS_TOL ${abs_tol_restart}
                                                 REL_TOL ${rel_tol_restart})

  opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-parallel-regressionTest.sh "")

  # Different tolerances for these tests
  set(abs_tol_parallel 0.02)
  set(rel_tol_parallel 1e-5)
  set(coarse_rel_tol_parallel 1e-2)

  add_test_compare_parallel_simulation(CASENAME spe1
                                       FILENAME SPE1CASE2
                                       SIMULATOR flow
                                       ABS_TOL ${abs_tol_parallel}
                                       REL_TOL ${rel_tol_parallel})

  add_test_compare_parallel_simulation(CASENAME spe9
                                       FILENAME SPE9_CP_SHORT
                                       SIMULATOR flow
                                       ABS_TOL ${abs_tol_parallel}
                                       REL_TOL ${rel_tol_parallel})

  add_test_compare_parallel_simulation(CASENAME spe3
                                       FILENAME SPE3CASE1
                                       SIMULATOR flow
                                       ABS_TOL ${abs_tol_parallel}
                                       REL_TOL ${coarse_rel_tol_parallel})
endif()