Merge pull request #1036 from akva2/documentation_ramblings

attempt to add some documentation
commit 9ee2af940d
Arne Morten Kvarving, 2017-01-23 13:19:49 +01:00 (committed by GitHub)

@@ -1,6 +1,12 @@
-# Set absolute tolerance to be used for testing
-set(abs_tol 2e-2)
-set(rel_tol 1e-5)
+# This script manages the addition of tests.
+# The tests are orchestrated by a shell script,
+# configured using opm_set_test_driver()
+# and then the appropriate helper macro is called to
+# register the ctest entry through the opm_add_test macro.
+# Information such as the binary to call and test tolerances
+# are passed from the build system to the driver script through
+# command line parameters. See the opm_add_test() documentation for
+# details on the parameters passed to the macro.
 
 # Define some paths
 set(BASE_RESULT_PATH ${PROJECT_BINARY_DIR}/tests/results)
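To make the new header comment concrete, the registration flow it describes looks roughly like the sketch below. This is only an illustration: the mycase/MYCASE names are placeholders, while the driver script, the refactored add_test_compareECLFiles() signature, and the default tolerances are the ones appearing later in this diff.

    # Select the shell script that ctest will invoke for the tests registered below.
    opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-regressionTest.sh "")

    # Register one regression test; the helper expands to opm_add_test(), which
    # forwards the data path, result path, binary location and tolerances to the
    # driver script as command line arguments.
    add_test_compareECLFiles(mycase MYCASE flow 2e-2 1e-5 compareECLFiles)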
@@ -12,10 +18,12 @@ set(BASE_RESULT_PATH ${PROJECT_BINARY_DIR}/tests/results)
 # Input:
 # - casename: basename (no extension)
 #
-macro (add_test_compareECLFiles casename filename simulator)
+# Details:
+#   - This test class compares output from a simulation to reference files.
+macro (add_test_compareECLFiles casename filename simulator abs_tol rel_tol prefix)
   set(RESULT_PATH ${BASE_RESULT_PATH}/${simulator}+${casename})
-  opm_add_test(compareECLFiles_${simulator}+${filename} NO_COMPILE
+  opm_add_test(${prefix}_${simulator}+${filename} NO_COMPILE
                EXE_NAME ${simulator}
                DRIVER_ARGS ${OPM_DATA_ROOT}/${casename} ${RESULT_PATH}
                            ${CMAKE_BINARY_DIR}/bin
@@ -27,16 +35,19 @@ macro (add_test_compareECLFiles casename filename simulator)
 endmacro (add_test_compareECLFiles)
 
 ###########################################################################
-# TEST: compareECLRestartFiles
+# TEST: add_test_compare_restarted_simulation
 ###########################################################################
 # Input:
 # - casename: basename (no extension)
 #
-macro (add_test_compareECLRestartFiles casename filename simulator)
+# Details:
+#   - This test class compares the output from a restarted simulation
+#     to that of a non-restarted simulation.
+macro (add_test_compare_restarted_simulation casename filename simulator abs_tol rel_tol)
   set(RESULT_PATH ${BASE_RESULT_PATH}/restart/${simulator}+${casename})
-  opm_add_test(compareECLRestartFiles_${simulator}+${filename} NO_COMPILE
+  opm_add_test(compareRestartedSim_${simulator}+${filename} NO_COMPILE
                EXE_NAME ${simulator}
                DRIVER_ARGS ${OPM_DATA_ROOT}/${casename} ${RESULT_PATH}
                            ${CMAKE_BINARY_DIR}/bin
@@ -45,43 +56,23 @@ macro (add_test_compareECLRestartFiles casename filename simulator)
                ${COMPARE_SUMMARY_COMMAND}
                ${COMPARE_ECL_COMMAND}
                TEST_ARGS ${OPM_DATA_ROOT}/${casename}/${filename})
-endmacro (add_test_compareECLRestartFiles)
+endmacro (add_test_compare_restarted_simulation)
 
 ###########################################################################
-# TEST: compareECLInitFiles
+# TEST: add_test_compare_parallel_simulation
 ###########################################################################
 # Input:
 # - casename: basename (no extension)
 #
-macro (add_test_compareECLInitFiles casename filename simulator)
-  set(RESULT_PATH ${BASE_RESULT_PATH}/init/${simulator}+${casename})
-  opm_add_test(compareECLInitFiles_${simulator}+${filename} NO_COMPILE
-               EXE_NAME ${simulator}
-               DRIVER_ARGS ${OPM_DATA_ROOT}/${casename} ${RESULT_PATH}
-                           ${CMAKE_BINARY_DIR}/bin
-                           ${filename}
-                           ${abs_tol} ${rel_tol}
-                           ${COMPARE_SUMMARY_COMMAND}
-                           ${COMPARE_ECL_COMMAND}
-               TEST_ARGS ${OPM_DATA_ROOT}/${casename}/${filename})
-endmacro (add_test_compareECLInitFiles)
-
-###########################################################################
-# TEST: parallelECLFiles
-###########################################################################
-# Input:
-# - casename: basename (no extension)
-#
-macro (add_test_parallelECLFiles casename filename simulator)
-  set(abs_tol 0.20)
-  set(rel_tol 4e-4)
+# Details:
+#   - This test class compares the output from a parallel simulation
+#     to the output from the serial instance of the same model.
macro (add_test_compare_parallel_simulation casename filename simulator abs_tol rel_tol)
   set(RESULT_PATH ${BASE_RESULT_PATH}/parallel/${simulator}+${casename})
 
   # Add test that runs flow_mpi and outputs the results to file
-  opm_add_test(parallelECLFiles_${simulator}+${filename} NO_COMPILE
+  opm_add_test(compareParallelSim_${simulator}+${filename} NO_COMPILE
                EXE_NAME ${simulator}
                DRIVER_ARGS ${OPM_DATA_ROOT}/${casename} ${RESULT_PATH}
                            ${CMAKE_BINARY_DIR}/bin
@@ -90,7 +81,7 @@ macro (add_test_parallelECLFiles casename filename simulator)
                ${COMPARE_SUMMARY_COMMAND}
                ${COMPARE_ECL_COMMAND}
                TEST_ARGS ${OPM_DATA_ROOT}/${casename}/${filename})
-endmacro (add_test_parallelECLFiles)
+endmacro (add_test_compare_parallel_simulation)
 
 if(NOT TARGET test-suite)
   add_custom_target(test-suite)
@@ -99,27 +90,35 @@ endif()
 # Regression tests
 opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-regressionTest.sh "")
 
-add_test_compareECLFiles(spe1 SPE1CASE2 flow)
-add_test_compareECLFiles(spe1 SPE1CASE1 flow_sequential)
-add_test_compareECLFiles(spe3 SPE3CASE1 flow)
-add_test_compareECLFiles(spe9 SPE9_CP_SHORT flow)
+# Set the tolerances to be passed to the macros in the following tests
+set(abs_tol 2e-2)
+set(rel_tol 1e-5)
+
+add_test_compareECLFiles(spe1 SPE1CASE2 flow ${abs_tol} ${rel_tol} compareECLFiles)
+add_test_compareECLFiles(spe1 SPE1CASE1 flow_sequential ${abs_tol} ${rel_tol} compareECLFiles)
+add_test_compareECLFiles(spe3 SPE3CASE1 flow ${abs_tol} ${rel_tol} compareECLFiles)
+add_test_compareECLFiles(spe9 SPE9_CP_SHORT flow ${abs_tol} ${rel_tol} compareECLFiles)
 
 # Restart tests
 opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-restart-regressionTest.sh "")
 
-add_test_compareECLRestartFiles(spe1 SPE1CASE2_ACTNUM flow)
-add_test_compareECLRestartFiles(spe9 SPE9_CP_SHORT flow)
+add_test_compare_restarted_simulation(spe1 SPE1CASE2_ACTNUM flow ${abs_tol} ${rel_tol})
+add_test_compare_restarted_simulation(spe9 SPE9_CP_SHORT flow ${abs_tol} ${rel_tol})
 
 # Init tests
 opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-init-regressionTest.sh "")
 
-add_test_compareECLInitFiles(norne NORNE_ATW2013 flow)
+add_test_compareECLFiles(norne NORNE_ATW2013 flow ${abs_tol} ${rel_tol} compareECLInitFiles)
 
 # Parallel tests
 if(MPI_FOUND)
   opm_set_test_driver(${PROJECT_SOURCE_DIR}/tests/run-parallel-regressionTest.sh "")
 
-  add_test_parallelECLFiles(spe1 SPE1CASE2 flow_mpi)
-  add_test_parallelECLFiles(spe3 SPE3CASE1 flow_mpi)
-  add_test_parallelECLFiles(spe9 SPE9_CP_SHORT flow_mpi)
+  # Different tolerances for these tests
+  set(abs_tol 0.20)
+  set(rel_tol 4e-4)
+
+  add_test_compare_parallel_simulation(spe1 SPE1CASE2 flow_mpi ${abs_tol} ${rel_tol})
+  add_test_compare_parallel_simulation(spe3 SPE3CASE1 flow_mpi ${abs_tol} ${rel_tol})
+  add_test_compare_parallel_simulation(spe9 SPE9_CP_SHORT flow_mpi ${abs_tol} ${rel_tol})
 endif()
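A practical consequence of the refactor visible in this last hunk: because the tolerances are now macro arguments rather than file-level globals, an individual case can be registered with its own tolerances without disturbing the shared defaults. A hypothetical example (case name and tolerance values are illustrative only):

    # Looser tolerances for one particular case; the other registrations keep
    # using the abs_tol/rel_tol values set above.
    add_test_compareECLFiles(mycase MYCASE flow 1e-1 1e-3 compareECLFiles)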