chore(*): tag 'slow' tests as 'expensive', and exclude 'expensive' tests when testing under valgrind
Signed-off-by: Leonardo de Moura <leonardo@microsoft.com>
parent 162fa25250
commit 0f894f4618

3 changed files with 22 additions and 1 deletion
@@ -271,7 +271,7 @@ after_script:
       CONFIG_FILE=DartConfiguration.tcl;
       sed -i "s,^MemoryCheckSuppressionFile:\W*$,MemoryCheckSuppressionFile:$MEMCHECK_SUPP," $CONFIG_FILE;
       ulimit -s unlimited;
-      yes "C" | ctest -D ExperimentalMemCheck -VV -I $MEMCHECK_RANGE | ../script/demangle_cpptype.py;
+      yes "C" | ctest -D ExperimentalMemCheck -LE expensive -VV -I $MEMCHECK_RANGE | ../script/demangle_cpptype.py;
     fi
   - if [[ $REPO == BLESSED && $PUSH_TO_CDASH == TRUE ]]; then
       GIT_COMMIT=`git log --oneline -n 1 | cut -d ' ' -f 1`;
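
Note (not part of the diff): ctest's -LE flag excludes every test whose labels match the given regular expression, so adding "-LE expensive" to the memcheck invocation above makes the valgrind run skip anything labelled expensive; running "ctest -LE expensive" from the build directory would apply the same filter locally.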
@@ -76,6 +76,16 @@ FOREACH(T ${LEANLUATESTS})
            COMMAND "./test_single.sh" "${CMAKE_CURRENT_BINARY_DIR}/lean -t" ${T_NAME})
 ENDFOREACH(T)
 
+# LEAN LUA SLOW TESTS
+file(GLOB LEANLUASLOWTESTS "${LEAN_SOURCE_DIR}/../tests/lua/slow/*.lua")
+FOREACH(T ${LEANLUASLOWTESTS})
+  GET_FILENAME_COMPONENT(T_NAME ${T} NAME)
+  add_test(NAME "leanluaslowtest_${T_NAME}"
+           WORKING_DIRECTORY "${LEAN_SOURCE_DIR}/../tests/lua/slow"
+           COMMAND "../test_single.sh" "${CMAKE_CURRENT_BINARY_DIR}/lean -t" ${T_NAME})
+  set_tests_properties("leanluaslowtest_${T_NAME}" PROPERTIES LABELS "expensive")
+ENDFOREACH(T)
+
 # # LEAN DOCS
 # file(GLOB LEANDOCS "${LEAN_SOURCE_DIR}/../doc/lean/*.md")
 # FOREACH(T ${LEANDOCS})
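
Note (not part of the diff): the set_tests_properties(... PROPERTIES LABELS "expensive") call is what attaches the CTest label that the memcheck run filters on; conversely, running only the newly added slow Lua tests locally should be possible with "ctest -L expensive" from the build directory.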

tests/lua/slow/extra.lua (new file, 11 lines)
@@ -0,0 +1,11 @@
+-- Execute f, and make sure it throws an error
+function check_error(f)
+   ok, msg = pcall(function ()
+      f()
+   end)
+   if ok then
+      error("unexpected success...")
+   else
+      print("caught expected error: ", msg)
+   end
+end
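
For illustration only (not part of the commit), a test placed under tests/lua/slow could use the helper like this; the failing closure is a made-up example:

   -- the closure raises, so check_error catches it via pcall and prints the message
   check_error(function() error("expected failure") end)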