From 2c836c25937fe5457aff49ada3c43a8c4aaabf77 Mon Sep 17 00:00:00 2001
From: Khalid Shakir
Date: Mon, 9 Sep 2019 15:36:21 -0400
Subject: [PATCH] Added heartbeats to swr and refactor kills

---
 project/Testing.scala                  | 14 ++++++++-
 src/ci/bin/test.inc.sh                 | 18 ++++++-----
 src/ci/bin/testCentaurTes.sh           | 42 +++++++++++++++++---------
 src/ci/bin/testSingleWorkflowRunner.sh |  2 ++
 src/ci/bin/test_bcs.inc.sh             |  2 +-
 5 files changed, 53 insertions(+), 25 deletions(-)

diff --git a/project/Testing.scala b/project/Testing.scala
index c9b32bb2c30..bf7d4c681c0 100644
--- a/project/Testing.scala
+++ b/project/Testing.scala
@@ -38,7 +38,19 @@ object Testing {
   private val excludeTestArgs = excludeTestTags.map(Tests.Argument(TestFrameworks.ScalaTest, "-l", _))
 
   private val TestReportArgs =
-    Tests.Argument(TestFrameworks.ScalaTest, "-oDSI", "-h", "target/test-reports", "-u", "target/test-reports", "-F", spanScaleFactor)
+    Tests.Argument(
+      TestFrameworks.ScalaTest,
+      "-oDSI",
+      "-h",
+      "target/test-reports",
+      "-u",
+      "target/test-reports",
+      "-F",
+      spanScaleFactor,
+      "-W",
+      "300",
+      "300",
+    )
 
   val testSettings = List(
     libraryDependencies ++= testDependencies.map(_ % Test),
diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh
index aabcff7bacf..39a645bbaf2 100644
--- a/src/ci/bin/test.inc.sh
+++ b/src/ci/bin/test.inc.sh
@@ -636,7 +636,7 @@ cromwell::private::install_wait_for_it() {
 cromwell::private::start_docker() {
     local docker_image
     local docker_cid_file
-    docker_image="${1:?foo called without a docker image}"; shift
+    docker_image="${1:?start_docker called without a docker image}"; shift
     docker_cid_file="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/$(echo "${docker_image}" | tr "/" "_" | tr ":" "-").cid.$$"
 
     docker run --cidfile="${docker_cid_file}" --detach "$@" "${docker_image}"
@@ -946,6 +946,7 @@ cromwell::private::start_build_heartbeat() {
         printf "${CROMWELL_BUILD_HEARTBEAT_PATTERN}"
     done &
     CROMWELL_BUILD_HEARTBEAT_PID=$!
+    cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
 }
 
 cromwell::private::start_cromwell_log_tail() {
@@ -953,6 +954,7 @@ cromwell::private::start_cromwell_log_tail() {
         sleep 2
     done && tail -n 0 -f "${CROMWELL_BUILD_CROMWELL_LOG}" 2> /dev/null &
     CROMWELL_BUILD_CROMWELL_LOG_TAIL_PID=$!
+    cromwell::private::add_exit_function cromwell::private::kill_cromwell_log_tail
 }
 
 cromwell::private::start_centaur_log_tail() {
@@ -960,6 +962,7 @@ cromwell::private::start_centaur_log_tail() {
         sleep 2
     done && tail -n 0 -f "${CROMWELL_BUILD_CENTAUR_LOG}" 2> /dev/null &
     CROMWELL_BUILD_CENTAUR_LOG_TAIL_PID=$!
+    cromwell::private::add_exit_function cromwell::private::kill_centaur_log_tail
 }
 
 cromwell::private::cat_centaur_log() {
@@ -1037,7 +1040,6 @@ cromwell::private::kill_tree() {
     kill "${pid}" 2> /dev/null
 }
 
-
 cromwell::private::start_conformance_cromwell() {
     # Start the Cromwell server in the directory containing input files so it can access them via their relative path
     pushd "${CROMWELL_BUILD_CWL_TEST_RESOURCES}" > /dev/null
@@ -1055,6 +1057,8 @@ cromwell::private::start_conformance_cromwell() {
     CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID=$!
 
     popd > /dev/null
+
+    cromwell::private::add_exit_function cromwell::private::kill_conformance_cromwell
 }
 
 cromwell::private::kill_conformance_cromwell() {
@@ -1131,9 +1135,6 @@ cromwell::build::setup_centaur_environment() {
     if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]]; then
         cromwell::private::add_exit_function cromwell::private::cat_centaur_log
     fi
-    cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
-    cromwell::private::add_exit_function cromwell::private::kill_cromwell_log_tail
-    cromwell::private::add_exit_function cromwell::private::kill_centaur_log_tail
 }
 
 cromwell::build::setup_conformance_environment() {
@@ -1146,12 +1147,10 @@ cromwell::build::setup_conformance_environment() {
     cromwell::private::write_cwl_test_inputs
     cromwell::private::start_build_heartbeat
     cromwell::private::add_exit_function cromwell::private::cat_conformance_log
-    cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
 }
 
 cromwell::build::setup_docker_environment() {
     cromwell::private::start_build_heartbeat
-    cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
 
     if [[ "${CROMWELL_BUILD_PROVIDER}" == "${CROMWELL_BUILD_PROVIDER_TRAVIS}" ]]; then
         # Upgrade docker-compose so that we get the correct exit codes
@@ -1200,7 +1199,6 @@ cromwell::build::run_centaur() {
 
 cromwell::build::run_conformance() {
     cromwell::private::start_conformance_cromwell
-    cromwell::private::add_exit_function cromwell::private::kill_conformance_cromwell
 
     # Give cromwell time to start up
     sleep 30
@@ -1277,6 +1275,10 @@ cromwell::build::pip_install() {
     cromwell::private::pip_install "$@"
 }
 
+cromwell::build::start_build_heartbeat() {
+    cromwell::private::start_build_heartbeat
+}
+
 cromwell::build::add_exit_function() {
     cromwell::private::add_exit_function "$1"
 }
diff --git a/src/ci/bin/testCentaurTes.sh b/src/ci/bin/testCentaurTes.sh
index 76f9ef3ee82..4f7ffade4cb 100755
--- a/src/ci/bin/testCentaurTes.sh
+++ b/src/ci/bin/testCentaurTes.sh
@@ -12,16 +12,33 @@ cromwell::build::setup_centaur_environment
 
 cromwell::build::assemble_jars
 
-FUNNEL_PATH="${CROMWELL_BUILD_ROOT_DIRECTORY}/funnel"
-FUNNEL_CONF="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/funnel.conf"
+startup_funnel() {
+    local funnel_path
+    local funnel_conf
+    local funnel_tar_gz
 
-# Increase max open files to the maximum allowed. Attempt to help on macos due to the default soft ulimit -n -S 256.
-ulimit -n "$(ulimit -n -H)"
-if [[ ! -f "${FUNNEL_PATH}" ]]; then
-    FUNNEL_TAR_GZ="funnel-${CROMWELL_BUILD_OS}-amd64-0.5.0.tar.gz"
-    curl "https://github.com/ohsu-comp-bio/funnel/releases/download/0.5.0/${FUNNEL_TAR_GZ}" -o "${FUNNEL_TAR_GZ}" -L
-    tar xzf "${FUNNEL_TAR_GZ}"
-fi
+    funnel_path="${CROMWELL_BUILD_ROOT_DIRECTORY}/funnel"
+    funnel_conf="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/funnel.conf"
+
+    # Increase max open files to the maximum allowed. Attempt to help on macos due to the default soft ulimit -n -S 256.
+    ulimit -n "$(ulimit -n -H)"
+    if [[ ! -f "${funnel_path}" ]]; then
+        funnel_tar_gz="funnel-${CROMWELL_BUILD_OS}-amd64-0.5.0.tar.gz"
+        curl \
+            --location \
+            --output "${funnel_tar_gz}" \
+            "https://github.com/ohsu-comp-bio/funnel/releases/download/0.5.0/${funnel_tar_gz}"
+        tar xzf "${funnel_tar_gz}"
+    fi
+
+    mkdir -p logs
+    nohup "${funnel_path}" server run --config "${funnel_conf}" &> logs/funnel.log &
+
+    FUNNEL_PID=$!
+    export FUNNEL_PID
+
+    cromwell::build::add_exit_function shutdown_funnel
+}
 
 shutdown_funnel() {
     if [[ -n "${FUNNEL_PID+set}" ]]; then
@@ -29,12 +46,7 @@ shutdown_funnel() {
     fi
 }
 
-cromwell::build::add_exit_function shutdown_funnel
-
-mkdir -p logs
-nohup "${FUNNEL_PATH}" server run --config "${FUNNEL_CONF}" &> logs/funnel.log &
-
-FUNNEL_PID=$!
+startup_funnel
 
 # The following tests are skipped:
 #
diff --git a/src/ci/bin/testSingleWorkflowRunner.sh b/src/ci/bin/testSingleWorkflowRunner.sh
index c9a03c3fa67..aeb507ecf14 100755
--- a/src/ci/bin/testSingleWorkflowRunner.sh
+++ b/src/ci/bin/testSingleWorkflowRunner.sh
@@ -7,6 +7,8 @@ source "${BASH_SOURCE%/*}/test.inc.sh" || source test.inc.sh
 
 cromwell::build::setup_common_environment
 
+cromwell::build::start_build_heartbeat
+
 cromwell::build::assemble_jars
 
 java -jar $CROMWELL_BUILD_CROMWELL_JAR run ./centaur/src/main/resources/standardTestCases/hello/hello.wdl --inputs ./centaur/src/main/resources/standardTestCases/hello/hello.inputs --metadata-output ./run_mode_metadata.json | tee console_output.txt
diff --git a/src/ci/bin/test_bcs.inc.sh b/src/ci/bin/test_bcs.inc.sh
index 3c9ad36559b..a415d0a49d2 100644
--- a/src/ci/bin/test_bcs.inc.sh
+++ b/src/ci/bin/test_bcs.inc.sh
@@ -88,6 +88,7 @@ cromwell::private::bcs::bcs_config() {
 
 cromwell::private::bcs::bcs_create_cluster() {
     cromwell::build::exec_retry_function cromwell::private::bcs::try_bcs_create_cluster
+    cromwell::build::add_exit_function cromwell::private::bcs::bcs_delete_cluster
 }
 
 cromwell::private::bcs::bcs_delete_cluster() {
@@ -125,6 +126,5 @@ cromwell::build::bcs::setup_bcs_environment() {
     cromwell::private::bcs::bcs_delete_old_resources
 
     # Create the BCS cluster before sbt assembly as cluster creation takes a few minutes
-    cromwell::build::add_exit_function cromwell::private::bcs::bcs_delete_cluster
     cromwell::private::bcs::bcs_create_cluster
 }
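
Note (reviewer sketch, not part of the patch): the new ScalaTest arguments "-W", "300", "300" request slowpoke notifications, so the runner reports any test that has been running for more than 300 seconds and repeats that report every 300 seconds. Together with the new cromwell::build::start_build_heartbeat wrapper this appears intended to keep long, otherwise quiet CI builds emitting output. The refactor itself moves each cleanup registration into the corresponding start_* helper via add_exit_function, which is why the explicit kill_* registrations are deleted from the setup_* functions above. The bash sketch below illustrates that start-registers-its-own-cleanup pattern; add_exit_function, start_build_heartbeat, kill_build_heartbeat, and BUILD_HEARTBEAT_PID are simplified stand-ins, not the real cromwell::private helpers.

    #!/usr/bin/env bash
    # Simplified sketch of the "starter registers its own cleanup" pattern.
    # All names below are illustrative stand-ins for the cromwell helpers.

    set -o errexit -o nounset -o pipefail

    EXIT_FUNCTIONS=()

    run_exit_functions() {
        local exit_function
        for exit_function in "${EXIT_FUNCTIONS[@]}"; do
            # Run each registered cleanup, ignoring individual failures.
            "${exit_function}" || true
        done
    }

    add_exit_function() {
        # Remember the cleanup and make sure the EXIT trap is installed.
        EXIT_FUNCTIONS+=("$1")
        trap run_exit_functions EXIT
    }

    kill_build_heartbeat() {
        kill "${BUILD_HEARTBEAT_PID}" 2> /dev/null || true
    }

    start_build_heartbeat() {
        # Emit a character periodically so a long, quiet build keeps producing output.
        while true; do
            sleep 60
            printf "."
        done &
        BUILD_HEARTBEAT_PID=$!
        # The starter registers its own kill; callers only ever call start_build_heartbeat.
        add_exit_function kill_build_heartbeat
    }

    start_build_heartbeat
    sleep 5  # stand-in for the real build work; the heartbeat is killed by the EXIT trap

With this shape, a caller such as testSingleWorkflowRunner.sh only needs the single cromwell::build::start_build_heartbeat line added in the diff; the matching kill runs automatically when the script exits.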