From f3c0faf2853fca8fec40b810cc988a2bcc4a8465 Mon Sep 17 00:00:00 2001 From: Milosz Linkiewicz Date: Wed, 12 Feb 2025 08:09:07 +0100 Subject: [PATCH] Fixed: Added more verbose output to terminal Fixed: Set a timeout of 12h as tests can be lengthy Refactor: Moved validation tests run to script file Signed-off-by: Milosz Linkiewicz --- .github/scripts/run_validation_tests.sh | 99 ++++++++++++++++++ .github/workflows/gtest-bare-metal.yml | 8 +- .github/workflows/validation-tests.yml | 128 +++++++++--------------- .gitignore | 3 +- 4 files changed, 155 insertions(+), 83 deletions(-) create mode 100755 .github/scripts/run_validation_tests.sh diff --git a/.github/scripts/run_validation_tests.sh b/.github/scripts/run_validation_tests.sh new file mode 100755 index 000000000..575373fa6 --- /dev/null +++ b/.github/scripts/run_validation_tests.sh @@ -0,0 +1,99 @@ +#!/bin/bash + +set +e + +# Function to log messages to GitHub Actions +log_to_github() { + echo "$1" >>"$GITHUB_STEP_SUMMARY" +} + +# Function to run a test and handle retries +run_test() { + local test=$1 + local retries=$2 + local test_port_p=$3 + local test_port_r=$4 + local start_time + local end_time + local duration + + echo "::group::${test}" + start_time=$(date '+%s') + sudo --preserve-env python3 -m pipenv run pytest "${test}" --media=/mnt/media --build="../.." --nic="${test_port_p},${test_port_r}" --collect-only -q --no-summary + + for retry in $(seq 1 "$retries"); do + echo "sudo --preserve-env python3 -m pipenv run pytest \"${test}\" --media=/mnt/media --build=\"../..\" --nic=\"${test_port_p},${test_port_r}\"" + sudo --preserve-env python3 -m pipenv run pytest "${test}" --media=/mnt/media --build="../.." --nic="${test_port_p},${test_port_r}" + local result=$? 
+ echo "RETRY: ${retry}" + [[ "$result" == "0" ]] && break + done + + end_time=$(date '+%s') + duration=$((end_time - start_time)) + local status="❌" + local suffix="[Err]" + + if [[ "$result" == "0" ]]; then + status="✅" + suffix="[OK]" + TESTS_SUCCESS+=("${test}") + else + TESTS_FAIL+=("${test}") + fi + + log_to_github "| ${status} | ${test} | $(date --date="@${start_time}" '+%d%m%y_%H%M%S') | $(date --date="@${end_time}" '+%d%m%y_%H%M%S') | ${duration}s | ${suffix} |" + echo "::endgroup::" +} + +# Main script execution +echo "::group::pre-execution-summary" + +# Export environment variables +export TEST_PORT_P="${TEST_PORT_P}" +export TEST_PORT_R="${TEST_PORT_R}" + +SUMMARY_MAIN_HEADER="Starting " +# Collect tests to be executed +if [[ -n "${VALIDATION_TESTS_1}" ]]; then + SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER} tests/${VALIDATION_TESTS_1}" + python3 -m pipenv run pytest "tests/${VALIDATION_TESTS_1}" --media=/mnt/media --build="../.." --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary >tests.log 2>&1 +fi + +if [[ -n "${VALIDATION_TESTS_2}" ]]; then + SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER}, tests/${VALIDATION_TESTS_2}" + python3 -m pipenv run pytest "tests/${VALIDATION_TESTS_2}" --media=/mnt/media --build="../.." 
--nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary >>tests.log 2>&1 +fi + +mapfile -t TESTS_INCLUDED_IN_EXECUTION < <(grep -v "collected in" tests.log) +NUMBER_OF_TESTS="${#TESTS_INCLUDED_IN_EXECUTION[@]}" +TESTS_FAIL=() +TESTS_SUCCESS=() + +echo "${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:" +echo "----------------------------------" +echo "Tests to be executed:" +echo "${TESTS_INCLUDED_IN_EXECUTION[@]}" + +log_to_github "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:" +log_to_github "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |" +log_to_github "| --- | --- | --- | --- | --- | --- |" +echo "::endgroup::" + +# Execute each test +for test in "${TESTS_INCLUDED_IN_EXECUTION[@]}"; do + run_test "$test" "$PYTEST_RETRIES" "$TEST_PORT_P" "$TEST_PORT_R" +done + +# Summary of test results +log_to_github "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" +log_to_github "${TESTS_SUCCESS[@]}" +log_to_github "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" +log_to_github "${TESTS_FAIL[@]}" + +# Determine exit status +if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${VALIDATION_NO_FAIL_TESTS}" == "true" ]]; then + exit 0 +else + exit 1 +fi diff --git a/.github/workflows/gtest-bare-metal.yml b/.github/workflows/gtest-bare-metal.yml index 6541c4c9d..db90f105c 100644 --- a/.github/workflows/gtest-bare-metal.yml +++ b/.github/workflows/gtest-bare-metal.yml @@ -27,6 +27,7 @@ env: # Customize the env if BUILD_TYPE: 'Release' DPDK_VERSION: '23.11' + DPDK_REBUILD: 'false' # Bellow ENV variables are required to be defined on runner side: # TEST_PF_PORT_P: '0000:49:00.0' # TEST_PF_PORT_R: '0000:49:00.1' @@ -71,6 +72,7 @@ jobs: ref: '${{ inputs.branch-to-checkout || github.head_ref || github.ref }}' - name: Checkout DPDK + if: env.DPDK_REBUILD == 'true' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: repository: 'DPDK/dpdk' @@ -83,10 +85,12 @@ jobs: sudo apt-get install 
-y systemtap-sdt-dev - name: Apply dpdk patches + if: env.DPDK_REBUILD == 'true' run: | patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch) - name: Build dpdk + if: env.DPDK_REBUILD == 'true' run: | cd dpdk meson build @@ -119,8 +123,8 @@ jobs: - name: Binding network adapter run: | sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true - sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true - sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true + sudo dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true + sudo dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true - name: Start MtlManager at background run: | diff --git a/.github/workflows/validation-tests.yml b/.github/workflows/validation-tests.yml index 3234e0300..cabd0390b 100644 --- a/.github/workflows/validation-tests.yml +++ b/.github/workflows/validation-tests.yml @@ -27,12 +27,10 @@ on: - TEST_VF_PORT_P_0 - TEST_VF_PORT_P_1 - TEST_VF_PORT_P_2 - - TEST_VF_PORT_P_3 - TEST_PF_PORT_P - TEST_VF_PORT_R_0 - TEST_VF_PORT_R_1 - TEST_VF_PORT_R_2 - - TEST_VF_PORT_R_3 - TEST_PF_PORT_R - TEST_DMA_PORT_P - TEST_DMA_PORT_R @@ -44,12 +42,10 @@ on: - TEST_VF_PORT_P_1 - TEST_VF_PORT_P_0 - TEST_VF_PORT_P_2 - - TEST_VF_PORT_P_3 - TEST_PF_PORT_P - TEST_VF_PORT_R_0 - TEST_VF_PORT_R_1 - TEST_VF_PORT_R_2 - - TEST_VF_PORT_R_3 - TEST_PF_PORT_R - TEST_DMA_PORT_P - TEST_DMA_PORT_R @@ -111,6 +107,7 @@ on: env: BUILD_TYPE: 'Release' DPDK_VERSION: '23.11' + DPDK_REBUILD: 'false' permissions: contents: read @@ -130,8 +127,8 @@ jobs: - name: 'preparation: Restore valid repository owner and print env' if: always() run: | - sudo chown -R "${USER}" "$(pwd)" - env | grep TEST_ + sudo chown -R "${USER}" "$(pwd)" || true + env | grep TEST_ || true - name: 'preparation: Checkout MTL' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -140,6 +137,7 @@ jobs: - name: 'preparation: Checkout DPDK' uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + if: env.DPDK_REBUILD == 'true' with: repository: 'DPDK/dpdk' ref: 'v${{ env.DPDK_VERSION }}' @@ -163,14 +161,18 @@ jobs: libsdl2-dev \ libsdl2-ttf-dev \ libssl-dev \ - systemtap-sdt-dev + systemtap-sdt-dev \ + libbpf-dev \ + libelf1 - name: 'configuration: Apply dpdk patches' + if: env.DPDK_REBUILD == 'true' run: | patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch) - name: 'installation: Build dpdk' working-directory: dpdk + if: env.DPDK_REBUILD == 'true' run: | meson build ninja -C build @@ -189,9 +191,11 @@ jobs: python3 -m pipenv install -r requirements.txt echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_ENV" + # Timeout of this job is set to 12h [60m/h*12h=720m] validation-run-tests: needs: [validation-build-mtl] runs-on: [Linux, self-hosted, DPDK] + timeout-minutes: 720 env: PYTEST_ALIAS: 'sudo --preserve-env python3 -m pipenv run pytest' PYTEST_PARAMS: '--media=/mnt/media --build="../.."' @@ -229,64 +233,22 @@ jobs: sudo rmmod irdma || true sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_P}" || true sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_R}" || true - sudo modprobe irdma || true - name: 'preparation: Start MtlManager at background' run: | sudo MtlManager & - - name: 'execution: Run validation-tests in pipenv environment' + - name: 'execution: Run validation-bare-metal tests in pipenv environment' working-directory: tests/validation run: | - set +e - export TEST_PORT_P="${{ env.TEST_PORT_P }}" - export TEST_PORT_R="${{ env.TEST_PORT_R }}" - TESTS_INCLUDED_IN_EXECUTION=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "tests/${{ inputs.validation-tests-1 }}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1)) ) - SUMMARY_MAIN_HEADER="Starting tests/${{ inputs.validation-tests-1 }}" - if [[ -n "${{ inputs.validation-tests-2 
}}" ]]; then - TESTS_INCLUDED_IN_EXECUTION+=( $(grep -v "collected in" <(${{ env.PYTEST_ALIAS }} "tests/${{ inputs.validation-tests-2 }}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1)) ) - SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER}, tests/${{ inputs.validation-tests-2 }}" - fi - - NUMBER_OF_TESTS="${#TESTS_INCLUDED_IN_EXECUTION[@]}" - TESTS_FAIL=() - TESTS_SUCCESS=() - echo "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:" >> "$GITHUB_STEP_SUMMARY" - echo "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |" >> "$GITHUB_STEP_SUMMARY" - echo "| --- | ---| --- | --- | --- | --- |" >> "$GITHUB_STEP_SUMMARY" - - for test in ${TESTS_INCLUDED_IN_EXECUTION[@]}; do - echo "::group::${test}" - PYTEST_START_HI="$(date '+%s')" - ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary - for retry in $(seq 1 "${{ env.PYTEST_RETRIES }}"); do - ${{ env.PYTEST_ALIAS }} "${test}" ${{ env.PYTEST_PARAMS }} --nic="${TEST_PORT_P},${TEST_PORT_R}" - PYTEST_RESULT="$?" 
- echo "RETRY: ${retry}" - [[ "${PYTEST_RESULT}" == "0" ]] && break - done - PYTEST_END_HI="$(date '+%s')" - if [[ "${PYTEST_RESULT}" == "0" ]]; then - PREFIX="✅" - SUFFIX="[OK]" - TESTS_SUCCESS+=( "${test}" ) - else - PREFIX="❌" - SUFFIX="[Err]" - TESTS_FAIL+=( "${test}" ) - fi - echo "| ${PREFIX} | ${test} | $(date --date=@${PYTEST_START_HI} '+%d%m%y_%H%M%S') | $(date --date=@${PYTEST_END_HI} '+%d%m%y_%H%M%S') | $((PYTEST_END_HI-PYTEST_START_HI))s | ${SUFFIX} |" >> "$GITHUB_STEP_SUMMARY" - echo "::endgroup::" - done - echo "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" >> "$GITHUB_STEP_SUMMARY" - echo "${TESTS_SUCCESS[@]}" >> "$GITHUB_STEP_SUMMARY" - echo "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" >> "$GITHUB_STEP_SUMMARY" - echo "${TESTS_FAIL[@]}" >> "$GITHUB_STEP_SUMMARY" - if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${{ inputs.validation-no-fail-tests }}" == "true" ]]; then - exit 0 - else - exit 1 - fi + . "${{ github.workspace }}/.github/scripts/run_validation_tests.sh" + env: + TEST_PORT_P: ${{ env.TEST_PORT_P }} + TEST_PORT_R: ${{ env.TEST_PORT_R }} + PYTEST_RETRIES: '3' + VALIDATION_TESTS_1: ${{ inputs.validation-tests-1 }} + VALIDATION_TESTS_2: ${{ inputs.validation-tests-2 }} + VALIDATION_NO_FAIL_TESTS: ${{ inputs.validation-no-fail-tests }} - name: 'execution: Run validation-pre-release-1 in pipenv environment' if: inputs.validation-pre-release-1 != 'NONE' @@ -313,11 +275,16 @@ jobs: sudo killall -SIGINT pytest || true sudo killall -SIGINT MtlManager || true + - name: 'cleanup: Create archive with logs from execution' + if: always() + working-directory: tests/validation + run: | + sudo tar -czf "validation-execution-logs.tar.gz" "./logs" + sudo rm -rf "./logs" + - name: 'cleanup: Restore valid owner to repository and directories' if: always() run: | - sudo tar -czf "${{ github.workspace }}/tests/validation/validation-execution-logs.tar.gz" "${{ github.workspace }}/tests/validation/logs" - sudo rm -rf "${{ github.workspace 
}}/tests/validation/logs" sudo chown -R "${USER}" "$(pwd)" - name: 'cleanup: Bind pf to kernel driver' @@ -326,33 +293,34 @@ jobs: sudo rmmod irdma || true sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_P}" || true sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_R}" || true - sudo modprobe irdma || true - name: 'cleanup: Validation execution logs' if: always() uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: - name: 'validation-execution-logs' + name: 'validation-execution-logs.tar.gz' path: '${{ github.workspace }}/tests/validation/validation-execution-logs.tar.gz' - name: 'cleanup: Generate runner summary' if: always() run: | - echo "## Runner ${{ runner.name }}" >> "$GITHUB_STEP_SUMMARY" - echo "Bellow ENV variables are defined on the ${{ runner.name }} self-hosted runner side" >> "$GITHUB_STEP_SUMMARY" - echo "| Variable | Value |" >> "$GITHUB_STEP_SUMMARY" - echo "| --- | --- |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_PORT_P | ${TEST_PORT_P} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_PORT_R | ${TEST_PORT_R} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" >> "$GITHUB_STEP_SUMMARY" - echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} 
|" >> "$GITHUB_STEP_SUMMARY" + { + echo "## Runner ${{ runner.name }}" + echo "Bellow ENV variables are defined on the ${{ runner.name }} self-hosted runner side" + echo "| Variable | Value |" + echo "| --- | --- |" + echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" + echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" + echo "| TEST_PORT_P | ${TEST_PORT_P} |" + echo "| TEST_PORT_R | ${TEST_PORT_R} |" + echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" + echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" + echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" + echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" + echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" + echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" + echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" + echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" + echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" + echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.gitignore b/.gitignore index e2a5179d4..00e06991a 100644 --- a/.gitignore +++ b/.gitignore @@ -88,4 +88,5 @@ doc/_build # Gpu direct files gpu_direct/tests/fff.h gpu_direct/subprojects/* -!gpu_direct/subprojects/gtest.wrap \ No newline at end of file +!gpu_direct/subprojects/gtest.wrap +_temp*