diff --git a/.github/scripts/run_validation_tests.sh b/.github/scripts/run_validation_tests.sh
new file mode 100755
index 000000000..575373fa6
--- /dev/null
+++ b/.github/scripts/run_validation_tests.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+
+set +e
+
+# Function to log messages to GitHub Actions
+log_to_github() {
+    echo "$1" >>"$GITHUB_STEP_SUMMARY"
+}
+
+# Function to run a test and handle retries
+run_test() {
+    local test=$1
+    local retries=$2
+    local test_port_p=$3
+    local test_port_r=$4
+    local start_time
+    local end_time
+    local duration
+
+    echo "::group::${test}"
+    start_time=$(date '+%s')
+    sudo --preserve-env python3 -m pipenv run pytest "${test}" --media=/mnt/media --build="../.." --nic="${test_port_p},${test_port_r}" --collect-only -q --no-summary
+
+    for retry in $(seq 1 "$retries"); do
+        echo "sudo --preserve-env python3 -m pipenv run pytest \"${test}\" --media=/mnt/media --build=\"../..\" --nic=\"${test_port_p},${test_port_r}\""
+        sudo --preserve-env python3 -m pipenv run pytest "${test}" --media=/mnt/media --build="../.." --nic="${test_port_p},${test_port_r}"
+        local result=$?
+        echo "RETRY: ${retry}"
+        [[ "$result" == "0" ]] && break
+    done
+
+    end_time=$(date '+%s')
+    duration=$((end_time - start_time))
+    local status="❌"
+    local suffix="[Err]"
+
+    if [[ "$result" == "0" ]]; then
+        status="✅"
+        suffix="[OK]"
+        TESTS_SUCCESS+=("${test}")
+    else
+        TESTS_FAIL+=("${test}")
+    fi
+
+    log_to_github "| ${status} | ${test} | $(date --date="@${start_time}" '+%d%m%y_%H%M%S') | $(date --date="@${end_time}" '+%d%m%y_%H%M%S') | ${duration}s | ${suffix} |"
+    echo "::endgroup::"
+}
+
+# Main script execution
+echo "::group::pre-execution-summary"
+
+# Export environment variables
+export TEST_PORT_P="${TEST_PORT_P}"
+export TEST_PORT_R="${TEST_PORT_R}"
+
+SUMMARY_MAIN_HEADER="Starting"
+# Collect tests to be executed
+if [[ -n "${VALIDATION_TESTS_1}" ]]; then
+    SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER} tests/${VALIDATION_TESTS_1}"
+    python3 -m pipenv run pytest "tests/${VALIDATION_TESTS_1}" --media=/mnt/media --build="../.." --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary >tests.log 2>&1
+fi
+
+if [[ -n "${VALIDATION_TESTS_2}" ]]; then
+    SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER}, tests/${VALIDATION_TESTS_2}"
+    python3 -m pipenv run pytest "tests/${VALIDATION_TESTS_2}" --media=/mnt/media --build="../.." --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary >>tests.log 2>&1
--nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary >>tests.log 2>&1 +fi + +mapfile -t TESTS_INCLUDED_IN_EXECUTION < <(grep -v "collected in" tests.log) +NUMBER_OF_TESTS="${#TESTS_INCLUDED_IN_EXECUTION[@]}" +TESTS_FAIL=() +TESTS_SUCCESS=() + +echo "${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:" +echo "----------------------------------" +echo "Tests to be executed:" +echo "${TESTS_INCLUDED_IN_EXECUTION[@]}" + +log_to_github "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:" +log_to_github "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |" +log_to_github "| --- | --- | --- | --- | --- | --- |" +echo "::endgroup::" + +# Execute each test +for test in "${TESTS_INCLUDED_IN_EXECUTION[@]}"; do + run_test "$test" "$PYTEST_RETRIES" "$TEST_PORT_P" "$TEST_PORT_R" +done + +# Summary of test results +log_to_github "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:" +log_to_github "${TESTS_SUCCESS[@]}" +log_to_github "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:" +log_to_github "${TESTS_FAIL[@]}" + +# Determine exit status +if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${VALIDATION_NO_FAIL_TESTS}" == "true" ]]; then + exit 0 +else + exit 1 +fi diff --git a/.github/workflows/gtest-bare-metal.yml b/.github/workflows/gtest-bare-metal.yml index 6541c4c9d..db90f105c 100644 --- a/.github/workflows/gtest-bare-metal.yml +++ b/.github/workflows/gtest-bare-metal.yml @@ -27,6 +27,7 @@ env: # Customize the env if BUILD_TYPE: 'Release' DPDK_VERSION: '23.11' + DPDK_REBUILD: 'false' # Bellow ENV variables are required to be defined on runner side: # TEST_PF_PORT_P: '0000:49:00.0' # TEST_PF_PORT_R: '0000:49:00.1' @@ -71,6 +72,7 @@ jobs: ref: '${{ inputs.branch-to-checkout || github.head_ref || github.ref }}' - name: Checkout DPDK + if: env.DPDK_REBUILD == 'true' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: repository: 'DPDK/dpdk' @@ -83,10 +85,12 @@ jobs: sudo apt-get install -y systemtap-sdt-dev - name: Apply dpdk patches + if: env.DPDK_REBUILD == 'true' run: | patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch) - name: Build dpdk + if: env.DPDK_REBUILD == 'true' run: | cd dpdk meson build @@ -119,8 +123,8 @@ jobs: - name: Binding network adapter run: | sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true - sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true - sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true + sudo dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true + sudo dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true - name: Start MtlManager at background run: | diff --git a/.github/workflows/validation-tests.yml b/.github/workflows/validation-tests.yml index 075745267..cabd0390b 100644 --- a/.github/workflows/validation-tests.yml +++ b/.github/workflows/validation-tests.yml @@ -9,23 +9,105 @@ on: default: 'main' required: false description: 'Branch name to use' - validation-tests: + validation-iface-binding: + type: choice + required: true + description: 'Type of iface binding to use' + options: + - "create_vf" + - "create_kvf" + - "create_tvf" + - "bind_pmd" + - "bind_kernel" + validation-test-port-p: + type: choice + required: true + description: 'Which to use as Test-Port-P' + options: + - TEST_VF_PORT_P_0 + - TEST_VF_PORT_P_1 + - TEST_VF_PORT_P_2 + - TEST_PF_PORT_P + - TEST_VF_PORT_R_0 + - TEST_VF_PORT_R_1 + - TEST_VF_PORT_R_2 + - TEST_PF_PORT_R + - TEST_DMA_PORT_P + - TEST_DMA_PORT_R + 
+      validation-test-port-r:
+        type: choice
+        required: true
+        description: 'Which port to use as Test-Port-R'
+        options:
+          - TEST_VF_PORT_P_1
+          - TEST_VF_PORT_P_0
+          - TEST_VF_PORT_P_2
+          - TEST_PF_PORT_P
+          - TEST_VF_PORT_R_0
+          - TEST_VF_PORT_R_1
+          - TEST_VF_PORT_R_2
+          - TEST_PF_PORT_R
+          - TEST_DMA_PORT_P
+          - TEST_DMA_PORT_R
+      validation-no-fail-tests:
+        type: choice
+        required: false
+        description: 'Run all tests, none will fail'
+        options:
+          - "true"
+          - "false"
+      validation-tests-1:
         type: string
-        default: 'tests/single/video/pacing'
+        default: 'single/video/pacing'
         required: true
-        description: 'Validation tests to run'
+        description: '1st validation tests to run'
+      validation-tests-2:
+        type: string
+        default: 'single/ancillary'
+        required: false
+        description: '2nd validation tests to run'
+      validation-pre-release-1:
+        description: 'Select from pre-release group tests nr-1'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - ancillary
+          - kernel-socket
+          - rss-mode
+          - st20p
+          - st30p
+          - st41
+          - udp
+          - video
+          - xdp
+      validation-pre-release-2:
+        description: 'Select from pre-release group tests nr-2'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - ffmpeg-plugin
+          - fuzzy-tests
+          - performance
+          - ptp
+          - rx-timing
+          - vero
+          - virtio-enable
+          - wrong-parameter
+      validation-pre-release-3:
+        description: 'Select from pre-release group tests nr-3'
+        required: false
+        type: choice
+        options:
+          - NONE
+          - gpu-direct
+          - gpu-enabling
 
 env:
-  # Customize the env if
   BUILD_TYPE: 'Release'
   DPDK_VERSION: '23.11'
-  # Bellow ENV variables are required to be defined on runner side:
-  # TEST_PF_PORT_P: '0000:49:00.0'
-  # TEST_PF_PORT_R: '0000:49:00.1'
-  # TEST_PORT_P: '0000:49:01.2'
-  # TEST_PORT_R: '0000:49:01.3'
-  # TEST_DMA_PORT_P: '0000:6a:01.0'
-  # TEST_DMA_PORT_R: '0000:6f:01.0'
+  DPDK_REBUILD: 'false'
 
 permissions:
   contents: read
@@ -34,78 +116,211 @@ jobs:
   validation-build-mtl:
     runs-on: [Linux, self-hosted, DPDK]
     timeout-minutes: 60
-
+    outputs:
+      pipenv-activate: ${{ steps.pipenv-install.outputs.VIRTUAL_ENV }}
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
        uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
        with:
          egress-policy: audit
 
-      - name: Checkout MTL
+      - name: 'preparation: Restore valid repository owner and print env'
+        if: always()
+        run: |
+          sudo chown -R "${USER}" "$(pwd)" || true
+          env | grep TEST_ || true
+
+      - name: 'preparation: Checkout MTL'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: '${{ inputs.branch-to-checkout }}'
 
-      - name: Checkout DPDK
+      - name: 'preparation: Checkout DPDK'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        if: env.DPDK_REBUILD == 'true'
        with:
          repository: 'DPDK/dpdk'
          ref: 'v${{ env.DPDK_VERSION }}'
          path: 'dpdk'
 
-      - name: Install the build dependency
+      - name: 'configuration: Install the build dependency'
        run: |
-          sudo apt-get install -y git gcc meson python3 python3-pyelftools pkg-config libnuma-dev libjson-c-dev libpcap-dev libgtest-dev libsdl2-dev libsdl2-ttf-dev libssl-dev
-          sudo apt-get install -y systemtap-sdt-dev pipenv
+          sudo apt update
+          sudo apt-get remove -y pipenv || true
+          sudo apt-get install -y \
+            git gcc meson tar zip \
+            pkg-config \
+            python3 \
+            python3-pyelftools \
+            python3-virtualenv \
+            python3-pip \
+            libnuma-dev \
+            libjson-c-dev \
+            libpcap-dev \
+            libgtest-dev \
+            libsdl2-dev \
+            libsdl2-ttf-dev \
+            libssl-dev \
+            systemtap-sdt-dev \
+            libbpf-dev \
+            libelf1
 
-      - name: Apply dpdk patches
+      - name: 'configuration: Apply dpdk patches'
+        if: env.DPDK_REBUILD == 'true'
        run: |
          patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)
 
-      - name: Build dpdk
+      - name: 'installation: Build dpdk'
+        working-directory: dpdk
+        if: env.DPDK_REBUILD == 'true'
        run: |
-          cd dpdk
          meson build
          ninja -C build
-          cd build
-          sudo ninja install
+          sudo ninja -C build install
 
-      - name: Build
+      - name: 'installation: Build mtl'
        run: |
          ./build.sh
          sudo ldconfig
 
-      - name: Prepare pipenv environment
+      - name: 'installation: Install pipenv environment'
        working-directory: tests/validation
+        id: pipenv-install
        run: |
-          pipenv install -r requirements.txt
+          python3 -m pip install pipenv
+          python3 -m pipenv install -r requirements.txt
+          echo "VIRTUAL_ENV=$(python3 -m pipenv --venv)/bin/activate" >> "$GITHUB_ENV"
+
+  # Timeout of this job is set to 12h [60m/h*12h=720m]
   validation-run-tests:
     needs: [validation-build-mtl]
     runs-on: [Linux, self-hosted, DPDK]
+    timeout-minutes: 720
+    env:
+      PYTEST_ALIAS: 'sudo --preserve-env python3 -m pipenv run pytest'
+      PYTEST_PARAMS: '--media=/mnt/media --build="../.."'
+      PYTEST_RETRIES: '3'
     steps:
-      - name: Harden Runner
+      - name: 'preparation: Harden Runner'
        uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1
        with:
          egress-policy: audit
 
-      - name: Kill previous pytest routine
+      - name: 'preparation: Evaluate chosen validation-test-port-p and validation-test-port-r'
+        run: |
+          eval "export TEST_PORT_P=\$${{ inputs.validation-test-port-p }}"
+          eval "export TEST_PORT_R=\$${{ inputs.validation-test-port-r }}"
+          echo "TEST_PORT_P=${TEST_PORT_P}" >> "$GITHUB_ENV"
+          echo "TEST_PORT_R=${TEST_PORT_R}" >> "$GITHUB_ENV"
+          echo "TEST_PORT_P=${TEST_PORT_P}"
+          echo "TEST_PORT_R=${TEST_PORT_R}"
+
+      - name: 'preparation: Kill MtlManager and pytest routines'
        run: |
          sudo killall -SIGINT pipenv || true
          sudo killall -SIGINT pytest || true
          sudo killall -SIGINT MtlManager || true
 
-      - name: Binding network adapter
+      - name: 'preparation: Binding network adapter pf to kernel driver'
+        if: inputs.validation-iface-binding != 'bind_pmd'
+        run: |
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_R}" || true
+
+      - name: 'preparation: Binding network adapter ${{ inputs.validation-iface-binding }}'
        run: |
-          sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true
-          sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
-          sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
+          sudo rmmod irdma || true
+          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_P}" || true
+          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_R}" || true
 
-      - name: Start MtlManager at background
+      - name: 'preparation: Start MtlManager at background'
        run: |
          sudo MtlManager &
 
-      - name: Run tests in pipenv environment
+      - name: 'execution: Run validation-bare-metal tests in pipenv environment'
        working-directory: tests/validation
        run: |
-          sudo pipenv run pytest '${{ inputs.validation-tests }}' --nic="${TEST_PORT_P},${TEST_PORT_R}" --media=/mnt/media
+          . "${{ github.workspace }}/.github/scripts/run_validation_tests.sh"
"${{ github.workspace }}/.github/scripts/run_validation_tests.sh" + env: + TEST_PORT_P: ${{ env.TEST_PORT_P }} + TEST_PORT_R: ${{ env.TEST_PORT_R }} + PYTEST_RETRIES: '3' + VALIDATION_TESTS_1: ${{ inputs.validation-tests-1 }} + VALIDATION_TESTS_2: ${{ inputs.validation-tests-2 }} + VALIDATION_NO_FAIL_TESTS: ${{ inputs.validation-no-fail-tests }} + + - name: 'execution: Run validation-pre-release-1 in pipenv environment' + if: inputs.validation-pre-release-1 != 'NONE' + working-directory: tests/validation + run: | + echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-1 }} ==" || ${{ inputs.validation-no-fail-tests }} + + - name: 'execution: Run validation-pre-release-2 in pipenv environment' + if: inputs.validation-pre-release-2 != 'NONE' + working-directory: tests/validation + run: | + echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-2 }} ==" || ${{ inputs.validation-no-fail-tests }} + + - name: 'execution: Run validation-pre-release-3 in pipenv environment' + if: inputs.validation-pre-release-3 != 'NONE' + working-directory: tests/validation + run: | + echo "== TO BE IMPLEMENTED ${{ inputs.validation-pre-release-3 }} ==" || ${{ inputs.validation-no-fail-tests }} + + - name: 'cleanup: Kill MtlManager and pytest routines' + if: always() + run: | + sudo killall -SIGINT pipenv || true + sudo killall -SIGINT pytest || true + sudo killall -SIGINT MtlManager || true + + - name: 'cleanup: Create archive with logs from execution' + if: always() + working-directory: tests/validation + run: | + sudo tar -czf "validation-execution-logs.tar.gz" "./logs" + sudo rm -rf "./logs" + + - name: 'cleanup: Restore valid owner to repository and directories' + if: always() + run: | + sudo chown -R "${USER}" "$(pwd)" + + - name: 'cleanup: Bind pf to kernel driver' + if: always() + run: | + sudo rmmod irdma || true + sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_P}" || true + sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_R}" || true + + - name: 'cleanup: Validation execution logs' + if: always() + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + with: + name: 'validation-execution-logs.tar.gz' + path: '${{ github.workspace }}/tests/validation/validation-execution-logs.tar.gz' + + - name: 'cleanup: Generate runner summary' + if: always() + run: | + { + echo "## Runner ${{ runner.name }}" + echo "Bellow ENV variables are defined on the ${{ runner.name }} self-hosted runner side" + echo "| Variable | Value |" + echo "| --- | --- |" + echo "| TEST_PF_PORT_P | ${TEST_PF_PORT_P} |" + echo "| TEST_PF_PORT_R | ${TEST_PF_PORT_R} |" + echo "| TEST_PORT_P | ${TEST_PORT_P} |" + echo "| TEST_PORT_R | ${TEST_PORT_R} |" + echo "| TEST_DMA_PORT_P | ${TEST_DMA_PORT_P} |" + echo "| TEST_DMA_PORT_R | ${TEST_DMA_PORT_R} |" + echo "| TEST_VF_PORT_P_0 | ${TEST_VF_PORT_P_0} |" + echo "| TEST_VF_PORT_P_1 | ${TEST_VF_PORT_P_1} |" + echo "| TEST_VF_PORT_P_2 | ${TEST_VF_PORT_P_2} |" + echo "| TEST_VF_PORT_P_3 | ${TEST_VF_PORT_P_3} |" + echo "| TEST_VF_PORT_R_0 | ${TEST_VF_PORT_R_0} |" + echo "| TEST_VF_PORT_R_1 | ${TEST_VF_PORT_R_1} |" + echo "| TEST_VF_PORT_R_2 | ${TEST_VF_PORT_R_2} |" + echo "| TEST_VF_PORT_R_3 | ${TEST_VF_PORT_R_3} |" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.gitignore b/.gitignore index e2a5179d4..00e06991a 100644 --- a/.gitignore +++ b/.gitignore @@ -88,4 +88,5 @@ doc/_build # Gpu direct files gpu_direct/tests/fff.h gpu_direct/subprojects/* -!gpu_direct/subprojects/gtest.wrap \ No newline at end of file +!gpu_direct/subprojects/gtest.wrap +_temp* diff 
index e57a9d6bd..1ce299d02 100644
--- a/tests/validation/README.md
+++ b/tests/validation/README.md
@@ -27,19 +27,75 @@ Folder `tests`. Logs from executions are available in `logs` folder. Latest resu
 
 Test engine is based on pytest.
 
-Installation:
+Installation can be done in two ways with a similar outcome:
+
+Approach using pure venv:
 
 ```bash
-python -m venv .venv
+# Install mandatory system packages
+sudo apt update
+sudo apt install -y python3-dev python3-virtualenv python3-venv python3-pip
+# Create virtual environment (venv) for python3
+python3 -m venv .venv
+# Activate venv for python3
 source .venv/bin/activate
-pip install -r requirements.txt
+# Install required python3 modules
+python3 -m pip install -r requirements.txt
+# The user can now run commands like pytest using:
+sudo --preserve-env python3 -m pytest
+```
+
+Approach using pipenv:
+
+```bash
+# Install mandatory system packages
+sudo apt update
+sudo apt install -y python3-dev python3-virtualenv python3-venv python3-pip
+# Install pipenv for python3
+python3 -m pip install pipenv
+# Install the venv using pipenv for python3
+python3 -m pipenv install -r requirements.txt
+# The user can now run commands like pytest using:
+sudo --preserve-env python3 -m pipenv run pytest
+# The user can also activate the pipenv shell to keep it persistent:
+sudo --preserve-env python3 -m pipenv shell
+```
+
+Test cases should be run using `sudo` or a root account. The basic, minimal command takes `--nic="${TEST_PORT_P},${TEST_PORT_R}"` as input.
+Self-hosted runners have a media directory available under the `/mnt/media` path (this is the default value of the media parameter: `--media=/mnt/media`).
+A simple example, run from the tests/validation subdirectory using pipenv, looks like this:
+
+```bash
+sudo \
+  --preserve-env python3 \
+  -m pipenv run pytest \
+  --nic="${TEST_PORT_P},${TEST_PORT_R}" \
+  --media=/mnt/media \
+  --build="../.."
 ```
 
 Content of tests repository:
 
 - invalid - negative tests to check library response to wrong values
 - single - functional tests of different features. These tests are using single host and can be run on PF or VF.
-  - dpdk - tests using DPDK driver
+  - xdp
+  - kernel
+  - ancillary
+  - audio
+  - dma
+  - ffmpeg
+  - gstreamer
+  - kernel_socket - tests using kernel sockets
+  - performance
+  - ptp
+  - rss_mode
+  - rx_timing
+  - st20p
+  - st22p
+  - st30p
+  - st41
+  - udp
+  - video
+  - virtio_user
   - xdp - tests using XDP driver mode
-  - kernel - tests using kernel sockets
 - dual - functional, load and stress tests. These tests require dual host setup with switch.
diff --git a/tests/validation/tests/Engine/fixtures.py b/tests/validation/tests/Engine/fixtures.py
index 26c3baf56..7ec4a3a69 100644
--- a/tests/validation/tests/Engine/fixtures.py
+++ b/tests/validation/tests/Engine/fixtures.py
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: BSD-3-Clause
 # Copyright(c) 2024-2025 Intel Corporation
 import os
+import time
 from typing import Dict
 
 import pytest
@@ -96,3 +97,9 @@ def test_time(request):
     if test_time is None:
         return 30
     return int(test_time)
+
+
+@pytest.fixture(autouse=True)
+def delay_between_tests():
+    time.sleep(3)
+    yield
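
The `delay_between_tests` fixture added at the end of the diff is marked `autouse=True`, so pytest applies it to every test without it being requested explicitly: `time.sleep(3)` runs before each test body, and `yield` then hands control to the test. A minimal sketch of the same idea with a configurable pause is shown below; the `MTL_TEST_DELAY` variable name is a hypothetical illustration and not part of this patch:

```python
# Hypothetical sketch, not part of the patch: the same autouse fixture,
# with the delay read from an assumed MTL_TEST_DELAY environment variable.
import os
import time

import pytest


@pytest.fixture(autouse=True)
def delay_between_tests():
    # Sleep before each test starts; fall back to the patch's 3 seconds.
    time.sleep(int(os.environ.get("MTL_TEST_DELAY", "3")))
    yield  # hand control to the test body
```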