diff --git a/.github/scripts/run_validation_tests.sh b/.github/scripts/run_validation_tests.sh
index b6e9051a3..198242e59 100755
--- a/.github/scripts/run_validation_tests.sh
+++ b/.github/scripts/run_validation_tests.sh
@@ -2,21 +2,9 @@
 
 set +e
 
-VALIDATION_TESTS_1="${1:-$VALIDATION_TESTS_1}"
-VALIDATION_TESTS_2="${2:-$VALIDATION_TESTS_2}"
-PYTEST_ALIAS="${3:-$PYTEST_ALIAS}"
-PYTEST_PARAMS="${4:-$PYTEST_PARAMS}"
-export TEST_PORT_P="${5:-$TEST_PORT_P}"
-export TEST_PORT_R="${6:-$TEST_PORT_R}"
-PYTEST_RETRIES="${PYTEST_RETRIES:-3}"
-
 # Function to log messages to GitHub Actions
-function LOG_GITHUB_SUMMARY() {
-    echo "$@" >> "$GITHUB_STEP_SUMMARY"
-}
-
-function LOG_GITHUB_CONSOLE() {
-    echo "$@"
+log_to_github() {
+    echo "$1" >> "$GITHUB_STEP_SUMMARY"
 }
 
 # Function to run a test and handle retries
@@ -27,84 +15,83 @@ run_test() {
     local pytest_params=$4
     local test_port_p=$5
     local test_port_r=$6
-    local PYTEST_START_TIME=""
-    local PYTEST_END_TIME=""
-    local PYTEST_DURATION=""
-    local PYTEST_TASK_STATUS="❌"
-    local PYTEST_SUFFIX="[Err]"
 
-    LOG_GITHUB_CONSOLE "::group::${test}"
-    PYTEST_START_TIME=$(date '+%s')
-    # shellcheck disable=SC2086
-    ${pytest_alias} "${test}" ${pytest_params} --nic="${test_port_p},${test_port_r}" --collect-only -q --no-summary
+    echo "::group::${test}"
+    local start_time=$(date '+%s')
+    $pytest_alias "${test}" $pytest_params --nic="${test_port_p},${test_port_r}" --collect-only -q --no-summary
 
     for retry in $(seq 1 "$retries"); do
-        # shellcheck disable=SC2086
-        ${pytest_alias} "${test}" ${pytest_params} --nic="${test_port_p},${test_port_r}"
+        $pytest_alias "${test}" $pytest_params --nic="${test_port_p},${test_port_r}"
         local result=$?
-        LOG_GITHUB_CONSOLE "RETRY: ${retry}"
+        echo "RETRY: ${retry}"
         [[ "$result" == "0" ]] && break
     done
 
-    PYTEST_END_TIME="$(date '+%s')"
-    PYTEST_DURATION="$((PYTEST_END_TIME - PYTEST_START_TIME))"
+    local end_time=$(date '+%s')
+    local duration=$((end_time - start_time))
+    local status="❌"
+    local suffix="[Err]"
 
     if [[ "$result" == "0" ]]; then
-        PYTEST_TASK_STATUS="✅"
-        PYTEST_SUFFIX="[OK]"
+        status="✅"
+        suffix="[OK]"
         TESTS_SUCCESS+=("${test}")
     else
         TESTS_FAIL+=("${test}")
     fi
 
-    LOG_GITHUB_SUMMARY "| ${PYTEST_TASK_STATUS} | ${test} | $(date --date=@${PYTEST_START_TIME} '+%d%m%y_%H%M%S') | $(date --date="@${PYTEST_END_TIME}" '+%d%m%y_%H%M%S') | ${PYTEST_DURATION}s | ${PYTEST_SUFFIX} |"
-    LOG_GITHUB_CONSOLE "::endgroup::"
+    log_to_github "| ${status} | ${test} | $(date --date=@${start_time} '+%d%m%y_%H%M%S') | $(date --date=@${end_time} '+%d%m%y_%H%M%S') | ${duration}s | ${suffix} |"
+    echo "::endgroup::"
 }
 
 # Main script execution
-LOG_GITHUB_CONSOLE "::group::pre-execution-summary"
+echo "::group::pre-execution-summary"
+
+# Export environment variables
+export TEST_PORT_P="${TEST_PORT_P}"
+export TEST_PORT_R="${TEST_PORT_R}"
 
 # Collect tests to be executed
 TESTS_INCLUDED_IN_EXECUTION=(
-    $(grep -v "collected in" <(${PYTEST_ALIAS} "tests/${VALIDATION_TESTS_1}" --collect-only -q --no-summary 2>&1))
+    $(grep -v "collected in" <($PYTEST_ALIAS "tests/${VALIDATION_TESTS_1}" $PYTEST_PARAMS --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1))
 )
 SUMMARY_MAIN_HEADER="Starting tests/${VALIDATION_TESTS_1}"
 
 if [[ -n "${VALIDATION_TESTS_2}" ]]; then
     TESTS_INCLUDED_IN_EXECUTION+=(
-        $(grep -v "collected in" <(${PYTEST_ALIAS} "tests/${VALIDATION_TESTS_2}" --collect-only -q --no-summary 2>&1))
+        $(grep -v "collected in" <($PYTEST_ALIAS "tests/${VALIDATION_TESTS_2}" $PYTEST_PARAMS --nic="${TEST_PORT_P},${TEST_PORT_R}" --collect-only -q --no-summary 2>&1))
     )
     SUMMARY_MAIN_HEADER="${SUMMARY_MAIN_HEADER}, tests/${VALIDATION_TESTS_2}"
 fi
 
+NUMBER_OF_TESTS="${#TESTS_INCLUDED_IN_EXECUTION[@]}"
 TESTS_FAIL=()
 TESTS_SUCCESS=()
 
-LOG_GITHUB_CONSOLE "${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
-LOG_GITHUB_CONSOLE "----------------------------------"
-LOG_GITHUB_CONSOLE "Tests to be executed:"
-LOG_GITHUB_CONSOLE "${TESTS_INCLUDED_IN_EXECUTION[@]}"
-
-LOG_GITHUB_SUMMARY "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
-LOG_GITHUB_SUMMARY "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |"
-LOG_GITHUB_SUMMARY "| --- | --- | --- | --- | --- | --- |"
+echo "${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
+echo "----------------------------------"
+echo "Tests to be executed:"
+echo "${TESTS_INCLUDED_IN_EXECUTION[@]}"
 
-LOG_GITHUB_CONSOLE "::endgroup::"
+log_to_github "## ${SUMMARY_MAIN_HEADER} tests (total ${NUMBER_OF_TESTS}) :rocket:"
+log_to_github "| ❌/✅ | Collected Test | Started | Ended | Took (s) | Result |"
+log_to_github "| --- | --- | --- | --- | --- | --- |"
+echo "::endgroup::"
 
 # Execute each test
 for test in "${TESTS_INCLUDED_IN_EXECUTION[@]}"; do
-    run_test "$test" "${PYTEST_RETRIES}" "${PYTEST_ALIAS}" "${PYTEST_PARAMS}" "${TEST_PORT_P}" "${TEST_PORT_R}"
+    run_test "$test" "$PYTEST_RETRIES" "$PYTEST_ALIAS" "$PYTEST_PARAMS" "$TEST_PORT_P" "$TEST_PORT_R"
 done
 
 # Summary of test results
-LOG_GITHUB_SUMMARY "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:"
-LOG_GITHUB_SUMMARY "${TESTS_SUCCESS[@]}"
-LOG_GITHUB_SUMMARY "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:"
-LOG_GITHUB_SUMMARY "${TESTS_FAIL[@]}"
+log_to_github "### Total success ${#TESTS_SUCCESS[@]}/${NUMBER_OF_TESTS}:"
+log_to_github "${TESTS_SUCCESS[@]}"
+log_to_github "### Total failed ${#TESTS_FAIL[@]}/${NUMBER_OF_TESTS}:"
+log_to_github "${TESTS_FAIL[@]}"
 
 # Determine exit status
 if [[ "${#TESTS_FAIL[@]}" == "0" ]] || [[ "${VALIDATION_NO_FAIL_TESTS}" == "true" ]]; then
     exit 0
+else
+    exit 1
 fi
-
-exit 1
diff --git a/.github/workflows/gtest-bare-metal.yml b/.github/workflows/gtest-bare-metal.yml
index 6541c4c9d..db90f105c 100644
--- a/.github/workflows/gtest-bare-metal.yml
+++ b/.github/workflows/gtest-bare-metal.yml
@@ -27,6 +27,7 @@ env: # Customize the env if
   BUILD_TYPE: 'Release'
   DPDK_VERSION: '23.11'
+  DPDK_REBUILD: 'false'
   # Bellow ENV variables are required to be defined on runner side:
   # TEST_PF_PORT_P: '0000:49:00.0'
   # TEST_PF_PORT_R: '0000:49:00.1'
@@ -71,6 +72,7 @@ jobs:
         ref: '${{ inputs.branch-to-checkout || github.head_ref || github.ref }}'
 
     - name: Checkout DPDK
+      if: env.DPDK_REBUILD == 'true'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      with:
        repository: 'DPDK/dpdk'
@@ -83,10 +85,12 @@ jobs:
        sudo apt-get install -y systemtap-sdt-dev
 
    - name: Apply dpdk patches
+      if: env.DPDK_REBUILD == 'true'
      run: |
        patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)
 
    - name: Build dpdk
+      if: env.DPDK_REBUILD == 'true'
      run: |
        cd dpdk
        meson build
@@ -119,8 +123,8 @@ jobs:
    - name: Binding network adapter
      run: |
        sudo ./script/nicctl.sh create_vf "${TEST_PF_PORT_P}" || true
-        sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
-        sudo ./dpdk/usertools/dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
+        sudo dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_P}" || true
+        sudo dpdk-devbind.py -b vfio-pci "${TEST_DMA_PORT_R}" || true
 
    - name: Start MtlManager at background
      run: |
diff --git a/.github/workflows/validation-tests.yml b/.github/workflows/validation-tests.yml
index 2ce2a2194..ca3c221eb 100644
--- a/.github/workflows/validation-tests.yml
+++ b/.github/workflows/validation-tests.yml
@@ -111,6 +111,7 @@ on:
 env:
   BUILD_TYPE: 'Release'
   DPDK_VERSION: '23.11'
+  DPDK_REBUILD: 'false'
 
 permissions:
   contents: read
@@ -140,6 +141,7 @@ jobs:
 
      - name: 'preparation: Checkout DPDK'
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        if: env.DPDK_REBUILD == 'true'
        with:
          repository: 'DPDK/dpdk'
          ref: 'v${{ env.DPDK_VERSION }}'
@@ -166,11 +168,13 @@ jobs:
            systemtap-sdt-dev
 
      - name: 'configuration: Apply dpdk patches'
+        if: env.DPDK_REBUILD == 'true'
        run: |
          patch -d "dpdk" -p1 -i <(cat patches/dpdk/${{ env.DPDK_VERSION }}/*.patch)
 
      - name: 'installation: Build dpdk'
        working-directory: dpdk
+        if: env.DPDK_REBUILD == 'true'
        run: |
          meson build
          ninja -C build
@@ -231,7 +235,6 @@ jobs:
          sudo rmmod irdma || true
          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_P}" || true
          sudo ./script/nicctl.sh ${{ inputs.validation-iface-binding }} "${TEST_PF_PORT_R}" || true
-          sudo modprobe irdma || true
 
      - name: 'preparation: Start MtlManager at background'
        run: |
@@ -240,7 +243,7 @@ jobs:
      - name: 'execution: Run validation-bare-metal tests in pipenv environment'
        working-directory: tests/validation
        run: |
-          "${{ github.workspace }}/.github/scripts/run_validation_tests.sh"
+          . "${{ github.workspace }}/.github/scripts/run_validation_tests.sh"
        env:
          TEST_PORT_P: ${{ env.TEST_PORT_P }}
          TEST_PORT_R: ${{ env.TEST_PORT_R }}
@@ -294,7 +297,6 @@ jobs:
          sudo rmmod irdma || true
          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_P}" || true
          sudo ./script/nicctl.sh bind_kernel "${TEST_PF_PORT_R}" || true
-          sudo modprobe irdma || true
 
      - name: 'cleanup: Validation execution logs'
        if: always()
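
Note on invoking the refactored script: run_validation_tests.sh no longer accepts positional arguments. It now reads VALIDATION_TESTS_1/VALIDATION_TESTS_2, PYTEST_ALIAS, PYTEST_PARAMS, PYTEST_RETRIES, TEST_PORT_P and TEST_PORT_R from the environment, and the workflow sources it (". script.sh") rather than executing it. A minimal local sketch of the post-patch invocation is shown below; every value is a hypothetical placeholder rather than something taken from the patch, and PYTEST_RETRIES must now be set explicitly because the "${PYTEST_RETRIES:-3}" default was removed.

    # Hypothetical values for illustration only; substitute the runner's real configuration.
    export VALIDATION_TESTS_1="single/video"      # test path under tests/ (placeholder)
    export PYTEST_ALIAS="pipenv run pytest"       # pytest launcher (placeholder)
    export PYTEST_PARAMS=""                       # extra pytest flags, if any
    export PYTEST_RETRIES=3                       # no longer defaulted inside the script
    export TEST_PORT_P="0000:49:01.0"             # VF PCI address (placeholder)
    export TEST_PORT_R="0000:49:01.1"             # VF PCI address (placeholder)
    export GITHUB_STEP_SUMMARY="$(mktemp)"        # log_to_github appends its Markdown table here
    . .github/scripts/run_validation_tests.sh

Because the script is sourced and still calls exit, a failing run terminates the sourcing shell as well, which is the behavior a GitHub Actions run step expects.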