# Network Tests workflow — run for PR #286
# ("Store last N transactions at least for every account in RPC").
name: Network Tests

on:
  # Full network tests run only in the merge queue against master.
  merge_group:
    branches: [master]
  # PR events still trigger the workflow so the status-check job can report.
  pull_request:

# One active run per PR (or ref); a newer push cancels the in-flight run.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  CC: /usr/bin/clang
  CXX: /usr/bin/clang++
  TYCHO_BUILD_PROFILE: release_check
  # Run-id suffix keeps every run's artifact unique and fresh.
  TYCHO_ARTIFACT_NAME: tycho-binary-${{ github.run_id }}
jobs:
  # The artifact name carries the run id, so it is always unique and fresh;
  # no explicit cleanup is needed — 1-day retention handles it.
  # Reusing the prebuilt binary is faster than rebuilding in each matrix job
  # (building per-job timed out after 30m even with cache).
  build-node:
    name: Build Tycho Binary
    runs-on: [self-hosted, linux]
    # Tests only run in the merge queue, so only build there.
    if: github.event_name == 'merge_group'
    outputs:
      # Workspace-relative path of the built binary.
      binary_path: ${{ steps.get_path.outputs.path }}

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Initialize Environment (for Build)
        uses: ./.github/actions/init
        with:
          setup-cache: true

      - name: Build Project
        shell: bash
        run: |
          echo "Building ${{ env.TYCHO_BUILD_PROFILE }} profile..."
          just build

      - name: Determine Binary Path
        id: get_path
        shell: bash
        run: |
          # The build already happened above; the script only resolves the path.
          binary_path=$(./scripts/build-node.sh)
          echo "Binary built at: $binary_path"
          # Emit the path relative to the workspace root for artifact upload.
          echo "path=${binary_path#${GITHUB_WORKSPACE}/}" >> "$GITHUB_OUTPUT"

      - name: Upload Tycho Binary Artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.TYCHO_ARTIFACT_NAME }}
          path: ${{ steps.get_path.outputs.path }}
          retention-days: 1 # Keep artifact only for 1 day
network-tests:
name: Network Integration Tests - ${{ matrix.test_name }}
runs-on: [self-hosted, linux]
needs: build-node
if: github.event_name == 'merge_group'
timeout-minutes: 30
strategy:
fail-fast: false # Allow other tests to run even if one fails
matrix:
include:
- test_name: destroyable
test_image: ghcr.io/broxus/tycho-tests/tycho-tests-destroyable:latest
- test_name: ping-pong
test_image: ghcr.io/broxus/tycho-tests/tycho-tests-ping-pong:latest
- test_name: one-to-many-internal-messages
test_image: ghcr.io/broxus/tycho-tests/tycho-tests-one-to-many-internal-messages:latest
- test_name: fq-deploy
test_image: ghcr.io/broxus/tycho-tests/tycho-tests-fq:latest
- test_name: nft-index
test_image: ghcr.io/broxus/tycho-tests/tycho-tests-nft-index:latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Initialize Environment
uses: ./.github/actions/init
with:
setup-cache: false
fake-procfs: true
- name: Download Tycho Binary Artifact
uses: actions/download-artifact@v4
with:
name: ${{ env.TYCHO_ARTIFACT_NAME }}
path: ./downloaded_bin/
- name: Prepare Binary and Set Path
shell: bash
run: |
downloaded_binary=$(find ./downloaded_bin/ -type f)
if [ -z "$downloaded_binary" ]; then
echo "ERROR: Downloaded binary not found in ./downloaded_bin/"
exit 1
fi
echo "Found downloaded binary: $downloaded_binary"
chmod +x "$downloaded_binary"
# Set the environment variable to the absolute path of the binary
echo "TYCHO_BIN_PATH=$(readlink -f "$downloaded_binary")" >> $GITHUB_ENV
echo "Using TYCHO_BIN_PATH=${TYCHO_BIN_PATH}"
- name: Login to GHCR
shell: bash
run: |
set -e
echo "${{ secrets.GHCR_TOKEN }}" | podman login ghcr.io -u ${{ secrets.GHCR_USER }} --password-stdin
- name: Run network tests
shell: bash
run: |
# The script will use the TYCHO_BIN_PATH env
./scripts/run-network-tests.sh ${{ matrix.test_image }}
- name: Archive Logs and Configs
if: always()
id: archive
shell: bash
run: |
archive_dir=".temp"
archive_name="network-test-logs-configs-${{ matrix.test_name }}.tar.zst"
echo "ARCHIVE_PATH=${archive_name}" >> $GITHUB_ENV # Store path for next step
# Check if the directory exists before attempting to archive
if [ -d "$archive_dir" ]; then
echo "Archiving directory: $archive_dir to $archive_name ..."
# Create archive and compress with zstd (using multiple threads if available: -T0)
# Exclude potentially problematic socket files explicitly if needed, though tar usually handles them okay.
# Using --ignore-failed-read in case some files are transient/deleted during archiving
tar --ignore-failed-read -cf - "$archive_dir" | zstd -T0 -o "$archive_name"
if [ $? -eq 0 ]; then
echo "Archive created successfully."
echo "ARCHIVE_CREATED=true" >> $GITHUB_ENV
else
echo "Archiving failed."
echo "ARCHIVE_CREATED=false" >> $GITHUB_ENV
fi
else
echo "Directory $archive_dir not found, skipping archiving."
echo "ARCHIVE_CREATED=false" >> $GITHUB_ENV
fi
- name: Upload Logs and Configs Artifact
# run if prior steps failed, but only if the archive was actually created
if: always() && steps.archive.outputs.ARCHIVE_CREATED == 'true'
uses: actions/upload-artifact@v4
with:
name: network-test-logs-${{ matrix.test_name }}-${{ github.run_id }}
path: ${{ env.ARCHIVE_PATH }}
retention-days: 5 # 5 days retention
if-no-files-found: warn
network_tests_status:
name: Network Tests Status Check # This is the name to require in branch protection
runs-on: ubuntu-latest
# Depends on the matrix job, but runs even if it's skipped or fails
needs: network-tests
if: always() # Ensures this job runs even if 'network-tests' is skipped (on PR) or fails
steps:
- name: Check network tests matrix outcome
shell: bash
run: |
echo "Event: ${{ github.event_name }}"
echo "Network Tests Matrix Job Result: ${{ needs.network-tests.result }}"
# If on merge_group and the matrix failed or was cancelled, fail this status check
if [[ "${{ github.event_name }}" == "merge_group" && \
("${{ needs.network-tests.result }}" == "failure" || \
"${{ needs.network-tests.result }}" == "cancelled") ]]; then
echo "Network tests failed or were cancelled on merge_group run."
exit 1
else
# Otherwise (on PR where matrix is skipped, or on merge_group where matrix succeeded)
echo "Network tests status check passed."
exit 0
fi