Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update devctr to 24.04 #4784

Open
wants to merge 14 commits into
base: main
Choose a base branch
from
6 changes: 3 additions & 3 deletions .buildkite/pipeline_cpu_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ class BkStep(str, Enum):
cpu_template_test = {
"rdmsr": {
BkStep.COMMAND: [
"tools/devtool -y test --no-build -- -s -ra -m nonci -n4 --log-cli-level=INFO integration_tests/functional/test_cpu_features.py -k 'test_cpu_rdmsr' "
"tools/devtool -y test --no-build -- -m nonci -n4 --dist worksteal integration_tests/functional/test_cpu_features.py -k 'test_cpu_rdmsr' "
],
BkStep.LABEL: "📖 rdmsr",
"instances": ["c5n.metal", "m5n.metal", "m6a.metal", "m6i.metal"],
Expand All @@ -40,7 +40,7 @@ class BkStep(str, Enum):
"cpuid_wrmsr": {
"snapshot": {
BkStep.COMMAND: [
"tools/devtool -y test --no-build -- -s -ra -m nonci -n4 --log-cli-level=INFO integration_tests/functional/test_cpu_features.py -k 'test_cpu_wrmsr_snapshot or test_cpu_cpuid_snapshot'",
"tools/devtool -y test --no-build -- -m nonci -n4 --dist worksteal integration_tests/functional/test_cpu_features.py -k 'test_cpu_wrmsr_snapshot or test_cpu_cpuid_snapshot'",
"mkdir -pv tests/snapshot_artifacts_upload/{instance}_{os}_{kv}",
"sudo mv tests/snapshot_artifacts/* tests/snapshot_artifacts_upload/{instance}_{os}_{kv}",
],
Expand All @@ -52,7 +52,7 @@ class BkStep(str, Enum):
BkStep.COMMAND: [
"buildkite-agent artifact download tests/snapshot_artifacts_upload/{instance}_{os}_{kv}/**/* .",
"mv tests/snapshot_artifacts_upload/{instance}_{os}_{kv} tests/snapshot_artifacts",
"tools/devtool -y test --no-build -- -s -ra -m nonci -n4 --log-cli-level=INFO integration_tests/functional/test_cpu_features.py -k 'test_cpu_wrmsr_restore or test_cpu_cpuid_restore'",
"tools/devtool -y test --no-build -- -m nonci -n4 --dist worksteal integration_tests/functional/test_cpu_features.py -k 'test_cpu_wrmsr_restore or test_cpu_cpuid_restore'",
],
BkStep.LABEL: "📸 load snapshot artifacts created on {instance} {snapshot_os} {snapshot_kv} to {restore_instance} {restore_os} {restore_kv}",
BkStep.TIMEOUT: 30,
Expand Down
3 changes: 3 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,6 @@ build/
src/
tests/
docs/
resources/
tools/test-popular-containers/
test_results/
62 changes: 35 additions & 27 deletions tests/integration_tests/functional/test_cpu_features.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

# pylint: disable=too-many-lines

import csv
import io
import os
import platform
Expand All @@ -14,7 +15,6 @@
from difflib import unified_diff
from pathlib import Path

import pandas as pd
import pytest

import framework.utils_cpuid as cpuid_utils
Expand All @@ -30,6 +30,12 @@
DATA_FILES = Path("./data/msr")


def read_msr_csv(fd):
    """Parse an MSR dump in CSV form into a list of row dicts.

    Each row is a dict keyed by the CSV header (e.g. ``MSR_ADDR``, ``VALUE``),
    with all values kept as strings exactly as read from *fd*.
    """
    return [record for record in csv.DictReader(fd)]


def clean_and_mkdir(dir_path):
"""
Create a clean directory
Expand Down Expand Up @@ -313,7 +319,7 @@ def test_cpu_rdmsr(
assert stderr == ""

# Load results read from the microvm
microvm_df = pd.read_csv(io.StringIO(stdout))
guest_recs = read_msr_csv(io.StringIO(stdout))

# Load baseline
host_cpu = global_props.cpu_codename
Expand All @@ -329,11 +335,9 @@ def test_cpu_rdmsr(

# Load baseline
baseline_file_path = DATA_FILES / baseline_file_name
# We can use the following line when regathering baselines.
# microvm_df.to_csv(baseline_file_path, index=False, encoding="utf-8")
baseline_df = pd.read_csv(baseline_file_path)
baseline_recs = read_msr_csv(baseline_file_path.open())

check_msrs_are_equal(baseline_df, microvm_df)
check_msrs_are_equal(baseline_recs, guest_recs)


# These names need to be consistent across the two parts of the snapshot-restore test
Expand Down Expand Up @@ -441,29 +445,31 @@ def test_cpu_wrmsr_snapshot(
snapshot.save_to(snapshot_artifacts_dir)


def check_msrs_are_equal(before_recs, after_recs):
    """
    Check that the MSRs reported before and after are equal.

    Both arguments are lists of row dicts with ``MSR_ADDR`` and ``VALUE``
    keys (as produced by ``read_msr_csv``). The check fails if:
      - an MSR address is present in one list but not the other, or
      - an MSR's value differs between the two lists, unless the MSR is in
        ``MSR_EXCEPTION_LIST`` (MSRs whose value may legitimately change at
        runtime — those are only checked for presence, not value).

    All mismatches are printed before the final assertion so a single run
    reports every difference, not just the first one.
    """
    before = {x["MSR_ADDR"]: x["VALUE"] for x in before_recs}
    after = {x["MSR_ADDR"]: x["VALUE"] for x in after_recs}
    # We first want to see if the same set of MSRs are exposed in the microvm,
    # so compare presence over the union of both address sets.
    all_msrs = set(before.keys()) | set(after.keys())

    changes = 0
    for msr in all_msrs:
        if msr in before and msr not in after:
            print(f"MSR removed {msr} before={before[msr]}")
            changes += 1
        elif msr not in before and msr in after:
            print(f"MSR added {msr} after={after[msr]}")
            changes += 1
        elif msr in MSR_EXCEPTION_LIST:
            # Value may change at runtime; presence already verified above.
            continue
        elif before[msr] != after[msr]:
            # Same MSR on both sides: compare values.
            print(f"MSR changed {msr} before={before[msr]} after={after[msr]}")
            changes += 1
    assert changes == 0


@pytest.mark.skipif(
Expand Down Expand Up @@ -504,11 +510,12 @@ def test_cpu_wrmsr_restore(microvm_factory, msr_cpu_template, guest_kernel):
# Dump MSR state to a file for further comparison
msrs_after_fname = snapshot_artifacts_dir / shared_names["msrs_after_fname"]
dump_msr_state_to_file(msrs_after_fname, vm.ssh, shared_names)
msrs_before_fname = snapshot_artifacts_dir / shared_names["msrs_before_fname"]

# Compare the two lists of MSR values and assert they are equal
before_df = pd.read_csv(snapshot_artifacts_dir / shared_names["msrs_before_fname"])
after_df = pd.read_csv(snapshot_artifacts_dir / shared_names["msrs_after_fname"])
check_msrs_are_equal(before_df, after_df)
before_recs = read_msr_csv(msrs_before_fname.open())
after_recs = read_msr_csv(msrs_after_fname.open())
check_msrs_are_equal(before_recs, after_recs)


def dump_cpuid_to_file(dump_fname, ssh_conn):
Expand Down Expand Up @@ -679,6 +686,7 @@ def test_cpu_template(uvm_plain_any, cpu_template, microvm_factory):
def check_masked_features(test_microvm, cpu_template):
"""Verify the masked features of the given template."""
# fmt: off
must_be_unset = []
if cpu_template == "C3":
must_be_unset = [
(0x1, 0x0, "ecx",
Expand Down
47 changes: 21 additions & 26 deletions tests/integration_tests/functional/test_drive_virtio.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,18 +168,15 @@ def test_non_partuuid_boot(uvm_plain_any, io_engine):

test_microvm.start()

# Prepare the input for doing the assertion
assert_dict = {}
# Keep an array of strings specifying the location where some string
# from the output is located.
# 1-0 means line 1, column 0.
keys_array = ["1-0", "1-6", "2-0"]
# Keep a dictionary where the keys are the location and the values
# represent the input to assert against.
assert_dict[keys_array[0]] = "ro"
assert_dict[keys_array[1]] = "/dev/vda"
assert_dict[keys_array[2]] = "ro"
_check_drives(test_microvm, assert_dict, keys_array)
# 1, 0 means line 1, column 0.
assert_dict = {
(1, 0): "ro",
(1, 6): "/dev/vda",
(2, 0): "ro",
}
_check_drives(test_microvm, assert_dict, assert_dict.keys())


def test_partuuid_boot(uvm_plain_any, partuuid_and_disk_path_tmpfs, io_engine):
Expand Down Expand Up @@ -207,13 +204,13 @@ def test_partuuid_boot(uvm_plain_any, partuuid_and_disk_path_tmpfs, io_engine):
)
test_microvm.start()

assert_dict = {}
keys_array = ["1-0", "1-6", "2-0", "2-6"]
assert_dict[keys_array[0]] = "rw"
assert_dict[keys_array[1]] = "/dev/vda"
assert_dict[keys_array[2]] = "rw"
assert_dict[keys_array[3]] = "/dev/vda1"
_check_drives(test_microvm, assert_dict, keys_array)
assert_dict = {
(1, 0): "rw",
(1, 6): "/dev/vda",
(2, 0): "rw",
(2, 6): "/dev/vda1",
}
_check_drives(test_microvm, assert_dict, assert_dict.keys())


def test_partuuid_update(uvm_plain_any, io_engine):
Expand Down Expand Up @@ -247,11 +244,11 @@ def test_partuuid_update(uvm_plain_any, io_engine):
test_microvm.start()

# Assert that the final booting method is from /dev/vda.
assert_dict = {}
keys_array = ["1-0", "1-6"]
assert_dict[keys_array[0]] = "rw"
assert_dict[keys_array[1]] = "/dev/vda"
_check_drives(test_microvm, assert_dict, keys_array)
assert_dict = {
(1, 0): "rw",
(1, 6): "/dev/vda",
}
_check_drives(test_microvm, assert_dict, assert_dict.keys())


def test_patch_drive(uvm_plain_any, io_engine):
Expand Down Expand Up @@ -370,11 +367,9 @@ def _check_file_size(ssh_connection, dev_path, size):

def _process_blockdev_output(blockdev_out, assert_dict, keys_array):
blockdev_out_lines = blockdev_out.splitlines()
for key in keys_array:
line = int(key.split("-")[0])
col = int(key.split("-")[1])
for line, col in keys_array:
blockdev_out_line_cols = blockdev_out_lines[line].split()
assert blockdev_out_line_cols[col] == assert_dict[key]
assert blockdev_out_line_cols[col] == assert_dict[line, col]


def _check_drives(test_microvm, assert_dict, keys_array):
Expand Down
1 change: 1 addition & 0 deletions tests/integration_tests/functional/test_topology.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ def _check_cache_topology_x86(
)

cpu_vendor = utils.get_cpu_vendor()
expected_level_1_topology = expected_level_3_topology = None
if cpu_vendor == utils.CpuVendor.AMD:
key_share = "extra cores sharing this cache"
expected_level_1_topology = {
Expand Down
Loading
Loading